diff --git a/Cargo.lock b/Cargo.lock index 5c7d916c17..6dd0b1cf1f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -457,6 +457,8 @@ dependencies = [ "serde-map-to-array", "serde_json", "serde_test", + "strum 0.26.2", + "strum_macros 0.26.4", "thiserror", "tokio-util 0.6.10", ] @@ -565,7 +567,7 @@ dependencies = [ "serde", "serde_bytes", "serde_json", - "strum", + "strum 0.24.1", "tempfile", "thiserror", "tracing", @@ -647,7 +649,7 @@ dependencies = [ "static_assertions", "stats_alloc", "structopt", - "strum", + "strum 0.24.1", "sys-info", "tempfile", "thiserror", @@ -738,7 +740,7 @@ dependencies = [ "serde_bytes", "serde_json", "serde_test", - "strum", + "strum 0.24.1", "tempfile", "thiserror", "tracing", @@ -3033,9 +3035,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.24" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" dependencies = [ "bytes", "fnv", @@ -3108,6 +3110,12 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + [[package]] name = "hello-world" version = "0.1.0" @@ -5445,9 +5453,15 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" dependencies = [ - "strum_macros", + "strum_macros 0.24.3", ] +[[package]] +name = "strum" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d8cec3501a5194c432b2b7976db6b7d10ec95c253208b45f83f7136aa985e29" + [[package]] name = "strum_macros" version = "0.24.3" @@ -5461,6 +5475,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "strum_macros" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +dependencies = [ + "heck 0.5.0", + "proc-macro2 1.0.66", + "quote 1.0.32", + "rustversion", + "syn 2.0.28", +] + [[package]] name = "subtle" version = "2.4.1" diff --git a/Makefile b/Makefile index 4e1915a105..eebcd962a1 100644 --- a/Makefile +++ b/Makefile @@ -145,7 +145,7 @@ lint-smart-contracts: .PHONY: audit-rs audit-rs: - $(CARGO) audit --ignore RUSTSEC-2024-0332 + $(CARGO) audit --ignore RUSTSEC-2024-0344 .PHONY: audit-as audit-as: diff --git a/binary_port/Cargo.toml b/binary_port/Cargo.toml index 3ab9705f51..afbeb65714 100644 --- a/binary_port/Cargo.toml +++ b/binary_port/Cargo.toml @@ -26,6 +26,8 @@ tokio-util = { version = "0.6.4", features = ["codec"] } casper-types = { path = "../types", features = ["datasize", "json-schema", "std", "testing"] } serde_json = "1" serde_test = "1" +strum = "0.26.2" +strum_macros = "0.26.4" [package.metadata.docs.rs] all-features = true diff --git a/binary_port/src/error_code.rs b/binary_port/src/error_code.rs index 4566166c0a..5f617a46bc 100644 --- a/binary_port/src/error_code.rs +++ b/binary_port/src/error_code.rs @@ -2,9 +2,13 @@ use core::{convert::TryFrom, fmt}; use casper_types::{InvalidDeploy, InvalidTransaction, InvalidTransactionV1}; +#[cfg(test)] +use strum_macros::EnumIter; + /// The error code indicating the result of handling 
the binary request. -#[derive(Debug, Clone, thiserror::Error)] +#[derive(Debug, Copy, Clone, thiserror::Error, Eq, PartialEq)] #[repr(u16)] +#[cfg_attr(test, derive(EnumIter))] pub enum ErrorCode { /// Request executed correctly. #[error("request executed correctly")] @@ -198,6 +202,75 @@ pub enum ErrorCode { /// Invalid binary port version. #[error("binary protocol version mismatch")] BinaryProtocolVersionMismatch = 61, + /// Blockchain is empty + #[error("blockchain is empty")] + EmptyBlockchain = 62, + /// Expected deploy, but got transaction + #[error("expected deploy, got transaction")] + ExpectedDeploy = 63, + /// Expected transaction, but got deploy + #[error("expected transaction V1, got deploy")] + ExpectedTransaction = 64, + /// Transaction has expired + #[error("transaction has expired")] + TransactionExpired = 65, + /// Transactions parameters are missing or incorrect + #[error("missing or incorrect transaction parameters")] + MissingOrIncorrectParameters = 66, + /// No such addressable entity + #[error("no such addressable entity")] + NoSuchAddressableEntity = 67, + // No such contract at hash + #[error("no such contract at hash")] + NoSuchContractAtHash = 68, + /// No such entry point + #[error("no such entry point")] + NoSuchEntryPoint = 69, + /// No such package at hash + #[error("no such package at hash")] + NoSuchPackageAtHash = 70, + /// Invalid entity at version + #[error("invalid entity at version")] + InvalidEntityAtVersion = 71, + /// Disabled entity at version + #[error("disabled entity at version")] + DisabledEntityAtVersion = 72, + /// Missing entity at version + #[error("missing entity at version")] + MissingEntityAtVersion = 73, + /// Invalid associated keys + #[error("invalid associated keys")] + InvalidAssociatedKeys = 74, + /// Insufficient signature weight + #[error("insufficient signature weight")] + InsufficientSignatureWeight = 75, + /// Insufficient balance + #[error("insufficient balance")] + InsufficientBalance = 76, + /// Unknown balance + #[error("unknown balance")] + UnknownBalance = 77, + /// Invalid payment variant for deploy + #[error("invalid payment variant for deploy")] + DeployInvalidPaymentVariant = 78, + /// Missing payment amount for deploy + #[error("missing payment amount for deploy")] + DeployMissingPaymentAmount = 79, + /// Failed to parse payment amount for deploy + #[error("failed to parse payment amount for deploy")] + DeployFailedToParsePaymentAmount = 80, + /// Missing transfer target for deploy + #[error("missing transfer target for deploy")] + DeployMissingTransferTarget = 81, + /// Missing module bytes for deploy + #[error("missing module bytes for deploy")] + DeployMissingModuleBytes = 82, + /// Entry point cannot be 'call' + #[error("entry point cannot be 'call'")] + InvalidTransactionEntryPointCannotBeCall = 83, + /// Invalid transaction kind + #[error("invalid transaction kind")] + InvalidTransactionInvalidTransactionKind = 84, } impl TryFrom for ErrorCode { @@ -266,6 +339,30 @@ impl TryFrom for ErrorCode { 58 => Ok(ErrorCode::SwitchBlockNotFound), 59 => Ok(ErrorCode::SwitchBlockParentNotFound), 60 => Ok(ErrorCode::UnsupportedRewardsV1Request), + 61 => Ok(ErrorCode::BinaryProtocolVersionMismatch), + 62 => Ok(ErrorCode::EmptyBlockchain), + 63 => Ok(ErrorCode::ExpectedDeploy), + 64 => Ok(ErrorCode::ExpectedTransaction), + 65 => Ok(ErrorCode::TransactionExpired), + 66 => Ok(ErrorCode::MissingOrIncorrectParameters), + 67 => Ok(ErrorCode::NoSuchAddressableEntity), + 68 => Ok(ErrorCode::NoSuchContractAtHash), + 69 => 
Ok(ErrorCode::NoSuchEntryPoint), + 70 => Ok(ErrorCode::NoSuchPackageAtHash), + 71 => Ok(ErrorCode::InvalidEntityAtVersion), + 72 => Ok(ErrorCode::DisabledEntityAtVersion), + 73 => Ok(ErrorCode::MissingEntityAtVersion), + 74 => Ok(ErrorCode::InvalidAssociatedKeys), + 75 => Ok(ErrorCode::InsufficientSignatureWeight), + 76 => Ok(ErrorCode::InsufficientBalance), + 77 => Ok(ErrorCode::UnknownBalance), + 78 => Ok(ErrorCode::DeployInvalidPaymentVariant), + 79 => Ok(ErrorCode::DeployMissingPaymentAmount), + 80 => Ok(ErrorCode::DeployFailedToParsePaymentAmount), + 81 => Ok(ErrorCode::DeployMissingTransferTarget), + 82 => Ok(ErrorCode::DeployMissingModuleBytes), + 83 => Ok(ErrorCode::InvalidTransactionEntryPointCannotBeCall), + 84 => Ok(ErrorCode::InvalidTransactionInvalidTransactionKind), _ => Err(UnknownErrorCode), } } @@ -394,7 +491,35 @@ impl From for ErrorCode { InvalidTransactionV1::InvalidPricingMode { .. } => { ErrorCode::InvalidTransactionPricingMode } + InvalidTransactionV1::EntryPointCannotBeCall => { + ErrorCode::InvalidTransactionEntryPointCannotBeCall + } + InvalidTransactionV1::InvalidTransactionKind(_) => { + ErrorCode::InvalidTransactionInvalidTransactionKind + } _ => ErrorCode::InvalidTransactionUnspecified, } } } + +#[cfg(test)] +mod tests { + use std::convert::TryFrom; + + use strum::IntoEnumIterator; + + use crate::ErrorCode; + + #[test] + fn try_from_decoded_all_variants() { + for variant in ErrorCode::iter() { + let as_int = variant as u16; + let decoded = ErrorCode::try_from(as_int); + assert!( + decoded.is_ok(), + "variant {} not covered by TryFrom implementation", + as_int + ); + } + } +} diff --git a/binary_port/src/global_state_query_result.rs b/binary_port/src/global_state_query_result.rs index 6dfab35e76..009e5b60a5 100644 --- a/binary_port/src/global_state_query_result.rs +++ b/binary_port/src/global_state_query_result.rs @@ -11,9 +11,10 @@ use casper_types::testing::TestRng; #[cfg(test)] use casper_types::{ByteCode, ByteCodeKind}; +use serde::Serialize; /// Carries the successful result of the global state query. -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize)] pub struct GlobalStateQueryResult { /// Stored value. value: StoredValue, diff --git a/binary_port/src/node_status.rs b/binary_port/src/node_status.rs index f0d8a8c36f..9a67ce2cae 100644 --- a/binary_port/src/node_status.rs +++ b/binary_port/src/node_status.rs @@ -8,11 +8,12 @@ use casper_types::{ use casper_types::testing::TestRng; #[cfg(test)] use rand::Rng; +use serde::Serialize; use crate::{minimal_block_info::MinimalBlockInfo, type_wrappers::ReactorStateName}; /// Status information about the node. -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Serialize)] pub struct NodeStatus { /// The node ID and network address of each connected peer. pub peers: Peers, diff --git a/binary_port/src/type_wrappers.rs b/binary_port/src/type_wrappers.rs index c933226bf5..10e687f573 100644 --- a/binary_port/src/type_wrappers.rs +++ b/binary_port/src/type_wrappers.rs @@ -8,6 +8,7 @@ use casper_types::{ bytesrepr::{self, Bytes, FromBytes, ToBytes}, EraId, ExecutionInfo, Key, PublicKey, TimeDiff, Timestamp, Transaction, ValidatorChange, U512, }; +use serde::Serialize; use super::GlobalStateQueryResult; @@ -39,7 +40,7 @@ macro_rules! impl_bytesrepr_for_type_wrapper { } /// Type representing uptime. 
-#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)] pub struct Uptime(u64); impl Uptime { @@ -69,7 +70,7 @@ impl TryFrom for TimeDiff { } /// Type representing changes in consensus validators. -#[derive(Debug, PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq, Serialize)] #[cfg_attr(feature = "datasize", derive(DataSize))] pub struct ConsensusValidatorChanges(BTreeMap>); @@ -92,7 +93,7 @@ impl From for BTreeMap for String { } /// Type representing the reactor state name. -#[derive(Debug, PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq, Serialize)] pub struct ReactorStateName(String); impl ReactorStateName { @@ -136,7 +137,7 @@ impl From for String { } /// Type representing last progress of the sync process. -#[derive(Debug, PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq, Serialize)] pub struct LastProgress(Timestamp); impl LastProgress { @@ -174,7 +175,7 @@ impl GetTrieFullResult { } /// Type representing the reward of a validator or a delegator. -#[derive(Debug, PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq, Serialize)] pub struct RewardResponse { amount: U512, era_id: EraId, @@ -223,7 +224,7 @@ impl FromBytes for RewardResponse { } /// Describes the consensus status. -#[derive(Debug, PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq, Serialize)] pub struct ConsensusStatus { validator_public_key: PublicKey, round_length: Option, @@ -278,7 +279,7 @@ impl FromBytes for ConsensusStatus { } /// A transaction with execution info. -#[derive(Debug, PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq, Serialize)] pub struct TransactionWithExecutionInfo { transaction: Transaction, execution_info: Option, diff --git a/execution_engine/src/runtime/mint_internal.rs b/execution_engine/src/runtime/mint_internal.rs index ae77487561..5bf26e0aaf 100644 --- a/execution_engine/src/runtime/mint_internal.rs +++ b/execution_engine/src/runtime/mint_internal.rs @@ -189,4 +189,19 @@ where } } -impl<'a, R> Mint for Runtime<'a, R> where R: StateReader {} +impl<'a, R> Mint for Runtime<'a, R> +where + R: StateReader, +{ + fn purse_exists(&mut self, uref: URef) -> Result { + let maybe_value = self + .context + .read_gs(&Key::Balance(uref.addr())) + .map_err(|exec_error| >::from(exec_error).unwrap_or(Error::Storage))?; + match maybe_value { + Some(StoredValue::CLValue(value)) => Ok(*value.cl_type() == U512::cl_type()), + Some(_non_cl_value) => Err(Error::CLValue), + None => Ok(false), + } + } +} diff --git a/execution_engine/src/runtime/mod.rs b/execution_engine/src/runtime/mod.rs index 90071135db..035c828a95 100644 --- a/execution_engine/src/runtime/mod.rs +++ b/execution_engine/src/runtime/mod.rs @@ -2639,7 +2639,7 @@ where Ok(()) => { let protocol_version = self.context.protocol_version(); let byte_code_hash = ByteCodeHash::default(); - let entity_hash = AddressableEntityHash::new(self.context.new_hash_address()?); + let entity_hash = AddressableEntityHash::new(target.value()); let package_hash = PackageHash::new(self.context.new_hash_address()?); let main_purse = target_purse; let associated_keys = AssociatedKeys::new(target, Weight::new(1)); @@ -3530,9 +3530,9 @@ where // Check if the topic exists and get the summary. let Some(StoredValue::MessageTopic(prev_topic_summary)) = self.context.read_gs(&topic_key)? 
- else { - return Ok(Err(ApiError::MessageTopicNotRegistered)); - }; + else { + return Ok(Err(ApiError::MessageTopicNotRegistered)); + }; let current_blocktime = self.context.get_blocktime(); let topic_message_index = if prev_topic_summary.blocktime() != current_blocktime { diff --git a/execution_engine_testing/tests/src/test/system_contracts/genesis.rs b/execution_engine_testing/tests/src/test/system_contracts/genesis.rs index 32e16308dc..3811d02a19 100644 --- a/execution_engine_testing/tests/src/test/system_contracts/genesis.rs +++ b/execution_engine_testing/tests/src/test/system_contracts/genesis.rs @@ -78,6 +78,12 @@ fn should_run_genesis() { .get_entity_by_account_hash(PublicKey::System.to_account_hash()) .expect("system account should exist"); + let account_1_addr = builder + .get_entity_hash_by_account_hash(*ACCOUNT_1_ADDR) + .expect("must get addr for entity account 1"); + + assert_eq!(account_1_addr.value(), ACCOUNT_1_ADDR.value()); + let account_1 = builder .get_entity_by_account_hash(*ACCOUNT_1_ADDR) .expect("account 1 should exist"); diff --git a/node/BINARY_PORT_PROTOCOL.md b/node/BINARY_PORT_PROTOCOL.md index 064466cbc3..d1fc124cd8 100644 --- a/node/BINARY_PORT_PROTOCOL.md +++ b/node/BINARY_PORT_PROTOCOL.md @@ -1,58 +1,58 @@ -# Binary port protocol -The specification of the protocol used to communicate between the RPC sidecar and binary port casper-node. +# The Binary Port Protocol +This page specifies the communication protocol between the [RPC Sidecar](https://github.com/casper-network/casper-sidecar) and a Casper node's binary port. ## Synopsis -This is a binary protocol which follows a simple request-response model. The protocol consists of one party (the client) sending requests to another party (the server) and the server sending responses back to the client. Both requests and responses are wrapped in envelopes containing a version and a payload type tag. The versioning scheme is based on [SemVer](https://semver.org/), see [versioning](#versioning) for more details. The payload type tags are used to interpret the contents of the payloads. +The communication protocol between the Sidecar and the binary port is a binary protocol that follows a simple request-response model. The protocol consists of one party (the client) sending requests to another party (the server) and the server sending responses back to the client. Both requests and responses are wrapped in envelopes containing a version and a payload type tag. The versioning scheme is based on [SemVer](https://semver.org/). See [versioning](#versioning) for more details. The payload type tags are used to interpret the contents of the payloads. ### Request format | Size in bytes | Field | Description | |---------------|-----------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 2 | Version of the binary port header | Version of the binary port header serialized as a single u16 number. Upon receiving the request, binary port component will first read data from this field and check it against the currently supported version. In case of version mismatch the appropriate error response will be sent. | +| 2 | Version of the binary port header | Version of the binary port header serialized as a single u16 number. 
Upon receiving the request, the binary port component will first read data from this field and check it against the currently supported version. In case of a version mismatch, the appropriate error response will be sent. | | 12 | Chain protocol version | Chain protocol version as a u32 triplet (major, minor, patch). This parameter is used to determine whether an incoming request is compatible (according to semver rules) with the current chain protocol version. If not, the appropriate error response will be sent. | | 1 | BinaryRequestTag | Tag identifying the request. | -| ... | RequestPayload | Payload to be interpreted according to `BinaryRequestTag`. | +| Variable | RequestPayload | Payload to be interpreted according to the `BinaryRequestTag`. | -Request bytes can be constructed from bytesrepr-serialized `BinaryRequestHeader` followed by bytesrepr-serialized `BinaryRequest`. +Request bytes can be constructed from the bytesrepr-serialized `BinaryRequestHeader` followed by the bytesrepr-serialized `BinaryRequest`. ### Response format | Size in bytes | Field | Description | |-----------------|-----------------|--------------------------------------------------------------------------| -| 2 | Request id | Request id (u16). | -| 4 | LengthOfRequest | Length of the request (encoded as bytes) being responded to. | -| LengthOfRequest | RequestBytes | The request being responded to encoded as bytes. | +| 2 | Request ID | Request ID as a u16 number. | +| 4 | LengthOfRequest | Length of the request (encoded as bytes) for this response. | +| LengthOfRequest | RequestBytes | The request, encoded as bytes, corresponding to this response. | | 12 | ProtocolVersion | Protocol version as a u32 triplet (major, minor, patch). | | 2 | ErrorCode | Error code, where 0 indicates success. | | 1-2 | PayloadType | Optional payload type tag (first byte being 1 indicates that it exists). | -| ... | Payload | Payload to be interpreted according to `PayloadTag`. | +| Variable | Payload | Payload to be interpreted according to the `PayloadTag`. | `BinaryResponseAndRequest` object can be bytesrepr-deserialized from these bytes. -**Notes:** `...` means that the payload size is variable in size and depends on the tag. +**Notes:** `Variable` means that the payload size is variable and depends on the tag. ## Versioning -Versioning is based on the protocol version of the Casper Platform and the request/response model was designed to support **backwards-compatible** changes to some parts of it. These are allowed to change between **MINOR** versions: -- addition of new [`BinaryRequestTag`](#request-format) with its own payload -- addition of new [`PayloadType`](#response-format) with its own payload -- addition of new [`RecordId`](#request-model-details) -- addition of new [`InformationRequestTag`](#request-model-details) -- addition of new [`ErrorCode`](#response-format) +Versioning is based on the protocol version of the Casper Platform. 
The request/response model was designed to support **backward-compatible** changes to some parts, which are allowed to change between **MINOR** versions: +- addition of a new [`BinaryRequestTag`](#request-format) with its own payload +- addition of a new [`PayloadType`](#response-format) with its own payload +- addition of a new [`RecordId`](#request-model-details) +- addition of a new [`InformationRequestTag`](#request-model-details) +- addition of a new [`ErrorCode`](#response-format) -Implementations of the protocol can handle requests/responses with a different **MINOR** version than their own. It is possible that they receive a payload they don't support if their version is lower. In that case they should respond with an error code indicating the lack of support for the given payload (`ErrorCode::UnsupportedRequest`). +Implementations of the protocol can handle requests/responses with a different **MINOR** version than their own. It is possible that they receive a payload they don't support if their version is lower. In that case, they should respond with an error code indicating the lack of support for the given payload (`ErrorCode::UnsupportedRequest`). -Other changes to the protocol such as changes to the format of existing requests/responses or removal of existing requests/responses are only allowed between **MAJOR** versions. Implementations of the protocol should not handle requests/responses with a different **MAJOR** version than their own and immediately respond with an error code indicating the lack of support for the given version (`ErrorCode::UnsupportedRequest`). +Other changes to the protocol, such as changes to the format of existing requests/responses or removal of existing requests/responses, are only allowed between **MAJOR** versions. Implementations of the protocol should not handle requests/responses with a different **MAJOR** version than their own and immediately respond with an error code indicating the lack of support for the given version (`ErrorCode::UnsupportedRequest`). -Changes to the envelopes (the request/response headers) are allowed, but are breaking. When such a change is required, the "Header version" should in the request header should also be changed to prevent binary port from trying to handle requests it can't process. +Changes to the envelopes (the request/response headers) are allowed but are breaking. When such a change is required, the "Header version" in the request header should also be changed to prevent the binary port from trying to handle requests it can't process. ## Request model details -There are currently 3 supported types of requests, but the request model can be extended with new variants according to the [versioning](#versioning) rules. 
The request types are: -- `Get` request, which is one of: - - `Record` request asking for a record with an [**extensible**](#versioning) `RecordId` tag and a key - - `Information` request asking for a piece of information with an [**extensible**](#versioning) `InformationRequestTag` tag and a key - - `State` request asking for some data from the global state - - `Item` request asking for a single item by a `Key` - - `AllItems` request asking for all items by a `KeyTag` - - `Trie` request asking for a trie by a `Digest` -- `TryAcceptTransaction` request a transaction to be accepted and executed -- `TrySpeculativeExec` request a transaction to be speculatively executed +Currently, there are 3 supported types of requests, but the request model can be extended with new variants according to the [versioning](#versioning) rules. The request types are: +- A `Get` request, which is one of: + - A `Record` request asking for a record with an [**extensible**](#versioning) `RecordId` tag and a key + - An `Information` request asking for a piece of information with an [**extensible**](#versioning) `InformationRequestTag` tag and a key + - A `State` request asking for some data from global state. This can be: + - An `Item` request asking for a single item given a `Key` + - An `AllItems` request asking for all items given a `KeyTag` + - A `Trie` request asking for a trie given a `Digest` +- A `TryAcceptTransaction` request for a transaction to be accepted and executed +- A `TrySpeculativeExec` request for a transaction to be executed speculatively, without saving the transaction effects in global state diff --git a/node/src/components/binary_port.rs b/node/src/components/binary_port.rs index cc9c732c3e..1ddfef39ee 100644 --- a/node/src/components/binary_port.rs +++ b/node/src/components/binary_port.rs @@ -146,6 +146,10 @@ where BinaryRequest::TrySpeculativeExec { transaction } => { metrics.binary_port_try_speculative_exec_count.inc(); if !config.allow_request_speculative_exec { + debug!( + hash = %transaction.hash(), + "received a request for speculative execution while the feature is disabled" + ); return BinaryResponse::new_error(ErrorCode::FunctionDisabled, protocol_version); } let response = @@ -198,6 +202,7 @@ where } if RecordId::try_from(record_type_tag) == Ok(RecordId::Transfer) => { metrics.binary_port_get_record_count.inc(); let Ok(block_hash) = bytesrepr::deserialize_from_slice(&key) else { + debug!("received an incorrectly serialized key for a transfer record"); return BinaryResponse::new_error(ErrorCode::BadRequest, protocol_version); }; let Some(transfers) = effect_builder @@ -236,12 +241,19 @@ where GetRequest::Information { info_type_tag, key } => { metrics.binary_port_get_info_count.inc(); let Ok(tag) = InformationRequestTag::try_from(info_type_tag) else { + debug!( + tag = info_type_tag, + "received an unknown information request tag" + ); return BinaryResponse::new_error(ErrorCode::UnsupportedRequest, protocol_version); }; - let Ok(req) = InformationRequest::try_from((tag, &key[..])) else { - return BinaryResponse::new_error(ErrorCode::BadRequest, protocol_version); - }; - handle_info_request(req, effect_builder, protocol_version).await + match InformationRequest::try_from((tag, &key[..])) { + Ok(req) => handle_info_request(req, effect_builder, protocol_version).await, + Err(error) => { + debug!(?tag, %error, "failed to parse an information request"); + BinaryResponse::new_error(ErrorCode::BadRequest, protocol_version) + } + } } GetRequest::State(req) => { 
metrics.binary_port_get_state_count.inc(); @@ -261,7 +273,7 @@ where { let Some(state_root_hash) = resolve_state_root_hash(effect_builder, state_identifier).await else { - return BinaryResponse::new_empty(protocol_version); + return BinaryResponse::new_error(ErrorCode::RootNotFound, protocol_version); }; let storage_key_prefix = match key_prefix { KeyPrefix::DelegatorBidAddrsByValidator(hash) => { @@ -287,7 +299,8 @@ where PrefixedValuesResult::RootNotFound => { BinaryResponse::new_error(ErrorCode::RootNotFound, protocol_version) } - PrefixedValuesResult::Failure(_err) => { + PrefixedValuesResult::Failure(error) => { + debug!(%error, "failed when querying for values by prefix"); BinaryResponse::new_error(ErrorCode::InternalError, protocol_version) } } @@ -304,7 +317,7 @@ where { let Some(state_root_hash) = resolve_state_root_hash(effect_builder, state_identifier).await else { - return BinaryResponse::new_empty(protocol_version); + return BinaryResponse::new_error(ErrorCode::RootNotFound, protocol_version); }; let request = TaggedValuesRequest::new(state_root_hash, TaggedValuesSelection::All(key_tag)); match effect_builder.get_tagged_values(request).await { @@ -314,7 +327,8 @@ where TaggedValuesResult::RootNotFound => { BinaryResponse::new_error(ErrorCode::RootNotFound, protocol_version) } - TaggedValuesResult::Failure(_err) => { + TaggedValuesResult::Failure(error) => { + debug!(%error, "failed when querying for all values by tag"); BinaryResponse::new_error(ErrorCode::InternalError, protocol_version) } } @@ -342,7 +356,7 @@ where let Some(state_root_hash) = resolve_state_root_hash(effect_builder, state_identifier).await else { - return BinaryResponse::new_empty(protocol_version); + return BinaryResponse::new_error(ErrorCode::RootNotFound, protocol_version); }; match get_global_state_item(effect_builder, state_root_hash, base_key, path).await { Ok(Some(result)) => BinaryResponse::from_value(result, protocol_version), @@ -355,6 +369,7 @@ where key_tag, } => { if !config.allow_request_get_all_values { + debug!(%key_tag, "received a request for items by key tag while the feature is disabled"); BinaryResponse::new_error(ErrorCode::FunctionDisabled, protocol_version) } else { handle_get_all_items(state_identifier, key_tag, effect_builder, protocol_version) @@ -363,6 +378,7 @@ where } GlobalStateRequest::Trie { trie_key } => { if !config.allow_request_get_trie { + debug!(%trie_key, "received a trie request while the feature is disabled"); BinaryResponse::new_error(ErrorCode::FunctionDisabled, protocol_version) } else { let req = TrieRequest::new(trie_key, None); @@ -371,7 +387,8 @@ where GetTrieFullResult::new(result.map(TrieRaw::into_inner)), protocol_version, ), - Err(_err) => { + Err(error) => { + debug!(%error, "failed when querying for a trie"); BinaryResponse::new_error(ErrorCode::InternalError, protocol_version) } } @@ -384,7 +401,7 @@ where let Some(state_root_hash) = resolve_state_root_hash(effect_builder, state_identifier).await else { - return BinaryResponse::new_empty(protocol_version); + return BinaryResponse::new_error(ErrorCode::RootNotFound, protocol_version); }; let result = match identifier { DictionaryItemIdentifier::AccountNamedKey { @@ -501,9 +518,19 @@ where let named_keys = match &*value { StoredValue::Account(account) => account.named_keys(), StoredValue::Contract(contract) => contract.named_keys(), - _ => return Err(ErrorCode::DictionaryURefNotFound), + value => { + debug!( + value_type = value.type_name(), + "unexpected stored value found when querying for a dictionary" 
+ ); + return Err(ErrorCode::DictionaryURefNotFound); + } }; let Some(uref) = named_keys.get(&dictionary_name).and_then(Key::as_uref) else { + debug!( + dictionary_name, + "dictionary seed URef not found in named keys" + ); return Err(ErrorCode::DictionaryURefNotFound); }; let key = Key::dictionary(*uref, dictionary_item_key.as_bytes()); @@ -515,10 +542,18 @@ where Ok(Some(DictionaryQueryResult::new(key, query_result))) } - QueryResult::RootNotFound | QueryResult::ValueNotFound(_) => { + QueryResult::RootNotFound => { + debug!("root not found when querying for a dictionary seed URef"); + Err(ErrorCode::DictionaryURefNotFound) + } + QueryResult::ValueNotFound(error) => { + debug!(%error, "value not found when querying for a dictionary seed URef"); Err(ErrorCode::DictionaryURefNotFound) } - QueryResult::Failure(_) => Err(ErrorCode::FailedQuery), + QueryResult::Failure(error) => { + debug!(%error, "failed when querying for a dictionary seed URef"); + Err(ErrorCode::FailedQuery) + } } } @@ -538,11 +573,25 @@ where let req = QueryRequest::new(state_root_hash, Key::NamedKey(key_addr), vec![]); match effect_builder.query_global_state(req).await { QueryResult::Success { value, .. } => { - let StoredValue::NamedKey(key_val) = &*value else { - return Err(ErrorCode::DictionaryURefNotFound); + let key_val = match &*value { + StoredValue::NamedKey(key_val) => key_val, + value => { + debug!( + value_type = value.type_name(), + "unexpected stored value found when querying for a dictionary" + ); + return Err(ErrorCode::DictionaryURefNotFound); + } }; - let Ok(Key::URef(uref)) = key_val.get_key() else { - return Err(ErrorCode::DictionaryURefNotFound); + let uref = match key_val.get_key() { + Ok(Key::URef(uref)) => uref, + result => { + debug!( + ?result, + "unexpected named key result when querying for a dictionary" + ); + return Err(ErrorCode::DictionaryURefNotFound); + } }; let key = Key::dictionary(uref, dictionary_item_key.as_bytes()); let Some(query_result) = @@ -552,10 +601,18 @@ where }; Ok(Some(DictionaryQueryResult::new(key, query_result))) } - QueryResult::RootNotFound | QueryResult::ValueNotFound(_) => { + QueryResult::RootNotFound => { + debug!("root not found when querying for a dictionary seed URef"); Err(ErrorCode::DictionaryURefNotFound) } - QueryResult::Failure(_) => Err(ErrorCode::FailedQuery), + QueryResult::ValueNotFound(error) => { + debug!(%error, "value not found when querying for a dictionary seed URef"); + Err(ErrorCode::DictionaryURefNotFound) + } + QueryResult::Failure(error) => { + debug!(%error, "failed when querying for a dictionary seed URef"); + Err(ErrorCode::FailedQuery) + } } } @@ -637,8 +694,14 @@ where Ok(Some(GlobalStateQueryResult::new(*value, proofs))) } QueryResult::RootNotFound => Err(ErrorCode::RootNotFound), - QueryResult::ValueNotFound(_) => Err(ErrorCode::NotFound), - QueryResult::Failure(_) => Err(ErrorCode::FailedQuery), + QueryResult::ValueNotFound(error) => { + debug!(%error, "value not found when querying for a global state item"); + Err(ErrorCode::NotFound) + } + QueryResult::Failure(error) => { + debug!(%error, "failed when querying for a global state item"); + Err(ErrorCode::FailedQuery) + } } } @@ -816,6 +879,7 @@ where }; let Some(previous_height) = header.height().checked_sub(1) else { // there's not going to be any rewards for the genesis block + debug!("received a request for rewards in the genesis block"); return BinaryResponse::new_empty(protocol_version); }; let Some(parent_header) = effect_builder @@ -829,7 +893,7 @@ where }; let 
snapshot_request = SeigniorageRecipientsRequest::new( *parent_header.state_root_hash(), - protocol_version, + parent_header.protocol_version(), ); let snapshot = match effect_builder @@ -842,16 +906,25 @@ where SeigniorageRecipientsResult::RootNotFound => { return BinaryResponse::new_error(ErrorCode::RootNotFound, protocol_version) } - SeigniorageRecipientsResult::Failure(_) => { - return BinaryResponse::new_error(ErrorCode::FailedQuery, protocol_version) + SeigniorageRecipientsResult::Failure(error) => { + warn!(%error, "failed when querying for seigniorage recipients"); + return BinaryResponse::new_error(ErrorCode::FailedQuery, protocol_version); + } + SeigniorageRecipientsResult::AuctionNotFound => { + warn!("auction not found when querying for seigniorage recipients"); + return BinaryResponse::new_error(ErrorCode::InternalError, protocol_version); } - SeigniorageRecipientsResult::AuctionNotFound - | SeigniorageRecipientsResult::ValueNotFound(_) => { - return BinaryResponse::new_error(ErrorCode::InternalError, protocol_version) + SeigniorageRecipientsResult::ValueNotFound(error) => { + warn!(%error, "value not found when querying for seigniorage recipients"); + return BinaryResponse::new_error(ErrorCode::InternalError, protocol_version); } }; let Some(era_end) = header.clone_era_end() else { // switch block should have an era end + warn!( + hash = %header.block_hash(), + "era end not found in the switch block retrieved from storage" + ); return BinaryResponse::new_error(ErrorCode::InternalError, protocol_version); }; let block_rewards = match era_end.rewards() { @@ -878,7 +951,10 @@ where BinaryResponse::from_value(response, protocol_version) } Ok(None) => BinaryResponse::new_empty(protocol_version), - Err(_) => BinaryResponse::new_error(ErrorCode::InternalError, protocol_version), + Err(error) => { + warn!(%error, "failed when calculating rewards"); + BinaryResponse::new_error(ErrorCode::InternalError, protocol_version) + } } } } @@ -923,8 +999,9 @@ where .await; match result { - SpeculativeExecutionResult::InvalidTransaction(ite) => { - BinaryResponse::new_error(ite.into(), protocol_version) + SpeculativeExecutionResult::InvalidTransaction(error) => { + debug!(%error, "invalid transaction submitted for speculative execution"); + BinaryResponse::new_error(error.into(), protocol_version) } SpeculativeExecutionResult::WasmV1(spec_exec_result) => { BinaryResponse::from_value(spec_exec_result, protocol_version) @@ -989,11 +1066,13 @@ fn extract_header(payload: &[u8]) -> Result<(BinaryRequestHeader, &[u8]), ErrorC return Err(ErrorCode::BinaryProtocolVersionMismatch); } - let Ok((header, remainder)) = BinaryRequestHeader::from_bytes(payload) else { - return Err(ErrorCode::BadRequest); - }; - - Ok((header, remainder)) + match BinaryRequestHeader::from_bytes(payload) { + Ok((header, remainder)) => Ok((header, remainder)), + Err(error) => { + debug!(%error, "failed to parse binary request header"); + Err(ErrorCode::BadRequest) + } + } } async fn handle_payload( @@ -1029,11 +1108,15 @@ where ); }; - let Ok(request) = BinaryRequest::try_from((tag, remainder)) else { - return ( - BinaryResponse::new_error(ErrorCode::BadRequest, protocol_version), - request_id, - ); + let request = match BinaryRequest::try_from((tag, remainder)) { + Ok(request) => request, + Err(error) => { + debug!(%error, "failed to parse binary request body"); + return ( + BinaryResponse::new_error(ErrorCode::BadRequest, protocol_version), + request_id, + ); + } }; ( diff --git a/node/src/components/transaction_acceptor/error.rs 
b/node/src/components/transaction_acceptor/error.rs index 68f431ccf6..7f9cbecaa3 100644 --- a/node/src/components/transaction_acceptor/error.rs +++ b/node/src/components/transaction_acceptor/error.rs @@ -68,13 +68,58 @@ impl Error { impl From for BinaryPortErrorCode { fn from(err: Error) -> Self { match err { - Error::EmptyBlockchain - | Error::Parameters { .. } - | Error::Expired { .. } - | Error::ExpectedDeploy - | Error::ExpectedTransactionV1 => { - BinaryPortErrorCode::InvalidTransactionOrDeployUnspecified - } + Error::EmptyBlockchain => BinaryPortErrorCode::EmptyBlockchain, + Error::ExpectedDeploy => BinaryPortErrorCode::ExpectedDeploy, + Error::ExpectedTransactionV1 => BinaryPortErrorCode::ExpectedTransaction, + Error::Expired { .. } => BinaryPortErrorCode::TransactionExpired, + Error::Parameters { failure, .. } => match failure { + ParameterFailure::NoSuchAddressableEntity { .. } => { + BinaryPortErrorCode::NoSuchAddressableEntity + } + ParameterFailure::NoSuchContractAtHash { .. } => { + BinaryPortErrorCode::NoSuchContractAtHash + } + ParameterFailure::NoSuchEntryPoint { .. } => BinaryPortErrorCode::NoSuchEntryPoint, + ParameterFailure::NoSuchPackageAtHash { .. } => { + BinaryPortErrorCode::NoSuchPackageAtHash + } + ParameterFailure::InvalidEntityAtVersion { .. } => { + BinaryPortErrorCode::InvalidEntityAtVersion + } + ParameterFailure::DisabledEntityAtVersion { .. } => { + BinaryPortErrorCode::DisabledEntityAtVersion + } + ParameterFailure::MissingEntityAtVersion { .. } => { + BinaryPortErrorCode::MissingEntityAtVersion + } + ParameterFailure::InvalidAssociatedKeys => { + BinaryPortErrorCode::InvalidAssociatedKeys + } + ParameterFailure::InsufficientSignatureWeight => { + BinaryPortErrorCode::InsufficientSignatureWeight + } + ParameterFailure::InsufficientBalance { .. } => { + BinaryPortErrorCode::InsufficientBalance + } + ParameterFailure::UnknownBalance { .. 
} => BinaryPortErrorCode::UnknownBalance, + ParameterFailure::Deploy(deploy_failure) => match deploy_failure { + DeployParameterFailure::InvalidPaymentVariant => { + BinaryPortErrorCode::DeployInvalidPaymentVariant + } + DeployParameterFailure::MissingPaymentAmount => { + BinaryPortErrorCode::DeployMissingPaymentAmount + } + DeployParameterFailure::FailedToParsePaymentAmount => { + BinaryPortErrorCode::DeployFailedToParsePaymentAmount + } + DeployParameterFailure::MissingTransferTarget => { + BinaryPortErrorCode::DeployMissingTransferTarget + } + DeployParameterFailure::MissingModuleBytes => { + BinaryPortErrorCode::DeployMissingModuleBytes + } + }, + }, Error::InvalidTransaction(invalid_transaction) => { BinaryPortErrorCode::from(invalid_transaction) } diff --git a/storage/src/global_state/state/lmdb.rs b/storage/src/global_state/state/lmdb.rs index f7936ce4a0..96e33e2a88 100644 --- a/storage/src/global_state/state/lmdb.rs +++ b/storage/src/global_state/state/lmdb.rs @@ -1,5 +1,5 @@ use itertools::Itertools; -use std::{collections::BTreeMap, ops::Deref, sync::Arc}; +use std::{ops::Deref, sync::Arc}; use lmdb::{DatabaseFlags, RwTransaction}; @@ -118,7 +118,7 @@ impl LmdbGlobalState { pub fn put_stored_values( &self, prestate_hash: Digest, - stored_values: BTreeMap, + stored_values: Vec<(Key, StoredValue)>, ) -> Result { let scratch_trie = self.get_scratch_store(); let new_state_root = put_stored_values::<_, _, GlobalStateError>( diff --git a/storage/src/global_state/state/mod.rs b/storage/src/global_state/state/mod.rs index eeeef3b50f..943217460c 100644 --- a/storage/src/global_state/state/mod.rs +++ b/storage/src/global_state/state/mod.rs @@ -703,10 +703,18 @@ pub trait StateProvider { }; let balance_holds = match request.balance_handling() { BalanceHandling::Total => BTreeMap::new(), - BalanceHandling::Available => match tc.get_balance_holds(purse_addr) { - Ok(holds) => holds, - Err(tce) => return tce.into(), - }, + BalanceHandling::Available => { + match tc.get_balance_hold_config(BalanceHoldAddrTag::Gas) { + Ok(Some((block_time, _, interval))) => { + match tc.get_balance_holds(purse_addr, block_time, interval) { + Ok(holds) => holds, + Err(tce) => return tce.into(), + } + } + Ok(None) => BTreeMap::new(), + Err(tce) => return tce.into(), + } + } }; (total_balance, ProofsResult::NotRequested { balance_holds }) } @@ -2150,7 +2158,7 @@ pub fn put_stored_values<'a, R, S, E>( environment: &'a R, store: &S, prestate_hash: Digest, - stored_values: BTreeMap, + stored_values: Vec<(Key, StoredValue)>, ) -> Result where R: TransactionSource<'a, Handle = S::Handle>, diff --git a/storage/src/global_state/state/scratch.rs b/storage/src/global_state/state/scratch.rs index be83ccce14..dec12c369b 100644 --- a/storage/src/global_state/state/scratch.rs +++ b/storage/src/global_state/state/scratch.rs @@ -1,6 +1,6 @@ use lmdb::RwTransaction; use std::{ - collections::{BTreeMap, BTreeSet}, + collections::{BTreeMap, BTreeSet, HashMap, VecDeque}, mem, ops::Deref, sync::{Arc, RwLock}, @@ -9,7 +9,7 @@ use std::{ use tracing::{debug, error}; use casper_types::{ - bytesrepr::ToBytes, + bytesrepr::{self, ToBytes}, execution::{Effects, TransformInstruction, TransformKindV2, TransformV2}, global_state::TrieMerkleProof, Digest, Key, StoredValue, @@ -40,15 +40,106 @@ use crate::tracking_copy::TrackingCopy; type SharedCache = Arc>; struct Cache { - cached_values: BTreeMap, + cached_values: HashMap, pruned: BTreeSet, + cached_keys: CacheTrie, +} + +struct CacheTrieNode { + children: BTreeMap>, + value: Option, +} + +impl 
CacheTrieNode { + fn new() -> Self { + CacheTrieNode { + children: BTreeMap::new(), + value: None, + } + } + + fn remove(&mut self, bytes: &[u8], depth: usize) -> bool { + if depth == bytes.len() { + if self.value.is_some() { + self.value = None; + return self.children.is_empty(); + } + return false; + } + + if let Some(child_node) = self.children.get_mut(&bytes[depth]) { + if child_node.remove(bytes, depth + 1) { + self.children.remove(&bytes[depth]); + return self.value.is_none() && self.children.is_empty(); + } + } + false + } +} + +struct CacheTrie { + root: CacheTrieNode, +} + +impl CacheTrie { + fn new() -> Self { + CacheTrie { + root: CacheTrieNode::new(), + } + } + + fn insert(&mut self, key_bytes: &[u8], key: T) { + let mut current_node = &mut self.root; + for &byte in key_bytes { + current_node = current_node + .children + .entry(byte) + .or_insert(CacheTrieNode::new()); + } + current_node.value = Some(key); + } + + fn keys_with_prefix(&self, prefix: &[u8]) -> Vec { + let mut current_node = &self.root; + let mut result = Vec::new(); + + for &byte in prefix { + match current_node.children.get(&byte) { + Some(node) => current_node = node, + None => return result, + } + } + + self.collect_keys(current_node, &mut result); + result + } + + fn collect_keys(&self, start_node: &CacheTrieNode, result: &mut Vec) { + let mut stack = VecDeque::new(); + stack.push_back(start_node); + + while let Some(node) = stack.pop_back() { + if let Some(key) = node.value { + result.push(key); + } + + for child_node in node.children.values() { + stack.push_back(child_node); + } + } + } + + fn remove(&mut self, key_bytes: &[u8]) -> bool { + self.root.remove(key_bytes, 0) + } } impl Cache { fn new() -> Self { Cache { - cached_values: BTreeMap::new(), + cached_values: HashMap::new(), pruned: BTreeSet::new(), + cached_keys: CacheTrie::new(), } } @@ -57,18 +148,27 @@ impl Cache { self.cached_values.is_empty() && self.pruned.is_empty() } - fn insert_write(&mut self, key: Key, value: StoredValue) { + fn insert_write(&mut self, key: Key, value: StoredValue) -> Result<(), bytesrepr::Error> { self.pruned.remove(&key); - self.cached_values.insert(key, (true, value)); + if self.cached_values.insert(key, (true, value)).is_none() { + let key_bytes = key.to_bytes()?; + self.cached_keys.insert(&key_bytes, key); + }; + Ok(()) } - fn insert_read(&mut self, key: Key, value: StoredValue) { + fn insert_read(&mut self, key: Key, value: StoredValue) -> Result<(), bytesrepr::Error> { + let key_bytes = key.to_bytes()?; + self.cached_keys.insert(&key_bytes, key); self.cached_values.entry(key).or_insert((false, value)); + Ok(()) } - fn prune(&mut self, key: Key) { + fn prune(&mut self, key: Key) -> Result<(), bytesrepr::Error> { self.cached_values.remove(&key); + self.cached_keys.remove(&key.to_bytes()?); self.pruned.insert(key); + Ok(()) } fn get(&self, key: &Key) -> Option<&StoredValue> { @@ -80,13 +180,23 @@ impl Cache { /// Consumes self and returns only written values as values that were only read must be filtered /// out to prevent unnecessary writes. 
- fn into_dirty_writes(self) -> (BTreeMap, BTreeSet) { - let keys_to_prune = self.pruned; - let stored_values: BTreeMap = self - .cached_values + fn into_dirty_writes(self) -> (Vec<(Key, StoredValue)>, BTreeSet) { + let stored_values: Vec<(Key, StoredValue)> = self + .cached_keys + .keys_with_prefix(&[]) .into_iter() - .filter_map(|(key, (dirty, value))| if dirty { Some((key, value)) } else { None }) + .filter_map(|key| { + self.cached_values.get(&key).and_then(|(dirty, value)| { + if *dirty { + Some((key, value.clone())) + } else { + None + } + }) + }) .collect(); + let keys_to_prune = self.pruned; + debug!( "Cache::into_dirty_writes prune_count: {} store_count: {}", keys_to_prune.len(), @@ -148,7 +258,7 @@ impl ScratchGlobalState { } /// Consume self and return inner cache. - pub fn into_inner(self) -> (BTreeMap, BTreeSet) { + pub fn into_inner(self) -> (Vec<(Key, StoredValue)>, BTreeSet) { let cache = mem::replace(&mut *self.cache.write().unwrap(), Cache::new()); cache.into_dirty_writes() } @@ -175,7 +285,10 @@ impl StateReader for ScratchGlobalStateView { key, )? { ReadResult::Found(value) => { - self.cache.write().unwrap().insert_read(*key, value.clone()); + self.cache + .write() + .expect("poisoned scratch cache lock") + .insert_read(*key, value.clone())?; Some(value) } ReadResult::NotFound => None, @@ -213,13 +326,9 @@ impl StateReader for ScratchGlobalStateView { fn keys_with_prefix(&self, prefix: &[u8]) -> Result, Self::Error> { let mut ret = Vec::new(); - let cache = self.cache.read().unwrap(); - for cached_key in cache.cached_values.keys() { - let serialized_key = cached_key.to_bytes()?; - if serialized_key.starts_with(prefix) && !cache.pruned.contains(cached_key) { - ret.push(*cached_key) - } - } + let cache = self.cache.read().expect("poisoned scratch cache mutex"); + let cached_keys = cache.cached_keys.keys_with_prefix(prefix); + ret.extend(cached_keys); let txn = self.environment.create_read_txn()?; let keys_iter = keys_with_prefix::( @@ -250,6 +359,7 @@ impl CommitProvider for ScratchGlobalState { /// State hash returned is the one provided, as we do not write to lmdb with this kind of global /// state. Note that the state hash is NOT used, and simply passed back to the caller. fn commit(&self, state_hash: Digest, effects: Effects) -> Result { + let txn = self.environment.create_read_txn()?; for (key, kind) in effects.value().into_iter().map(TransformV2::destructure) { let cached_value = self.cache.read().unwrap().get(&key).cloned(); let instruction = match (cached_value, kind) { @@ -261,16 +371,14 @@ impl CommitProvider for ScratchGlobalState { (None, transform_kind) => { // It might be the case that for `Add*` operations we don't have the previous // value in cache yet. - let txn = self.environment.create_read_txn()?; - let instruction = match read::< + match read::< Key, StoredValue, lmdb::RoTransaction, LmdbTrieStore, GlobalStateError, - >( - &txn, self.trie_store.deref(), &state_hash, &key - )? { + >(&txn, self.trie_store.deref(), &state_hash, &key)? 
+ { ReadResult::Found(current_value) => { match transform_kind.apply(current_value.clone()) { Ok(instruction) => instruction, @@ -292,9 +400,7 @@ impl CommitProvider for ScratchGlobalState { error!(root_hash=?state_hash, "root not found"); return Err(CommitError::ReadRootNotFound(state_hash).into()); } - }; - txn.commit()?; - instruction + } } (Some(current_value), transform_kind) => { match transform_kind.apply(current_value) { @@ -309,13 +415,14 @@ impl CommitProvider for ScratchGlobalState { let mut cache = self.cache.write().unwrap(); match instruction { TransformInstruction::Store(value) => { - cache.insert_write(key, value); + cache.insert_write(key, value)?; } TransformInstruction::Prune(key) => { - cache.prune(key); + cache.prune(key)?; } } } + txn.commit()?; Ok(state_hash) } } @@ -607,10 +714,14 @@ pub(crate) mod tests { assert_eq!(all_keys.len(), stored_values.len()); for key in all_keys { - assert!(stored_values.get(&key).is_some()); assert_eq!( - stored_values.get(&key), - updated_checkout.read(&key).unwrap().as_ref() + stored_values + .iter() + .find(|(k, _)| k == &key) + .unwrap() + .1 + .clone(), + updated_checkout.read(&key).unwrap().unwrap() ); } @@ -702,4 +813,100 @@ pub(crate) mod tests { original_checkout.read(&test_pairs_updated[2].key).unwrap() ); } + + #[test] + fn cache_trie_basic_insert_get() { + let mut trie = CacheTrie::new(); + let key_hello = Key::Hash(*b"hello..........................."); + let key_world = Key::Hash(*b"world..........................."); + let key_hey = Key::Hash(*b"hey............................."); + + trie.insert(b"hello", key_hello); + trie.insert(b"world", key_world); + trie.insert(b"hey", key_hey); + + assert_eq!(trie.keys_with_prefix(b"he"), vec![key_hey, key_hello]); + assert_eq!(trie.keys_with_prefix(b"wo"), vec![key_world]); + } + + #[test] + fn cache_trie_overlapping_prefix() { + let mut trie = CacheTrie::new(); + let key_apple = Key::Hash(*b"apple..........................."); + let key_app = Key::Hash(*b"app............................."); + let key_apron = Key::Hash(*b"apron..........................."); + + trie.insert(b"apple", key_apple); + trie.insert(b"app", key_app); + trie.insert(b"apron", key_apron); + + assert_eq!( + trie.keys_with_prefix(b"ap"), + vec![key_apron, key_app, key_apple] + ); + assert_eq!(trie.keys_with_prefix(b"app"), vec![key_app, key_apple]); + } + + #[test] + fn cache_trie_leaf_removal() { + let mut trie = CacheTrie::new(); + let key_cat = Key::Hash(*b"cat............................."); + let key_category = Key::Hash(*b"category........................"); + + trie.insert(b"cat", key_cat); + trie.insert(b"category", key_category); + + trie.remove(b"category"); + assert_eq!(trie.keys_with_prefix(b"ca"), vec![key_cat]); + } + + #[test] + fn cache_trie_internal_node_removal() { + let mut trie = CacheTrie::new(); + let key_be = Key::Hash(*b"be.............................."); + let key_berry = Key::Hash(*b"berry..........................."); + + trie.insert(b"be", key_be); + trie.insert(b"berry", key_berry); + + trie.remove(b"be"); + assert_eq!(trie.keys_with_prefix(b"be"), vec![key_berry]); + } + + #[test] + fn cache_trie_non_existent_prefix() { + let mut trie = CacheTrie::new(); + + let key_apple = Key::Hash(*b"apple..........................."); + let key_mango = Key::Hash(*b"mango..........................."); + + trie.insert(b"apple", key_apple); + trie.insert(b"mango", key_mango); + + assert_eq!(trie.keys_with_prefix(b"b"), Vec::::new()); + } + + #[test] + fn cache_trie_empty_trie_search() { + let 
trie = CacheTrie::::new(); + + assert_eq!(trie.keys_with_prefix(b""), Vec::::new()); + } + + #[test] + fn cache_trie_empty_prefix_search_all_keys() { + let mut trie = CacheTrie::new(); + let key_hello = Key::Hash(*b"hello..........................."); + let key_world = Key::Hash(*b"world..........................."); + let key_hey = Key::Hash(*b"hey............................."); + + trie.insert(b"hello", key_hello); + trie.insert(b"world", key_world); + trie.insert(b"hey", key_hey); + + assert_eq!( + trie.keys_with_prefix(b""), + vec![key_world, key_hey, key_hello] + ); + } } diff --git a/storage/src/system/genesis.rs b/storage/src/system/genesis.rs index 2e16e67cf0..a181b5bbc9 100644 --- a/storage/src/system/genesis.rs +++ b/storage/src/system/genesis.rs @@ -731,11 +731,19 @@ where } else { ByteCodeHash::new(self.address_generator.borrow_mut().new_hash_address()) }; - let entity_hash = if entity_kind.is_system_account() { - let entity_hash_addr = PublicKey::System.to_account_hash().value(); - AddressableEntityHash::new(entity_hash_addr) - } else { - AddressableEntityHash::new(self.address_generator.borrow_mut().new_hash_address()) + + let entity_hash = match entity_kind { + EntityKind::System(_) | EntityKind::SmartContract(_) => { + AddressableEntityHash::new(self.address_generator.borrow_mut().new_hash_address()) + } + EntityKind::Account(account_hash) => { + if entity_kind.is_system_account() { + let entity_hash_addr = PublicKey::System.to_account_hash().value(); + AddressableEntityHash::new(entity_hash_addr) + } else { + AddressableEntityHash::new(account_hash.value()) + } + } }; let package_hash = PackageHash::new(self.address_generator.borrow_mut().new_hash_address()); diff --git a/storage/src/system/mint.rs b/storage/src/system/mint.rs index a9c4387a72..bbcd0b48b4 100644 --- a/storage/src/system/mint.rs +++ b/storage/src/system/mint.rs @@ -285,7 +285,7 @@ pub trait Mint: RuntimeProvider + StorageProvider + SystemProvider { // treat as noop return Ok(()); } - if self.available_balance(existing_purse)?.is_none() { + if !self.purse_exists(existing_purse)? { return Err(Error::PurseNotFound); } self.add_balance(existing_purse, amount)?; @@ -304,4 +304,7 @@ pub trait Mint: RuntimeProvider + StorageProvider + SystemProvider { self.add(total_supply_uref, amount)?; Ok(()) } + + /// Check if a purse exists. + fn purse_exists(&mut self, uref: URef) -> Result; } diff --git a/storage/src/system/mint/mint_native.rs b/storage/src/system/mint/mint_native.rs index ead72fe9db..d2801ec1a6 100644 --- a/storage/src/system/mint/mint_native.rs +++ b/storage/src/system/mint/mint_native.rs @@ -252,4 +252,21 @@ where } } -impl Mint for RuntimeNative where S: StateReader {} +impl Mint for RuntimeNative +where + S: StateReader, +{ + fn purse_exists(&mut self, uref: URef) -> Result { + let key = Key::Balance(uref.addr()); + match self + .tracking_copy() + .borrow_mut() + .read(&key) + .map_err(|_| Error::Storage)? 
+ { + Some(StoredValue::CLValue(value)) => Ok(*value.cl_type() == U512::cl_type()), + Some(_non_cl_value) => Err(Error::CLValue), + None => Ok(false), + } + } +} diff --git a/storage/src/system/protocol_upgrade.rs b/storage/src/system/protocol_upgrade.rs index daaa52a432..2ca6f2de6c 100644 --- a/storage/src/system/protocol_upgrade.rs +++ b/storage/src/system/protocol_upgrade.rs @@ -506,7 +506,7 @@ where let mut address_generator = AddressGenerator::new(pre_state_hash.as_ref(), Phase::System); let byte_code_hash = ByteCodeHash::default(); - let entity_hash = AddressableEntityHash::new(address_generator.new_hash_address()); + let entity_hash = AddressableEntityHash::new(PublicKey::System.to_account_hash().value()); let package_hash = PackageHash::new(address_generator.new_hash_address()); let byte_code = ByteCode::new(ByteCodeKind::Empty, vec![]); diff --git a/storage/src/tracking_copy/ext.rs b/storage/src/tracking_copy/ext.rs index 7ea1bf4377..21bcc66712 100644 --- a/storage/src/tracking_copy/ext.rs +++ b/storage/src/tracking_copy/ext.rs @@ -86,6 +86,8 @@ pub trait TrackingCopyExt { fn get_balance_holds( &mut self, purse_addr: URefAddr, + block_time: BlockTime, + interval: u64, ) -> Result, Self::Error>; /// Gets the balance holds for a given balance, with Merkle proofs. @@ -322,7 +324,7 @@ where Some((block_time, handling, interval)) => (block_time, handling, interval), }; - let balance_holds = self.get_balance_holds(purse_addr)?; + let balance_holds = self.get_balance_holds(purse_addr, block_time, interval)?; let gas_handling = (handling, interval).into(); let processing_handling = ProcessingHoldBalanceHandling::new(); match balance_holds.available_balance( @@ -422,6 +424,8 @@ where fn get_balance_holds( &mut self, purse_addr: URefAddr, + block_time: BlockTime, + interval: u64, ) -> Result, Self::Error> { // NOTE: currently there are two kinds of holds, gas and processing. // Processing holds only effect one block to prevent double spend and are always @@ -434,14 +438,7 @@ where // for each hold kind and process each kind discretely in order and collate the // non-expired hold total at the end. let mut ret: BTreeMap = BTreeMap::new(); - let (block_time, interval) = match self.get_balance_hold_config(BalanceHoldAddrTag::Gas)? 
{ - Some((block_time, _, interval)) => (block_time.value(), interval), - None => { - // if there is no holds config at this root hash, there can't be any holds - return Ok(ret); - } - }; - let holds_epoch = { HoldsEpoch::from_millis(block_time, interval) }; + let holds_epoch = { HoldsEpoch::from_millis(block_time.value(), interval) }; let holds = self.get_balance_hold_addresses(purse_addr)?; for balance_hold_addr in holds { let block_time = balance_hold_addr.block_time(); diff --git a/storage/src/tracking_copy/ext_entity.rs b/storage/src/tracking_copy/ext_entity.rs index 761d9e26bd..5debb9d371 100644 --- a/storage/src/tracking_copy/ext_entity.rs +++ b/storage/src/tracking_copy/ext_entity.rs @@ -449,7 +449,7 @@ where let mut generator = AddressGenerator::new(main_purse.addr().as_ref(), Phase::System); let byte_code_hash = ByteCodeHash::default(); - let entity_hash = AddressableEntityHash::new(generator.new_hash_address()); + let entity_hash = AddressableEntityHash::new(account_hash.value()); let package_hash = PackageHash::new(generator.new_hash_address()); let associated_keys = AssociatedKeys::new(account_hash, Weight::new(1)); diff --git a/utils/global-state-update-gen/src/decode.rs b/utils/global-state-update-gen/src/decode.rs new file mode 100644 index 0000000000..d841f280e3 --- /dev/null +++ b/utils/global-state-update-gen/src/decode.rs @@ -0,0 +1,50 @@ +use std::{collections::BTreeMap, fmt, fs::File, io::Read}; + +use clap::ArgMatches; + +use casper_types::{ + bytesrepr::FromBytes, system::auction::SeigniorageRecipientsSnapshot, CLType, + GlobalStateUpdate, GlobalStateUpdateConfig, Key, StoredValue, +}; + +struct Entries(BTreeMap); + +impl fmt::Debug for Entries { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut map = f.debug_map(); + for (k, v) in &self.0 { + let debug_v: Box = match v { + StoredValue::CLValue(clv) => match clv.cl_type() { + CLType::Map { key, value: _ } if **key == CLType::U64 => { + // this should be the seigniorage recipient snapshot + let snapshot: SeigniorageRecipientsSnapshot = clv.clone().into_t().unwrap(); + Box::new(snapshot) + } + _ => Box::new(clv), + }, + _ => Box::new(v), + }; + map.key(k).value(&debug_v); + } + map.finish() + } +} + +pub(crate) fn decode_file(matches: &ArgMatches<'_>) { + let file_name = matches.value_of("file").unwrap(); + let mut file = File::open(file_name).unwrap(); + + let mut contents = String::new(); + file.read_to_string(&mut contents).unwrap(); + + let config: GlobalStateUpdateConfig = toml::from_str(&contents).unwrap(); + let update_data: GlobalStateUpdate = config.try_into().unwrap(); + + println!("validators = {:#?}", &update_data.validators); + let entries: BTreeMap<_, _> = update_data + .entries + .iter() + .map(|(key, bytes)| (*key, StoredValue::from_bytes(bytes).unwrap().0)) + .collect(); + println!("entries = {:#?}", Entries(entries)); +} diff --git a/utils/global-state-update-gen/src/generic/state_tracker.rs b/utils/global-state-update-gen/src/generic/state_tracker.rs index 98be1214ce..4b6a0c2110 100644 --- a/utils/global-state-update-gen/src/generic/state_tracker.rs +++ b/utils/global-state-update-gen/src/generic/state_tracker.rs @@ -163,7 +163,7 @@ impl StateTracker { let mut rng = rand::thread_rng(); - let entity_hash = AddressableEntityHash::new(rng.gen()); + let entity_hash = AddressableEntityHash::new(account_hash.value()); let package_hash = PackageHash::new(rng.gen()); let contract_wasm_hash = ByteCodeHash::new([0u8; 32]); diff --git a/utils/global-state-update-gen/src/main.rs 
b/utils/global-state-update-gen/src/main.rs index 85e150b72f..247860d25d 100644 --- a/utils/global-state-update-gen/src/main.rs +++ b/utils/global-state-update-gen/src/main.rs @@ -1,5 +1,6 @@ mod admins; mod balances; +mod decode; mod generic; mod system_entity_registry; mod utils; @@ -9,7 +10,7 @@ use admins::generate_admins; use clap::{crate_version, App, Arg, SubCommand}; use crate::{ - balances::generate_balances_update, generic::generate_generic_update, + balances::generate_balances_update, decode::decode_file, generic::generate_generic_update, system_entity_registry::generate_system_entity_registry, validators::generate_validators_update, }; @@ -184,6 +185,17 @@ fn main() { .number_of_values(1), ), ) + .subcommand( + SubCommand::with_name("decode") + .about("Decodes the global_state.toml file into a readable form") + .arg( + Arg::with_name("file") + .value_name("FILE") + .index(1) + .required(true) + .help("The file to be decoded"), + ), + ) .get_matches(); match matches.subcommand() { @@ -194,6 +206,7 @@ fn main() { } ("generic", Some(sub_matches)) => generate_generic_update(sub_matches), ("generate-admins", Some(sub_matches)) => generate_admins(sub_matches), + ("decode", Some(sub_matches)) => decode_file(sub_matches), (subcommand, _) => { println!("Unknown subcommand: \"{}\"", subcommand); }
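For reference, a hypothetical invocation of the new `decode` subcommand wired up in `main.rs` above; the binary name is assumed from the crate directory (`utils/global-state-update-gen`), and the file argument is whichever `global_state.toml` update file you want to inspect:

```sh
# Illustrative only: decode an update file into readable
# "validators = ..." and "entries = ..." output on stdout.
global-state-update-gen decode global_state.toml
```

The subcommand takes a single required FILE argument; seigniorage recipient snapshots stored as CLValue maps keyed by U64 are pretty-printed as typed snapshots rather than raw bytes, per `decode.rs` above.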