Merge pull request #5546 from stacks-network/release/3.1.0.0.0
Release/3.1.0.0.0
wileyj authored Dec 9, 2024
2 parents 230a946 + 49777d3 commit b0d9179
Showing 85 changed files with 3,255 additions and 369 deletions.
1 change: 1 addition & 0 deletions .github/workflows/bitcoin-tests.yml
@@ -142,6 +142,7 @@ jobs:
- tests::nakamoto_integrations::v3_signer_api_endpoint
- tests::nakamoto_integrations::test_shadow_recovery
- tests::nakamoto_integrations::signer_chainstate
- tests::nakamoto_integrations::sip029_coinbase_change
- tests::nakamoto_integrations::clarity_cost_spend_down
- tests::nakamoto_integrations::v3_blockbyheight_api_endpoint
# TODO: enable these once v1 signer is supported by a new nakamoto epoch
11 changes: 11 additions & 0 deletions CHANGELOG.md
@@ -11,6 +11,17 @@ and this project adheres to the versioning scheme outlined in the [README.md](RE

### Changed

## [3.1.0.0.0]

### Added

- **SIP-029 consensus rules, activating in epoch 3.1 at block 875,000** (see [SIP-029](https://github.com/will-corcoran/sips/blob/feat/sip-029-halving-alignment/sips/sip-029/sip-029-halving-alignment.md) for details)
- New RPC endpoints
- `/v2/clarity/marf/:marf_key_hash`
- `/v2/clarity/metadata/:principal/:contract_name/:clarity_metadata_key`

### Changed

## [3.0.0.0.4]

### Added
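The two new read-only RPC endpoints listed in the CHANGELOG above take, respectively, a hex-encoded hash of a Clarity MARF key and a (principal, contract name, metadata key) triple. The following is a rough, non-authoritative sketch of how a client might call them; it is not part of this commit and assumes a node RPC interface at 127.0.0.1:20443, the `reqwest` crate with its `blocking` feature, and placeholder values for the hash, principal, contract name, and metadata key (the `vm-metadata::9::contract-size` key format is an assumption inferred from `make_metadata_key`, not spelled out in this diff).

use std::error::Error;

fn main() -> Result<(), Box<dyn Error>> {
    // Assumed local node RPC bind address; adjust to the node's actual config.
    let base = "http://127.0.0.1:20443";

    // GET /v2/clarity/marf/:marf_key_hash
    // Placeholder hex-encoded hash of a Clarity storage key.
    let marf_key_hash = "0000000000000000000000000000000000000000000000000000000000000000";
    let marf_value = reqwest::blocking::get(format!("{base}/v2/clarity/marf/{marf_key_hash}"))?
        .text()?;
    println!("marf entry: {marf_value}");

    // GET /v2/clarity/metadata/:principal/:contract_name/:clarity_metadata_key
    // Placeholder contract coordinates; the metadata key format is an assumption.
    let principal = "SP000000000000000000002Q6VF78";
    let contract = "pox";
    let metadata_key = "vm-metadata::9::contract-size";
    let metadata = reqwest::blocking::get(format!(
        "{base}/v2/clarity/metadata/{principal}/{contract}/{metadata_key}"
    ))?
    .text()?;
    println!("contract metadata: {metadata}");

    Ok(())
}

Both calls are plain read-only GETs against the node's current Clarity state; no signing or authentication is involved.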
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
@@ -579,7 +579,7 @@ _Do_ document things that are not clear, e.g.:
Keep in mind that better variable names can reduce the need for comments, e.g.:

- `burnblock_height` instead of `height` may eliminate the need to comment that `height` refers to a burnblock height
- `process_microblocks` instead of `process_blocks` is more correct, and may eliminate the need to to explain that the inputs are microblocks
- `process_microblocks` instead of `process_blocks` is more correct, and may eliminate the need to explain that the inputs are microblocks
- `add_transaction_to_microblock` explains more than `handle_transaction`, and reduces the need to even read the comment

# Licensing and contributor license agreement
3 changes: 2 additions & 1 deletion clarity/src/vm/analysis/mod.rs
@@ -148,7 +148,8 @@ pub fn run_analysis(
| StacksEpochId::Epoch23
| StacksEpochId::Epoch24
| StacksEpochId::Epoch25
| StacksEpochId::Epoch30 => {
| StacksEpochId::Epoch30
| StacksEpochId::Epoch31 => {
TypeChecker2_1::run_pass(&epoch, &mut contract_analysis, db, build_type_map)
}
StacksEpochId::Epoch10 => {
6 changes: 4 additions & 2 deletions clarity/src/vm/analysis/type_checker/mod.rs
@@ -52,7 +52,8 @@ impl FunctionType {
| StacksEpochId::Epoch23
| StacksEpochId::Epoch24
| StacksEpochId::Epoch25
| StacksEpochId::Epoch30 => self.check_args_2_1(accounting, args, clarity_version),
| StacksEpochId::Epoch30
| StacksEpochId::Epoch31 => self.check_args_2_1(accounting, args, clarity_version),
StacksEpochId::Epoch10 => {
return Err(CheckErrors::Expects("Epoch10 is not supported".into()).into())
}
@@ -75,7 +76,8 @@ impl FunctionType {
| StacksEpochId::Epoch23
| StacksEpochId::Epoch24
| StacksEpochId::Epoch25
| StacksEpochId::Epoch30 => {
| StacksEpochId::Epoch30
| StacksEpochId::Epoch31 => {
self.check_args_by_allowing_trait_cast_2_1(db, clarity_version, func_args)
}
StacksEpochId::Epoch10 => {
3 changes: 2 additions & 1 deletion clarity/src/vm/costs/mod.rs
@@ -775,7 +775,8 @@ impl LimitedCostTracker {
| StacksEpochId::Epoch23
| StacksEpochId::Epoch24
| StacksEpochId::Epoch25
| StacksEpochId::Epoch30 => COSTS_3_NAME.to_string(),
| StacksEpochId::Epoch30
| StacksEpochId::Epoch31 => COSTS_3_NAME.to_string(),
};
Ok(result)
}
131 changes: 120 additions & 11 deletions clarity/src/vm/database/clarity_db.rs
@@ -23,7 +23,7 @@ use stacks_common::consts::{
};
use stacks_common::types::chainstate::{
BlockHeaderHash, BurnchainHeaderHash, ConsensusHash, SortitionId, StacksAddress, StacksBlockId,
VRFSeed,
TrieHash, VRFSeed,
};
use stacks_common::types::{Address, StacksEpoch as GenericStacksEpoch, StacksEpochId};
use stacks_common::util::hash::{to_hex, Hash160, Sha256Sum, Sha512Trunc256Sum};
@@ -76,6 +76,68 @@ pub enum StoreType {
PoxUnlockHeight = 0x15,
}

impl TryFrom<&str> for StoreType {
type Error = String;

fn try_from(value: &str) -> core::result::Result<Self, Self::Error> {
use self::StoreType::*;

let hex_value = u8::from_str_radix(value, 10).map_err(|e| e.to_string())?;
match hex_value {
0x00 => Ok(DataMap),
0x01 => Ok(Variable),
0x02 => Ok(FungibleToken),
0x03 => Ok(CirculatingSupply),
0x04 => Ok(NonFungibleToken),
0x05 => Ok(DataMapMeta),
0x06 => Ok(VariableMeta),
0x07 => Ok(FungibleTokenMeta),
0x08 => Ok(NonFungibleTokenMeta),
0x09 => Ok(Contract),
0x10 => Ok(SimmedBlock),
0x11 => Ok(SimmedBlockHeight),
0x12 => Ok(Nonce),
0x13 => Ok(STXBalance),
0x14 => Ok(PoxSTXLockup),
0x15 => Ok(PoxUnlockHeight),
_ => Err("Invalid StoreType".into()),
}
}
}

pub enum ContractDataVarName {
Contract,
ContractSize,
ContractSrc,
ContractDataSize,
}

impl ContractDataVarName {
pub fn as_str(&self) -> &str {
match self {
Self::Contract => "contract",
Self::ContractSize => "contract-size",
Self::ContractSrc => "contract-src",
Self::ContractDataSize => "contract-data-size",
}
}
}

impl TryFrom<&str> for ContractDataVarName {
type Error = String;

fn try_from(value: &str) -> core::result::Result<Self, Self::Error> {
use self::ContractDataVarName::*;
match value {
"contract" => Ok(Contract),
"contract-size" => Ok(ContractSize),
"contract-src" => Ok(ContractSrc),
"contract-data-size" => Ok(ContractDataSize),
_ => Err("Invalid ContractDataVarName".into()),
}
}
}

pub struct ClarityDatabase<'a> {
pub store: RollbackWrapper<'a>,
headers_db: &'a dyn HeadersDB,
@@ -465,6 +527,13 @@ impl<'a> ClarityDatabase<'a> {
self.store.get_data::<T>(key)
}

pub fn get_data_by_hash<T>(&mut self, hash: &TrieHash) -> Result<Option<T>>
where
T: ClarityDeserializable<T>,
{
self.store.get_data_by_hash::<T>(hash)
}

pub fn put_value(&mut self, key: &str, value: Value, epoch: &StacksEpochId) -> Result<()> {
self.put_value_with_size(key, value, epoch)?;
Ok(())
@@ -522,6 +591,16 @@ impl<'a> ClarityDatabase<'a> {
self.store.get_data_with_proof(key)
}

pub fn get_data_with_proof_by_hash<T>(
&mut self,
hash: &TrieHash,
) -> Result<Option<(T, Vec<u8>)>>
where
T: ClarityDeserializable<T>,
{
self.store.get_data_with_proof_by_hash(hash)
}

pub fn make_key_for_trip(
contract_identifier: &QualifiedContractIdentifier,
data: StoreType,
@@ -559,12 +638,18 @@ impl<'a> ClarityDatabase<'a> {
self.store
.prepare_for_contract_metadata(contract_identifier, hash)?;
// insert contract-size
let key = ClarityDatabase::make_metadata_key(StoreType::Contract, "contract-size");
let key = ClarityDatabase::make_metadata_key(
StoreType::Contract,
ContractDataVarName::ContractSize.as_str(),
);
self.insert_metadata(contract_identifier, &key, &(contract_content.len() as u64))?;

// insert contract-src
if STORE_CONTRACT_SRC_INTERFACE {
let key = ClarityDatabase::make_metadata_key(StoreType::Contract, "contract-src");
let key = ClarityDatabase::make_metadata_key(
StoreType::Contract,
ContractDataVarName::ContractSrc.as_str(),
);
self.insert_metadata(contract_identifier, &key, &contract_content.to_string())?;
}
Ok(())
@@ -574,7 +659,10 @@ impl<'a> ClarityDatabase<'a> {
&mut self,
contract_identifier: &QualifiedContractIdentifier,
) -> Option<String> {
let key = ClarityDatabase::make_metadata_key(StoreType::Contract, "contract-src");
let key = ClarityDatabase::make_metadata_key(
StoreType::Contract,
ContractDataVarName::ContractSrc.as_str(),
);
self.fetch_metadata(contract_identifier, &key)
.ok()
.flatten()
@@ -683,15 +771,21 @@ impl<'a> ClarityDatabase<'a> {
&mut self,
contract_identifier: &QualifiedContractIdentifier,
) -> Result<u64> {
let key = ClarityDatabase::make_metadata_key(StoreType::Contract, "contract-size");
let key = ClarityDatabase::make_metadata_key(
StoreType::Contract,
ContractDataVarName::ContractSize.as_str(),
);
let contract_size: u64 =
self.fetch_metadata(contract_identifier, &key)?
.ok_or_else(|| {
InterpreterError::Expect(
"Failed to read non-consensus contract metadata, even though contract exists in MARF."
.into())
})?;
let key = ClarityDatabase::make_metadata_key(StoreType::Contract, "contract-data-size");
let key = ClarityDatabase::make_metadata_key(
StoreType::Contract,
ContractDataVarName::ContractDataSize.as_str(),
);
let data_size: u64 = self
.fetch_metadata(contract_identifier, &key)?
.ok_or_else(|| {
@@ -710,7 +804,10 @@ impl<'a> ClarityDatabase<'a> {
contract_identifier: &QualifiedContractIdentifier,
data_size: u64,
) -> Result<()> {
let key = ClarityDatabase::make_metadata_key(StoreType::Contract, "contract-size");
let key = ClarityDatabase::make_metadata_key(
StoreType::Contract,
ContractDataVarName::ContractSize.as_str(),
);
let contract_size: u64 =
self.fetch_metadata(contract_identifier, &key)?
.ok_or_else(|| {
@@ -720,7 +817,10 @@ impl<'a> ClarityDatabase<'a> {
})?;
contract_size.cost_overflow_add(data_size)?;

let key = ClarityDatabase::make_metadata_key(StoreType::Contract, "contract-data-size");
let key = ClarityDatabase::make_metadata_key(
StoreType::Contract,
ContractDataVarName::ContractDataSize.as_str(),
);
self.insert_metadata(contract_identifier, &key, &data_size)?;
Ok(())
}
@@ -730,21 +830,30 @@ impl<'a> ClarityDatabase<'a> {
contract_identifier: &QualifiedContractIdentifier,
contract: Contract,
) -> Result<()> {
let key = ClarityDatabase::make_metadata_key(StoreType::Contract, "contract");
let key = ClarityDatabase::make_metadata_key(
StoreType::Contract,
ContractDataVarName::Contract.as_str(),
);
self.insert_metadata(contract_identifier, &key, &contract)?;
Ok(())
}

pub fn has_contract(&mut self, contract_identifier: &QualifiedContractIdentifier) -> bool {
let key = ClarityDatabase::make_metadata_key(StoreType::Contract, "contract");
let key = ClarityDatabase::make_metadata_key(
StoreType::Contract,
ContractDataVarName::Contract.as_str(),
);
self.store.has_metadata_entry(contract_identifier, &key)
}

pub fn get_contract(
&mut self,
contract_identifier: &QualifiedContractIdentifier,
) -> Result<Contract> {
let key = ClarityDatabase::make_metadata_key(StoreType::Contract, "contract");
let key = ClarityDatabase::make_metadata_key(
StoreType::Contract,
ContractDataVarName::Contract.as_str(),
);
let mut data: Contract = self.fetch_metadata(contract_identifier, &key)?
.ok_or_else(|| InterpreterError::Expect(
"Failed to read non-consensus contract metadata, even though contract exists in MARF."
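Taken together, the clarity_db.rs changes above replace bare string literals such as "contract-size" with the `ContractDataVarName` enum and add string-to-variant parsing for `StoreType`. Below is a minimal sketch of the resulting round-trip behaviour, not code from this commit; the import path is an assumption about the crate layout and is not shown in this diff.

// Import path is an assumption about the crate layout.
use clarity::vm::database::clarity_db::{ContractDataVarName, StoreType};

fn main() {
    // ContractDataVarName round-trips between variant and string form.
    let var = ContractDataVarName::try_from("contract-size").expect("known variant");
    assert_eq!(var.as_str(), "contract-size");

    // StoreType::try_from parses the *decimal* text of the discriminant
    // (u8::from_str_radix with radix 10), so "9" maps to Contract (0x09)
    // and "16" maps to SimmedBlock (0x10 == 16), while hex text is rejected.
    assert!(matches!(StoreType::try_from("9"), Ok(StoreType::Contract)));
    assert!(matches!(StoreType::try_from("16"), Ok(StoreType::SimmedBlock)));
    assert!(StoreType::try_from("0x09").is_err());
}

The new `get_data_by_hash` and `get_data_with_proof_by_hash` wrappers on `ClarityDatabase` mirror the existing key-based getters but take a precomputed `TrieHash`; they forward to the `RollbackWrapper`, which is presumably backed by the new hash-addressed `ClarityBackingStore` methods shown in the next file.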
19 changes: 18 additions & 1 deletion clarity/src/vm/database/clarity_store.rs
@@ -18,7 +18,7 @@ use std::path::PathBuf;

#[cfg(feature = "canonical")]
use rusqlite::Connection;
use stacks_common::types::chainstate::{BlockHeaderHash, StacksBlockId, VRFSeed};
use stacks_common::types::chainstate::{BlockHeaderHash, StacksBlockId, TrieHash, VRFSeed};
use stacks_common::util::hash::{hex_bytes, to_hex, Hash160, Sha512Trunc256Sum};

use crate::vm::analysis::AnalysisDatabase;
@@ -64,9 +64,15 @@ pub trait ClarityBackingStore {
fn put_all_data(&mut self, items: Vec<(String, String)>) -> Result<()>;
/// fetch K-V out of the committed datastore
fn get_data(&mut self, key: &str) -> Result<Option<String>>;
/// fetch Hash(K)-V out of the committed datastore
fn get_data_from_path(&mut self, hash: &TrieHash) -> Result<Option<String>>;
/// fetch K-V out of the committed datastore, along with the byte representation
/// of the Merkle proof for that key-value pair
fn get_data_with_proof(&mut self, key: &str) -> Result<Option<(String, Vec<u8>)>>;
fn get_data_with_proof_from_path(
&mut self,
hash: &TrieHash,
) -> Result<Option<(String, Vec<u8>)>>;
fn has_entry(&mut self, key: &str) -> Result<bool> {
Ok(self.get_data(key)?.is_some())
}
@@ -209,10 +215,21 @@ impl ClarityBackingStore for NullBackingStore {
panic!("NullBackingStore can't retrieve data")
}

fn get_data_from_path(&mut self, _hash: &TrieHash) -> Result<Option<String>> {
panic!("NullBackingStore can't retrieve data")
}

fn get_data_with_proof(&mut self, _key: &str) -> Result<Option<(String, Vec<u8>)>> {
panic!("NullBackingStore can't retrieve data")
}

fn get_data_with_proof_from_path(
&mut self,
_hash: &TrieHash,
) -> Result<Option<(String, Vec<u8>)>> {
panic!("NullBackingStore can't retrieve data")
}

#[cfg(feature = "canonical")]
fn get_side_store(&mut self) -> &Connection {
panic!("NullBackingStore has no side store")
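The `ClarityBackingStore` trait now requires the hash-addressed lookups `get_data_from_path` and `get_data_with_proof_from_path` alongside the key-addressed ones, and `NullBackingStore` panics on them just as it does for every other read. The sketch below is a toy illustration of the key-versus-hash relationship the new methods imply, not an actual implementor of the trait; the `ToyStore` type is invented for this sketch, and using `TrieHash::from_key` as the key-hashing function is an assumption rather than something stated in this diff.

use stacks_common::types::chainstate::TrieHash;

// Invented for this sketch: a store that remembers each value under both its
// string key and the hash of that key.
struct ToyStore {
    entries: Vec<(String, TrieHash, String)>, // (key, hash of key, value)
}

impl ToyStore {
    fn put(&mut self, key: &str, value: &str) {
        // Assumption: TrieHash::from_key hashes a storage key the same way the
        // MARF does when it derives the trie path for that key.
        self.entries
            .push((key.to_string(), TrieHash::from_key(key), value.to_string()));
    }

    // Key-addressed lookup, analogous to ClarityBackingStore::get_data.
    fn get_data(&self, key: &str) -> Option<String> {
        self.entries
            .iter()
            .find(|(k, _, _)| k == key)
            .map(|(_, _, v)| v.clone())
    }

    // Hash-addressed lookup, analogous to the new get_data_from_path.
    fn get_data_from_path(&self, hash: &TrieHash) -> Option<String> {
        self.entries
            .iter()
            .find(|(_, h, _)| h == hash)
            .map(|(_, _, v)| v.clone())
    }
}

fn main() {
    let mut store = ToyStore { entries: Vec::new() };
    let key = "vm::ST000000000000000000002AMW42H.pox::1::total-liquid-stx";
    store.put(key, "12345");

    // A caller holding only the hash reaches the same value as one holding the key.
    let hash = TrieHash::from_key(key);
    assert_eq!(store.get_data_from_path(&hash), store.get_data(key));
}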