Remove merge red blocks when mining #4245

Draft · wants to merge 21 commits into dag-master
5 changes: 5 additions & 0 deletions chain/api/src/chain.rs
@@ -114,6 +114,11 @@ pub trait ChainReader {
uncles: &[BlockHeader],
header: &BlockHeader,
) -> Result<GhostdagData>;
fn merge_check_and_ghostdata(
&self,
uncles: &[BlockHeader],
header: &BlockHeader,
) -> Result<GhostdagData>;
fn is_dag_ancestor_of(&self, ancestor: HashValue, descendants: Vec<HashValue>) -> Result<bool>;
fn get_pruning_height(&self) -> BlockNumber;
fn get_pruning_config(&self) -> (u64, u64);
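The hunk above adds a second ghostdata entry point to the `ChainReader` trait, next to the existing `verify_and_ghostdata`. A minimal caller sketch, assuming the re-export paths visible in the other hunks (`starcoin_chain_api::ChainReader`, `starcoin_types::block::BlockHeader`, `starcoin_dag::types::ghostdata::GhostdagData`); the helper function itself is illustrative and not part of the PR:

```rust
use anyhow::Result;
use starcoin_chain_api::ChainReader;
use starcoin_dag::types::ghostdata::GhostdagData;
use starcoin_types::block::BlockHeader;

// Hypothetical helper: obtain merge-checked ghostdata for a block assembled by
// a miner. It only forwards to the new trait method, which is expected to fail
// when the supplied uncles are not exactly the mergeset blues or when the
// bounded merge depth is violated.
fn ghostdata_for_mined_block<R: ChainReader>(
    chain: &R,
    uncles: &[BlockHeader],
    header: &BlockHeader,
) -> Result<GhostdagData> {
    chain.merge_check_and_ghostdata(uncles, header)
}
```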
7 changes: 4 additions & 3 deletions chain/mock/src/mock_chain.rs
@@ -259,7 +259,7 @@ impl MockChain {

let MineNewDagBlockInfo {
tips: pruned_tips,
blue_blocks,
ghostdata,
pruning_point,
} = self.head.dag().calc_mergeset_and_tips(
previous_pruning,
@@ -270,14 +270,15 @@

debug!(
"tips: {:?}, blue_blocks: {:?}, pruning_point: {:?}",
pruned_tips, blue_blocks, pruning_point
pruned_tips, ghostdata.mergeset_blues, pruning_point
);

let (template, _) = self.head.create_block_template_by_header(
*self.miner.address(),
selected_header,
vec![],
blue_blocks
ghostdata
.mergeset_blues
.get(1..)
.unwrap_or(&[])
.iter()
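The `.get(1..).unwrap_or(&[])` call above relies on the convention, implied throughout this PR by the `skip(1)` calls, that `mergeset_blues[0]` is the selected parent itself, so only the remaining blue blocks become uncles in the block template. A self-contained sketch of that slicing, with plain `u64` ids standing in for `HashValue`:

```rust
// Assumed convention: mergeset_blues[0] is the selected parent, the rest are
// the blue blocks eligible to be uncles.
fn uncle_candidates(mergeset_blues: &[u64]) -> &[u64] {
    mergeset_blues.get(1..).unwrap_or(&[])
}

fn main() {
    assert_eq!(uncle_candidates(&[7, 8, 9]), &[8, 9]);
    assert!(uncle_candidates(&[7]).is_empty()); // only the selected parent, no uncles
    assert!(uncle_candidates(&[]).is_empty()); // no panic on an empty mergeset
}
```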
139 changes: 101 additions & 38 deletions chain/src/chain.rs
@@ -1,7 +1,7 @@
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0

use crate::verifier::{BlockVerifier, DagVerifier, DagVerifierWithGhostData, FullVerifier};
use crate::verifier::{BlockVerifier, DagVerifierForMining, DagVerifierForSync, FullVerifier};
use anyhow::{bail, ensure, format_err, Ok, Result};
use sp_utils::stop_watch::{watch, CHAIN_WATCH_NAME};
use starcoin_accumulator::inmemory::InMemoryAccumulator;
Expand All @@ -13,13 +13,15 @@ use starcoin_chain_api::{
ExcludedTxns, ExecutedBlock, MintedUncleNumber, TransactionInfoWithProof, VerifiedBlock,
VerifyBlockField,
};
use starcoin_config::genesis_config::{G_MERGE_DEPTH, G_PRUNING_FINALITY};
use starcoin_consensus::Consensus;
use starcoin_crypto::hash::PlainCryptoHash;
use starcoin_crypto::HashValue;
use starcoin_dag::blockdag::{BlockDAG, MineNewDagBlockInfo};
use starcoin_dag::blockdag::BlockDAG;
use starcoin_dag::consensusdb::consenses_state::DagState;
use starcoin_dag::consensusdb::prelude::StoreError;
use starcoin_dag::consensusdb::schemadb::GhostdagStoreReader;
use starcoin_dag::types::ghostdata::GhostdagData;
use starcoin_executor::VMMetrics;
#[cfg(feature = "force-deploy")]
use starcoin_force_upgrade::force_upgrade_management::get_force_upgrade_block_number;
@@ -30,6 +32,7 @@ use starcoin_statedb::ChainStateDB;
use starcoin_storage::Store;
use starcoin_time_service::TimeService;
use starcoin_types::block::BlockIdAndNumber;
use starcoin_types::consensus_header::ConsensusHeader;
use starcoin_types::contract_event::ContractEventInfo;
use starcoin_types::filter::Filter;
use starcoin_types::startup_info::{ChainInfo, ChainStatus};
@@ -48,6 +51,7 @@ use starcoin_vm_types::genesis_config::{ChainId, ConsensusStrategy};
use starcoin_vm_types::on_chain_config::FlexiDagConfigV2;
use starcoin_vm_types::on_chain_resource::Epoch;
use std::cmp::min;
use std::collections::HashSet;
use std::iter::Extend;
use std::option::Option::{None, Some};
use std::sync::atomic::{AtomicBool, Ordering};
@@ -316,25 +320,16 @@ impl BlockChain {
(self.dag().ghostdata(&tips)?, tips)
};

let MineNewDagBlockInfo {
tips,
blue_blocks,
pruning_point: _,
} = {
let blue_blocks = ghostdata.mergeset_blues.clone()[1..].to_vec();
MineNewDagBlockInfo {
tips,
blue_blocks,
pruning_point, // TODO: new test cases will need pass this field if they have some special requirements.
}
};

debug!(
"Blue blocks:{:?} in chain/create_block_template_by_header",
blue_blocks
ghostdata.mergeset_blues,
);
let blue_blocks = blue_blocks
let blue_blocks = ghostdata
.mergeset_blues
.as_ref()
.clone()
.into_iter()
.skip(1)
.map(|block| self.storage.get_block_by_hash(block))
.collect::<Result<Vec<Option<Block>>>>()?
.into_iter()
@@ -1240,7 +1235,7 @@ impl ChainReader for BlockChain {
}

fn verify(&self, block: Block) -> Result<VerifiedBlock> {
DagVerifier::verify_block(self, block)
DagVerifierForMining::verify_block(self, block)
}

fn execute(&mut self, verified_block: VerifiedBlock) -> Result<ExecutedBlock> {
@@ -1390,34 +1385,70 @@ impl ChainReader for BlockChain {
.get_block_header_by_hash(header.parent_hash())?
.ok_or_else(|| format_err!("cannot find parent block header"))?;
let next_ghostdata = self.dag().verify_and_ghostdata(uncles, header)?;
let (pruning_depth, pruning_finality) = self.get_pruning_config();
if self.status().head().pruning_point() != HashValue::zero() {
let previous_ghostdata = if previous_header.pruning_point() == HashValue::zero() {
let genesis = self
.storage
.get_genesis()?
.ok_or_else(|| format_err!("the genesis id is none!"))?;
self.dag().storage.ghost_dag_store.get_data(genesis)?
} else {
self.dag()
.storage
.ghost_dag_store
.get_data(previous_header.pruning_point())?
};

self.dag().verify_pruning_point(
self.verify_pruning_point(
previous_header.pruning_point(),
previous_ghostdata.as_ref(),
header.pruning_point(),
&next_ghostdata,
pruning_depth,
pruning_finality,
)?;
}

Ok(next_ghostdata)
}

fn merge_check_and_ghostdata(
&self,
uncles: &[BlockHeader],
header: &BlockHeader,
) -> Result<starcoin_dag::types::ghostdata::GhostdagData> {
let next_ghostdata = self.dag().ghostdata(&header.parents())?;
if next_ghostdata
.mergeset_blues
.iter()
.skip(1)
.cloned()
.collect::<HashSet<_>>()
!= uncles
.iter()
.map(|header| header.id())
.collect::<HashSet<_>>()
{
bail!(
"Uncle verification failed: Local mergeset blues ({:?}) do not match miner's uncles ({:?}).",
next_ghostdata.mergeset_blues.iter().skip(1).collect::<Vec<_>>(),
uncles.iter().map(|header| header.id()).collect::<Vec<_>>()
);
}
let previous_header = self
.storage
.get_block_header_by_hash(header.parent_hash())?
.ok_or_else(|| format_err!("cannot find parent block header"))?;
if self.status().head().pruning_point() != HashValue::zero() {
self.verify_pruning_point(
previous_header.pruning_point(),
header.pruning_point(),
&next_ghostdata,
)?;
}
let previous_pruning_point = if header.pruning_point() == HashValue::zero() {
self.storage
.get_genesis()?
.ok_or_else(|| format_err!("cannot find generation block"))?
} else {
header.pruning_point()
};

let _ = self.dag().check_bounded_merge_depth(
header.parent_hash(),
previous_pruning_point,
&next_ghostdata,
G_MERGE_DEPTH,
G_PRUNING_FINALITY,
)?;

anyhow::Ok(next_ghostdata)
}

fn is_dag_ancestor_of(&self, ancestor: HashValue, descendants: Vec<HashValue>) -> Result<bool> {
self.dag().check_ancestor_of(ancestor, descendants)
}
@@ -1649,6 +1680,38 @@ impl BlockChain {
1
}
}

pub fn verify_pruning_point(
&self,
previous_pruning_point: HashValue,
next_pruning_point: HashValue,
next_ghostdata: &GhostdagData,
) -> anyhow::Result<()> {
let previous_ghostdata = if previous_pruning_point == HashValue::zero() {
let genesis = self
.storage
.get_genesis()?
.ok_or_else(|| format_err!("the genesis id is none!"))?;
self.dag().storage.ghost_dag_store.get_data(genesis)?
} else {
self.dag()
.storage
.ghost_dag_store
.get_data(previous_pruning_point)?
};

let (pruning_depth, pruning_finality) = self.get_pruning_config();

self.dag().verify_pruning_point(
previous_pruning_point,
previous_ghostdata.as_ref(),
next_pruning_point,
next_ghostdata,
pruning_depth,
pruning_finality,
)?;
anyhow::Ok(())
}
}

impl ChainWriter for BlockChain {
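The new `verify_pruning_point` wrapper above centralises what `verify_and_ghostdata` previously did inline: pick the reference ghostdata (the genesis block's while the previous pruning point is still `HashValue::zero()`, otherwise the stored pruning point's), read the pruning config, and delegate to the DAG-level check. A toy sketch of just that anchor-selection rule, with `u64` ids in place of `HashValue` and storage access omitted:

```rust
// Anchor selection only; fetching ghostdata and the DAG-level verification are
// left out. Zero plays the role of HashValue::zero().
fn previous_pruning_anchor(previous_pruning_point: u64, genesis: u64) -> u64 {
    if previous_pruning_point == 0 {
        genesis // pruning not active yet: measure from the genesis ghostdata
    } else {
        previous_pruning_point
    }
}

fn main() {
    assert_eq!(previous_pruning_anchor(0, 42), 42);
    assert_eq!(previous_pruning_anchor(17, 42), 17);
}
```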
@@ -1666,14 +1729,14 @@ impl ChainWriter for BlockChain {
}

fn apply(&mut self, block: Block) -> Result<ExecutedBlock> {
self.apply_with_verifier::<DagVerifier>(block)
self.apply_with_verifier::<DagVerifierForMining>(block)
}

fn chain_state(&mut self) -> &ChainStateDB {
&self.statedb
}
fn apply_for_sync(&mut self, block: Block) -> Result<ExecutedBlock> {
self.apply_with_verifier::<DagVerifierWithGhostData>(block)
self.apply_with_verifier::<DagVerifierForSync>(block)
}
}

18 changes: 9 additions & 9 deletions chain/src/verifier/mod.rs
@@ -426,8 +426,9 @@ impl BasicDagVerifier {
}
}

pub struct DagVerifier;
impl BlockVerifier for DagVerifier {
// for mining
pub struct DagVerifierForMining;
impl BlockVerifier for DagVerifierForMining {
fn verify_header<R>(current_chain: &R, new_block_header: &BlockHeader) -> Result<()>
where
R: ChainReader,
@@ -443,16 +444,15 @@ impl BlockVerifier for DagVerifier {
where
R: ChainReader,
{
Ok(Some(BasicDagVerifier::verify_blue_blocks(
current_chain,
uncles,
header,
)?))
Ok(Some(
current_chain.merge_check_and_ghostdata(uncles, header)?,
))
}
}

pub struct DagVerifierWithGhostData;
impl BlockVerifier for DagVerifierWithGhostData {
// for sync
pub struct DagVerifierForSync;
impl BlockVerifier for DagVerifierForSync {
fn verify_header<R>(current_chain: &R, new_block_header: &BlockHeader) -> Result<()>
where
R: ChainReader,
1 change: 1 addition & 0 deletions config/src/genesis_config.rs
@@ -756,6 +756,7 @@ pub static G_BASE_BLOCK_GAS_LIMIT: u64 = 50_000_000; //must big than maximum_num

pub static G_PRUNING_DEPTH: u64 = 17280;
pub static G_PRUNING_FINALITY: u64 = 8640;
pub static G_MERGE_DEPTH: u64 = 3600; // the merge depth should be smaller than the pruning finality

static G_EMPTY_BOOT_NODES: Lazy<Vec<MultiaddrWithPeerId>> = Lazy::new(Vec::new);
const ONE_DAY: u64 = 86400;
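The comment on `G_MERGE_DEPTH` states that the merge depth must stay below the pruning finality, and the values in this file also keep the finality below the pruning depth (3600 < 8640 < 17280). A hedged sanity test, not part of the PR, that would pin those orderings down if placed in the same file:

```rust
#[cfg(test)]
mod pruning_window_ordering {
    // Hypothetical test module; the constants are the ones defined above in
    // config/src/genesis_config.rs.
    use super::{G_MERGE_DEPTH, G_PRUNING_DEPTH, G_PRUNING_FINALITY};

    #[test]
    fn merge_depth_sits_inside_the_finality_window() {
        assert!(G_MERGE_DEPTH < G_PRUNING_FINALITY); // stated requirement of the new constant
        assert!(G_PRUNING_FINALITY < G_PRUNING_DEPTH); // holds for the current values
    }
}
```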