Merge pull request #93 from mempool/junderw/order_txes_in_block
Order history events that share the same confirmation height.
wiz authored May 27, 2024
2 parents 62863af + 7fc4912 commit 9ab6516
Showing 7 changed files with 91 additions and 28 deletions.
15 changes: 12 additions & 3 deletions src/chain.rs
@@ -128,7 +128,7 @@ impl Network

pub fn genesis_hash(network: Network) -> BlockHash {
#[cfg(not(feature = "liquid"))]
return bitcoin_genesis_hash(network.into());
return bitcoin_genesis_hash(network);
#[cfg(feature = "liquid")]
return liquid_genesis_hash(network);
}
@@ -139,20 +139,29 @@ pub fn bitcoin_genesis_hash(network: Network) -> bitcoin::BlockHash {
genesis_block(BNetwork::Bitcoin).block_hash();
static ref TESTNET_GENESIS: bitcoin::BlockHash =
genesis_block(BNetwork::Testnet).block_hash();
static ref TESTNET4_GENESIS: bitcoin::BlockHash =
BlockHash::from_str("00000000da84f2bafbbc53dee25a72ae507ff4914b867c565be350b0da8bf043").unwrap();
static ref TESTNET4_GENESIS: bitcoin::BlockHash = bitcoin::BlockHash::from_str(
"00000000da84f2bafbbc53dee25a72ae507ff4914b867c565be350b0da8bf043"
)
.unwrap();
static ref REGTEST_GENESIS: bitcoin::BlockHash =
genesis_block(BNetwork::Regtest).block_hash();
static ref SIGNET_GENESIS: bitcoin::BlockHash =
genesis_block(BNetwork::Signet).block_hash();
}
#[cfg(not(feature = "liquid"))]
match network {
Network::Bitcoin => *BITCOIN_GENESIS,
Network::Testnet => *TESTNET_GENESIS,
Network::Testnet4 => *TESTNET4_GENESIS,
Network::Regtest => *REGTEST_GENESIS,
Network::Signet => *SIGNET_GENESIS,
}
#[cfg(feature = "liquid")]
match network {
Network::Liquid => *BITCOIN_GENESIS,
Network::LiquidTestnet => *TESTNET_GENESIS,
Network::LiquidRegtest => *REGTEST_GENESIS,
}
}

#[cfg(feature = "liquid")]
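A brief aside (not part of the diff): the lazy_static block above caches one genesis hash per network, and the hunk adds a Testnet4 entry parsed from a hex string. A minimal standalone sketch of the same caching pattern, using std's OnceLock in place of the repo's lazy_static and only the Testnet4 hash string taken from this hunk; everything else here is illustrative.

```rust
use std::str::FromStr;
use std::sync::OnceLock;

use bitcoin::BlockHash;

// Parse the genesis hash once and reuse it, mirroring the lazy_static constants above.
fn testnet4_genesis() -> &'static BlockHash {
    static HASH: OnceLock<BlockHash> = OnceLock::new();
    HASH.get_or_init(|| {
        BlockHash::from_str("00000000da84f2bafbbc53dee25a72ae507ff4914b867c565be350b0da8bf043")
            .expect("valid 64-character hex block hash")
    })
}

fn main() {
    println!("testnet4 genesis: {}", testnet4_genesis());
}
```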
1 change: 1 addition & 0 deletions src/daemon.rs
@@ -295,6 +295,7 @@ pub struct Daemon {
}

impl Daemon {
#[allow(clippy::too_many_arguments)]
pub fn new(
daemon_dir: PathBuf,
blocks_dir: PathBuf,
11 changes: 6 additions & 5 deletions src/elements/asset.rs
@@ -174,17 +174,16 @@ pub struct BurningInfo {
pub fn index_confirmed_tx_assets(
tx: &Transaction,
confirmed_height: u32,
tx_position: u16,
network: Network,
parent_network: BNetwork,
rows: &mut Vec<DBRow>,
) {
let (history, issuances) = index_tx_assets(tx, network, parent_network);

rows.extend(
history.into_iter().map(|(asset_id, info)| {
asset_history_row(&asset_id, confirmed_height, info).into_row()
}),
);
rows.extend(history.into_iter().map(|(asset_id, info)| {
asset_history_row(&asset_id, confirmed_height, tx_position, info).into_row()
}));

// the initial issuance is kept twice: once in the history index under I<asset><height><txid:vin>,
// and once separately under i<asset> for asset lookup with some more associated metadata.
@@ -336,12 +335,14 @@ fn index_tx_assets(
fn asset_history_row(
asset_id: &AssetId,
confirmed_height: u32,
tx_position: u16,
txinfo: TxHistoryInfo,
) -> TxHistoryRow {
let key = TxHistoryKey {
code: b'I',
hash: full_hash(&asset_id.into_inner()[..]),
confirmed_height,
tx_position,
txinfo,
};
TxHistoryRow { key }
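For illustration only (outside the diff): the comment in this hunk describes the two places an issuance is stored, an 'I' history row scanned by height like scripthash history, and a standalone 'i' row for direct asset lookup. A rough sketch of those key shapes; only the one-byte prefixes, the big-endian height, and the new tx_position come from the code, while the helper names are made up and the trailing txid:vin payload is omitted.

```rust
// Build the 'I' history key prefix: asset id, then big-endian height and position
// so RocksDB range scans return rows in block order (payload bytes omitted).
fn history_key(asset_id: &[u8; 32], height: u32, tx_position: u16) -> Vec<u8> {
    let mut key = vec![b'I'];
    key.extend_from_slice(asset_id);
    key.extend_from_slice(&height.to_be_bytes());
    key.extend_from_slice(&tx_position.to_be_bytes()); // added by this commit
    key
}

// The separate 'i' row keyed only by asset id, used for metadata lookup.
fn lookup_key(asset_id: &[u8; 32]) -> Vec<u8> {
    let mut key = vec![b'i'];
    key.extend_from_slice(asset_id);
    key
}

fn main() {
    let asset = [0xAB; 32];
    assert!(history_key(&asset, 2, 3).starts_with(&[b'I']));
    assert!(lookup_key(&asset).starts_with(&[b'i']));
    println!("history key prefix length = {}", history_key(&asset, 2, 3).len());
}
```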
8 changes: 7 additions & 1 deletion src/elements/peg.rs
@@ -19,7 +19,13 @@ pub fn get_pegout_data(
let pegged_asset_id = network.pegged_asset()?;
txout.pegout_data().filter(|pegout| {
pegout.asset == Asset::Explicit(*pegged_asset_id)
&& pegout.genesis_hash == bitcoin_genesis_hash(parent_network)
&& pegout.genesis_hash
== bitcoin_genesis_hash(match parent_network {
BNetwork::Bitcoin => Network::Liquid,
BNetwork::Testnet => Network::LiquidTestnet,
BNetwork::Signet => return false,
BNetwork::Regtest => Network::LiquidRegtest,
})
})
}

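A short aside (not part of the diff): the match added above reads as a partial map from the parent, Bitcoin-side network to the Liquid-side network whose pegged genesis hash should be checked, with Signet rejected outright because it has no Liquid counterpart. A self-contained restatement under simplified, assumed enums:

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
#[allow(dead_code)]
enum BNetwork { Bitcoin, Testnet, Signet, Regtest }

#[derive(Clone, Copy, Debug, PartialEq)]
enum Network { Liquid, LiquidTestnet, LiquidRegtest }

// Map the parent chain to the Liquid network used for genesis-hash comparison;
// None means pegout data on that parent chain is rejected.
fn liquid_counterpart(parent: BNetwork) -> Option<Network> {
    match parent {
        BNetwork::Bitcoin => Some(Network::Liquid),
        BNetwork::Testnet => Some(Network::LiquidTestnet),
        BNetwork::Regtest => Some(Network::LiquidRegtest),
        BNetwork::Signet => None,
    }
}

fn main() {
    assert_eq!(liquid_counterpart(BNetwork::Signet), None);
    assert_eq!(liquid_counterpart(BNetwork::Bitcoin), Some(Network::Liquid));
    println!("ok");
}
```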
6 changes: 5 additions & 1 deletion src/new_index/db.rs
@@ -5,7 +5,11 @@ use std::path::Path;
use crate::config::Config;
use crate::util::{bincode_util, Bytes};

static DB_VERSION: u32 = 1;
/// Each version will break any running instance with a DB that has a differing version.
/// It will also break if light mode is enabled or disabled.
// 1 = Original DB (since fork from Blockstream)
// 2 = Add tx position to TxHistory rows and place Spending before Funding
static DB_VERSION: u32 = 2;

#[derive(Debug, Eq, PartialEq)]
pub struct DBRow {
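Aside (not part of the diff): per the doc comment above, bumping DB_VERSION breaks any instance running against an older database, since version-1 TxHistory rows carry no tx_position bytes and would not deserialize under the new key layout. The actual open-time check lives elsewhere in db.rs; the sketch below only illustrates the pattern such a bump relies on, with hypothetical names.

```rust
const DB_VERSION: u32 = 2;

// Decide whether the on-disk data can be reused by this binary.
fn needs_full_reindex(stored_version: Option<u32>) -> bool {
    match stored_version {
        Some(v) if v == DB_VERSION => false, // layout matches this binary
        _ => true, // missing, older, or newer version: rows may not deserialize
    }
}

fn main() {
    assert!(needs_full_reindex(Some(1))); // version-1 rows have no tx_position
    assert!(!needs_full_reindex(Some(2)));
    println!("ok");
}
```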
73 changes: 56 additions & 17 deletions src/new_index/schema.rs
@@ -533,23 +533,23 @@ impl ChainQuery {
let rows = self
.history_iter_scan_reverse(code, hash)
.map(TxHistoryRow::from_row)
.map(|row| (row.get_txid(), row.key.txinfo))
.skip_while(|(txid, _)| {
.map(|row| (row.get_txid(), row.key.txinfo, row.key.tx_position))
.skip_while(|(txid, _, _)| {
// skip until we reach the last_seen_txid
last_seen_txid.map_or(false, |last_seen_txid| last_seen_txid != txid)
})
.skip_while(|(txid, _)| {
.skip_while(|(txid, _, _)| {
// skip the last_seen_txid itself
last_seen_txid.map_or(false, |last_seen_txid| last_seen_txid == txid)
})
.filter_map(|(txid, info)| {
.filter_map(|(txid, info, tx_position)| {
self.tx_confirming_block(&txid)
.map(|b| (txid, info, b.height, b.time))
.map(|b| (txid, info, b.height, b.time, tx_position))
});

// collate utxo funding/spending events by transaction
let mut map: HashMap<Txid, TxHistorySummary> = HashMap::new();
for (txid, info, height, time) in rows {
for (txid, info, height, time, tx_position) in rows {
if !map.contains_key(&txid) && map.len() == limit {
break;
}
@@ -565,6 +565,7 @@ impl ChainQuery {
value: info.value.try_into().unwrap_or(0),
height,
time,
tx_position,
});
}
#[cfg(not(feature = "liquid"))]
@@ -578,6 +579,7 @@ impl ChainQuery {
value: 0_i64.saturating_sub(info.value.try_into().unwrap_or(0)),
height,
time,
tx_position,
});
}
#[cfg(feature = "liquid")]
@@ -587,6 +589,7 @@ impl ChainQuery {
value: 0,
height,
time,
tx_position,
});
}
#[cfg(feature = "liquid")]
@@ -596,6 +599,7 @@ impl ChainQuery {
value: 0,
height,
time,
tx_position,
});
}
#[cfg(feature = "liquid")]
@@ -606,7 +610,11 @@ impl ChainQuery {
let mut tx_summaries = map.into_values().collect::<Vec<TxHistorySummary>>();
tx_summaries.sort_by(|a, b| {
if a.height == b.height {
a.value.cmp(&b.value)
if a.tx_position == b.tx_position {
a.value.cmp(&b.value)
} else {
b.tx_position.cmp(&a.tx_position)
}
} else {
b.height.cmp(&a.height)
}
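A worked aside (not part of the diff): the comparator above sorts summaries newest block first, then by descending tx_position inside one block, falling back to value only as a final tie-break. A minimal sketch on a stripped-down summary type holding just the fields that participate in the sort:

```rust
#[derive(Debug, PartialEq)]
struct Summary { height: usize, tx_position: u16, value: i64 }

fn main() {
    let mut v = vec![
        Summary { height: 100, tx_position: 1, value: -5 },
        Summary { height: 101, tx_position: 0, value: 7 },
        Summary { height: 100, tx_position: 4, value: 9 },
    ];
    // Same comparator as the diff: descending height, then descending tx_position,
    // then ascending value.
    v.sort_by(|a, b| {
        if a.height == b.height {
            if a.tx_position == b.tx_position {
                a.value.cmp(&b.value)
            } else {
                b.tx_position.cmp(&a.tx_position)
            }
        } else {
            b.height.cmp(&a.height)
        }
    });
    assert_eq!(v[0].height, 101);                               // newest block first
    assert_eq!((v[1].height, v[1].tx_position), (100, 4));      // later position in the block next
    assert_eq!((v[2].height, v[2].tx_position), (100, 1));
    println!("{:?}", v);
}
```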
@@ -1246,9 +1254,16 @@ fn index_blocks(
.par_iter() // serialization is CPU-intensive
.map(|b| {
let mut rows = vec![];
for tx in &b.block.txdata {
for (idx, tx) in b.block.txdata.iter().enumerate() {
let height = b.entry.height() as u32;
index_transaction(tx, height, previous_txos_map, &mut rows, iconfig);
index_transaction(
tx,
height,
idx as u16,
previous_txos_map,
&mut rows,
iconfig,
);
}
rows.push(BlockRow::new_done(full_hash(&b.entry.hash()[..])).into_row()); // mark block as "indexed"
rows
@@ -1261,13 +1276,14 @@ fn index_transaction(
fn index_transaction(
tx: &Transaction,
confirmed_height: u32,
tx_position: u16,
previous_txos_map: &HashMap<OutPoint, TxOut>,
rows: &mut Vec<DBRow>,
iconfig: &IndexerConfig,
) {
// persist history index:
// H{funding-scripthash}{funding-height}F{funding-txid:vout} → ""
// H{funding-scripthash}{spending-height}S{spending-txid:vin}{funding-txid:vout} → ""
// H{funding-scripthash}{spending-height}{spending-block-pos}S{spending-txid:vin}{funding-txid:vout} → ""
// H{funding-scripthash}{funding-height}{funding-block-pos}F{funding-txid:vout} → ""
// persist "edges" for fast is-this-TXO-spent check
// S{funding-txid:vout}{spending-txid:vin} → ""
let txid = full_hash(&tx.txid()[..]);
@@ -1276,6 +1292,7 @@ fn index_transaction(
let history = TxHistoryRow::new(
&txo.script_pubkey,
confirmed_height,
tx_position,
TxHistoryInfo::Funding(FundingInfo {
txid,
vout: txo_index as u16,
@@ -1302,6 +1319,7 @@ fn index_transaction(
let history = TxHistoryRow::new(
&prev_txo.script_pubkey,
confirmed_height,
tx_position,
TxHistoryInfo::Spending(SpendingInfo {
txid,
vin: txi_index as u16,
@@ -1326,6 +1344,7 @@ fn index_transaction(
asset::index_confirmed_tx_assets(
tx,
confirmed_height,
tx_position,
iconfig.network,
iconfig.parent_network,
rows,
@@ -1567,8 +1586,11 @@ pub struct SpendingInfo {
#[derive(Serialize, Deserialize, Debug)]
#[cfg_attr(test, derive(PartialEq, Eq))]
pub enum TxHistoryInfo {
Funding(FundingInfo),
// If a spend and a fund for the same scripthash
// occur in the same tx, spends should come first.
// This ordering comes from the enum order.
Spending(SpendingInfo),
Funding(FundingInfo),

#[cfg(feature = "liquid")]
Issuing(asset::IssuingInfo),
@@ -1602,6 +1624,7 @@ pub struct TxHistoryKey {
pub code: u8, // H for script history or I for asset history (elements only)
pub hash: FullHash, // either a scripthash (always on bitcoin) or an asset id (elements only)
pub confirmed_height: u32, // MUST be serialized as big-endian (for correct scans).
pub tx_position: u16, // MUST be serialized as big-endian (for correct scans). Position in block.
pub txinfo: TxHistoryInfo,
}

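A brief aside (not part of the diff): the two MUST-be-big-endian comments exist because RocksDB range scans compare keys as raw bytes, and big-endian integers sort bytewise in the same order as their numeric values. The serialized enum tag that follows the position (Spending = 0, Funding = 1 after this change, matching the 4-byte variant index visible in the test vectors below) is what puts spends before funds within the same block slot. A small sketch of both points; the tag bytes here are written by hand rather than by the repo's bincode config.

```rust
fn main() {
    // Big-endian preserves numeric order under bytewise comparison; little-endian would not.
    let (h1, h2): (u32, u32) = (2, 256);
    assert!(h1.to_be_bytes() < h2.to_be_bytes());
    assert!(h1.to_le_bytes() > h2.to_le_bytes());

    // Same height and tx_position: the variant tag alone decides the order,
    // so Spending (0) sorts before Funding (1).
    let (height, pos): (u32, u16) = (2, 3);
    let spend_key =
        [&height.to_be_bytes()[..], &pos.to_be_bytes()[..], &0u32.to_be_bytes()[..]].concat();
    let fund_key =
        [&height.to_be_bytes()[..], &pos.to_be_bytes()[..], &1u32.to_be_bytes()[..]].concat();
    assert!(spend_key < fund_key);
    println!("ok");
}
```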
@@ -1610,11 +1633,17 @@ pub struct TxHistoryRow {
}

impl TxHistoryRow {
fn new(script: &Script, confirmed_height: u32, txinfo: TxHistoryInfo) -> Self {
fn new(
script: &Script,
confirmed_height: u32,
tx_position: u16,
txinfo: TxHistoryInfo,
) -> Self {
let key = TxHistoryKey {
code: b'H',
hash: compute_script_hash(script),
confirmed_height,
tx_position,
txinfo,
};
TxHistoryRow { key }
@@ -1681,6 +1710,7 @@ pub struct TxHistorySummary {
height: usize,
value: i64,
time: u32,
tx_position: u16,
}

#[derive(Serialize, Deserialize)]
@@ -1845,8 +1875,10 @@ mod tests {
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
// confirmed_height
0, 0, 0, 2,
// tx_position
0, 3,
// TxHistoryInfo variant (Funding)
0, 0, 0, 0,
0, 0, 0, 1,
// FundingInfo
// txid
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
@@ -1865,7 +1897,8 @@ mod tests {
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
0, 0, 0, 2,
0, 0, 0, 0,
0, 3,
0, 0, 0, 1,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
0, 3,
@@ -1879,7 +1912,8 @@ mod tests {
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
0, 0, 0, 2,
0, 0, 0, 1,
0, 3,
0, 0, 0, 0,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
0, 12,
@@ -1895,7 +1929,8 @@ mod tests {
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
0, 0, 0, 2,
0, 0, 0, 1,
0, 3,
0, 0, 0, 0,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
0, 12,
@@ -1912,6 +1947,7 @@ mod tests {
code: b'H',
hash: [1; 32],
confirmed_height: 2,
tx_position: 3,
txinfo: super::TxHistoryInfo::Funding(super::FundingInfo {
txid: [2; 32],
vout: 3,
@@ -1924,6 +1960,7 @@ mod tests {
code: b'H',
hash: [1; 32],
confirmed_height: 2,
tx_position: 3,
txinfo: super::TxHistoryInfo::Funding(super::FundingInfo {
txid: [2; 32],
vout: 3,
@@ -1936,6 +1973,7 @@ mod tests {
code: b'H',
hash: [1; 32],
confirmed_height: 2,
tx_position: 3,
txinfo: super::TxHistoryInfo::Spending(super::SpendingInfo {
txid: [18; 32],
vin: 12,
@@ -1950,6 +1988,7 @@ mod tests {
code: b'H',
hash: [1; 32],
confirmed_height: 2,
tx_position: 3,
txinfo: super::TxHistoryInfo::Spending(super::SpendingInfo {
txid: [18; 32],
vin: 12,
5 changes: 4 additions & 1 deletion src/rest.rs
@@ -1668,7 +1668,10 @@ fn address_to_scripthash(addr: &str, network: Network) -> Result<FullHash, HttpE
// `addr_network` will be detected as Testnet for all of them.
addr_network == network
|| (addr_network == Network::Testnet
&& matches!(network, Network::Regtest | Network::Signet | Network::Testnet4))
&& matches!(
network,
Network::Regtest | Network::Signet | Network::Testnet4
))
};

#[cfg(feature = "liquid")]
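One last aside (not part of the diff): the reformatted check above accepts a Testnet-parsed address when the server runs Regtest, Signet, or Testnet4, since addresses on those networks may be detected as Testnet. Restated as a small predicate over a simplified, assumed Network enum:

```rust
#[derive(Clone, Copy, PartialEq)]
#[allow(dead_code)]
enum Network { Bitcoin, Testnet, Testnet4, Regtest, Signet }

// Accept the address if its detected network matches the server's network,
// or if it parsed as Testnet and the server runs a Testnet-compatible network.
fn address_network_matches(addr_network: Network, server_network: Network) -> bool {
    addr_network == server_network
        || (addr_network == Network::Testnet
            && matches!(
                server_network,
                Network::Regtest | Network::Signet | Network::Testnet4
            ))
}

fn main() {
    assert!(address_network_matches(Network::Testnet, Network::Testnet4));
    assert!(!address_network_matches(Network::Bitcoin, Network::Testnet4));
    println!("ok");
}
```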
