Limit index lookups to prevent DoS for "popular" addresses
romanz committed Aug 21, 2021
1 parent 321dc23 commit 31765e3
Showing 6 changed files with 36 additions and 5 deletions.
internal/config_specification.toml: 2 changes (1 addition, 1 deletion)

@@ -86,7 +86,7 @@ name = "ignore_mempool"
 doc = "Don't sync mempool - queries will show only confirmed transactions."

 [[param]]
-name = "txid_limit"
+name = "index_lookup_limit"
 type = "usize"
 doc = "Number of transactions to lookup before returning an error, to prevent 'too popular' addresses from causing the RPC server to get stuck (0 - disable the limit)"
 default = "100"
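In plain terms, the renamed option is a lookup budget: a query may touch at most `index_lookup_limit` index entries before the server gives up with an error, and setting it to `0` disables the check entirely. A minimal sketch of that "0 disables the limit" convention, assuming nothing beyond the doc string above (the helper name `parse_lookup_limit` is illustrative and not part of electrs; the real conversion is the `match` in src/config.rs below):

/// Illustrative only: map the raw `index_lookup_limit` setting to an optional cap,
/// following the "0 - disable the limit" convention documented above.
fn parse_lookup_limit(raw: usize) -> Option<usize> {
    // Equivalent to `match raw { 0 => None, n => Some(n) }`.
    std::num::NonZeroUsize::new(raw).map(std::num::NonZeroUsize::get)
}

fn main() {
    assert_eq!(parse_lookup_limit(0), None);        // 0 disables the limit
    assert_eq!(parse_lookup_limit(100), Some(100)); // the documented default
}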
src/config.rs: 6 changes (6 additions, 0 deletions)

@@ -130,6 +130,7 @@ pub struct Config {
     pub monitoring_addr: SocketAddr,
     pub wait_duration: Duration,
     pub index_batch_size: usize,
+    pub index_lookup_limit: Option<usize>,
     pub ignore_mempool: bool,
     pub server_banner: String,
     pub args: Vec<String>,
@@ -275,6 +276,10 @@ impl Config {
             _ => log::LevelFilter::Trace,
         };

+        let index_lookup_limit = match config.index_lookup_limit {
+            0 => None,
+            _ => Some(config.index_lookup_limit),
+        };
         let config = Config {
             network: config.network,
             db_path: config.db_dir,
@@ -286,6 +291,7 @@
             monitoring_addr,
             wait_duration: Duration::from_secs(config.wait_duration_secs),
             index_batch_size: config.index_batch_size,
+            index_lookup_limit,
             ignore_mempool: config.ignore_mempool,
             server_banner: config.server_banner,
             args: args.map(|a| a.into_string().unwrap()).collect(),
src/index.rs: 21 changes (20 additions, 1 deletion)

@@ -85,12 +85,18 @@ impl IndexResult {
 /// Confirmed transactions' address index
 pub struct Index {
     store: DBStore,
+    lookup_limit: Option<usize>,
     chain: Chain,
     stats: Stats,
 }

 impl Index {
-    pub(crate) fn load(store: DBStore, mut chain: Chain, metrics: &Metrics) -> Result<Self> {
+    pub(crate) fn load(
+        store: DBStore,
+        mut chain: Chain,
+        metrics: &Metrics,
+        lookup_limit: Option<usize>,
+    ) -> Result<Self> {
         if let Some(row) = store.get_tip() {
             let tip = deserialize(&row).expect("invalid tip");
             let headers = store
@@ -103,6 +109,7 @@

         Ok(Index {
             store,
+            lookup_limit,
             chain,
             stats: Stats::new(metrics),
         })
@@ -112,6 +119,18 @@
         &self.chain
     }

+    pub(crate) fn limit_result<T>(&self, entries: impl Iterator<Item = T>) -> Result<Vec<T>> {
+        let mut entries = entries.fuse();
+        let result: Vec<T> = match self.lookup_limit {
+            Some(lookup_limit) => entries.by_ref().take(lookup_limit).collect(),
+            None => entries.by_ref().collect(),
+        };
+        if entries.next().is_some() {
+            bail!(">{} index entries, query may take too long", result.len())
+        }
+        Ok(result)
+    }
+
     pub(crate) fn filter_by_txid(&self, txid: Txid) -> impl Iterator<Item = BlockHash> + '_ {
         self.store
             .iter_txid(TxidRow::scan_prefix(txid))
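The new `limit_result` helper above is the core of the change: it collects at most `lookup_limit` entries from a fused iterator, then probes the iterator once more, and if anything is still left it bails out instead of scanning without bound. A self-contained sketch of that take-then-probe pattern, with a free function and a plain `String` error standing in for the `Index` method and `anyhow::bail!` (names here are illustrative, not the electrs API):

// Illustrative sketch of the take-then-probe pattern used by `Index::limit_result`.
fn limit_entries<T>(entries: impl Iterator<Item = T>, cap: Option<usize>) -> Result<Vec<T>, String> {
    // `fuse()` guarantees that `next()` keeps returning `None` once the iterator
    // is exhausted, so the probe below is safe even after collecting everything.
    let mut entries = entries.fuse();
    let result: Vec<T> = match cap {
        Some(n) => entries.by_ref().take(n).collect(), // stop collecting at the cap
        None => entries.by_ref().collect(),            // no cap: take everything
    };
    // A single extra `next()` call tells us whether the cap was exceeded.
    if entries.next().is_some() {
        return Err(format!(">{} index entries, query may take too long", result.len()));
    }
    Ok(result)
}

fn main() {
    assert_eq!(limit_entries(0..5, Some(10)).unwrap().len(), 5); // within the cap: all entries
    assert!(limit_entries(0..500, Some(100)).is_err());          // "popular" address: fail fast
    assert_eq!(limit_entries(0..500, None).unwrap().len(), 500); // limit disabled
}

src/status.rs below wraps `filter_by_funding` in this check, so a scripthash query that would touch too many blocks now returns an error through `?` instead of streaming every matching block from the daemon.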
src/status.rs: 2 changes (1 addition, 1 deletion)

@@ -288,7 +288,7 @@ impl ScriptHashStatus {
         type PosTxid = (u32, Txid);
         let mut result = HashMap::<BlockHash, HashMap<PosTxid, Entry>>::new();

-        let funding_blockhashes = index.filter_by_funding(self.scripthash);
+        let funding_blockhashes = index.limit_result(index.filter_by_funding(self.scripthash))?;
         self.for_new_blocks(funding_blockhashes, daemon, |blockhash, block| {
             let txids: Vec<Txid> = block.txdata.iter().map(|tx| tx.txid()).collect();
             for (pos, (tx, txid)) in block.txdata.into_iter().zip(txids.iter()).enumerate() {
src/tracker.rs: 3 changes (2 additions, 1 deletion)

@@ -31,7 +31,8 @@ impl Tracker {
         let store = DBStore::open(Path::new(&config.db_path))?;
         let chain = Chain::new(config.network);
         Ok(Self {
-            index: Index::load(store, chain, &metrics).context("failed to open index")?,
+            index: Index::load(store, chain, &metrics, config.index_lookup_limit)
+                .context("failed to open index")?,
             mempool: Mempool::new(),
             metrics,
             index_batch_size: config.index_batch_size,
tests/run.sh: 7 changes (6 additions, 1 deletion)

@@ -40,7 +40,12 @@ echo `$BTC getblockchaininfo | jq -r '"Generated \(.blocks) regtest blocks (\(.s
 TIP=`$BTC getbestblockhash`

 export RUST_LOG=electrs=debug
-electrs --db-dir=data/electrs --daemon-dir=data/bitcoin --network=regtest 2> data/electrs/regtest-debug.log &
+electrs \
+--index-lookup-limit 200 \
+--db-dir=data/electrs \
+--daemon-dir=data/bitcoin \
+--network=regtest \
+2> data/electrs/regtest-debug.log &
 ELECTRS_PID=$!
 tail_log data/electrs/regtest-debug.log | grep -m1 "serving Electrum RPC"
