Commit

Merge branch 'develop' of https://github.com/dashpay/dash into develop
PastaPastaPasta committed Dec 2, 2024
2 parents bbf24f1 + a82d33e commit 6452277
Showing 14 changed files with 40 additions and 66 deletions.
2 changes: 1 addition & 1 deletion src/init.cpp
@@ -570,7 +570,7 @@ void SetupServerArgs(ArgsManager& argsman)
argsman.AddArg("-maxconnections=<n>", strprintf("Maintain at most <n> connections to peers (temporary service connections excluded) (default: %u). This limit does not apply to connections manually added via -addnode or the addnode RPC, which have a separate limit of %u.", DEFAULT_MAX_PEER_CONNECTIONS, MAX_ADDNODE_CONNECTIONS), ArgsManager::ALLOW_ANY, OptionsCategory::CONNECTION);
argsman.AddArg("-maxreceivebuffer=<n>", strprintf("Maximum per-connection receive buffer, <n>*1000 bytes (default: %u)", DEFAULT_MAXRECEIVEBUFFER), ArgsManager::ALLOW_ANY, OptionsCategory::CONNECTION);
argsman.AddArg("-maxsendbuffer=<n>", strprintf("Maximum per-connection memory usage for the send buffer, <n>*1000 bytes (default: %u)", DEFAULT_MAXSENDBUFFER), ArgsManager::ALLOW_ANY, OptionsCategory::CONNECTION);
argsman.AddArg("-maxtimeadjustment", strprintf("Maximum allowed median peer time offset adjustment. Local perspective of time may be influenced by peers forward or backward by this amount. (default: %u seconds)", DEFAULT_MAX_TIME_ADJUSTMENT), ArgsManager::ALLOW_ANY, OptionsCategory::CONNECTION);
argsman.AddArg("-maxtimeadjustment", strprintf("Maximum allowed median peer time offset adjustment. Local perspective of time may be influenced by outbound peers forward or backward by this amount (default: %u seconds).", DEFAULT_MAX_TIME_ADJUSTMENT), ArgsManager::ALLOW_ANY, OptionsCategory::CONNECTION);
argsman.AddArg("-maxuploadtarget=<n>", strprintf("Tries to keep outbound traffic under the given target per 24h. Limit does not apply to peers with 'download' permission or blocks created within past week. 0 = no limit (default: %s). Optional suffix units [k|K|m|M|g|G|t|T] (default: M). Lowercase is 1000 base while uppercase is 1024 base", DEFAULT_MAX_UPLOAD_TARGET), ArgsManager::ALLOW_ANY, OptionsCategory::CONNECTION);
argsman.AddArg("-onion=<ip:port>", "Use separate SOCKS5 proxy to reach peers via Tor onion services, set -noonion to disable (default: -proxy)", ArgsManager::ALLOW_ANY, OptionsCategory::CONNECTION);
argsman.AddArg("-i2psam=<ip:port>", "I2P SAM proxy to reach I2P peers and accept I2P connections (default: none)", ArgsManager::ALLOW_ANY, OptionsCategory::CONNECTION);
6 changes: 2 additions & 4 deletions src/llmq/signing.cpp
@@ -520,10 +520,8 @@ void CSigningManager::CollectPendingRecoveredSigsToVerify(
void CSigningManager::ProcessPendingReconstructedRecoveredSigs()
{
decltype(pendingReconstructedRecoveredSigs) m;
- {
- LOCK(cs_pending);
- m = std::move(pendingReconstructedRecoveredSigs);
- }
+ WITH_LOCK(cs_pending, swap(m, pendingReconstructedRecoveredSigs));
+
for (const auto& p : m) {
ProcessRecoveredSig(p.second);
}
5 changes: 1 addition & 4 deletions src/llmq/signing_shares.cpp
@@ -1490,10 +1490,7 @@ void CSigSharesManager::AsyncSign(const CQuorumCPtr& quorum, const uint256& id,
void CSigSharesManager::SignPendingSigShares()
{
std::vector<PendingSignatureData> v;
- {
- LOCK(cs_pendingSigns);
- v = std::move(pendingSigns);
- }
+ WITH_LOCK(cs_pendingSigns, v.swap(pendingSigns));

for (const auto& [pQuorum, id, msgHash] : v) {
auto opt_sigShare = CreateSigShare(pQuorum, id, msgHash);
5 changes: 2 additions & 3 deletions src/masternode/meta.cpp
@@ -122,9 +122,8 @@ void CMasternodeMetaMan::RemoveGovernanceObject(const uint256& nGovernanceObject

std::vector<uint256> CMasternodeMetaMan::GetAndClearDirtyGovernanceObjectHashes()
{
- LOCK(cs);
- std::vector<uint256> vecTmp = std::move(vecDirtyGovernanceObjectHashes);
- vecDirtyGovernanceObjectHashes.clear();
+ std::vector<uint256> vecTmp;
+ WITH_LOCK(cs, vecTmp.swap(vecDirtyGovernanceObjectHashes));
return vecTmp;
}

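The three hunks above (signing.cpp, signing_shares.cpp, meta.cpp) apply the same idiom: rather than opening an explicit scope, taking the lock, moving the pending container out, and in one case clearing it afterwards, the new code drains the shared container with a single swap performed while the lock is held. A minimal standalone sketch of the pattern follows, using a plain std::mutex instead of Dash's WITH_LOCK macro; the type and member names are illustrative, not taken from the codebase.

#include <mutex>
#include <vector>

// Hypothetical queue of pending work guarded by a mutex.
struct PendingQueue {
    std::mutex cs;
    std::vector<int> pending; // producers append here while holding cs

    void Add(int item)
    {
        std::lock_guard<std::mutex> lock(cs);
        pending.push_back(item);
    }

    void ProcessPending()
    {
        std::vector<int> work;
        {
            // Equivalent of WITH_LOCK(cs, work.swap(pending)): hold the lock only
            // long enough to exchange the empty local vector with the shared one,
            // so the processing loop below never blocks producers.
            std::lock_guard<std::mutex> lock(cs);
            work.swap(pending);
        }
        for (int item : work) {
            (void)item; // ... process item outside the lock ...
        }
    }
};

int main()
{
    PendingQueue q;
    q.Add(1);
    q.Add(2);
    q.ProcessPending(); // q.pending is now empty; both items were handled
    return 0;
}

Swapping instead of moving also guarantees the shared container is left empty afterwards, which is why the explicit vecDirtyGovernanceObjectHashes.clear() in meta.cpp is no longer needed.
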
6 changes: 5 additions & 1 deletion src/net_processing.cpp
@@ -3689,7 +3689,11 @@ void PeerManagerImpl::ProcessMessage(

int64_t nTimeOffset = nTime - GetTime();
pfrom.nTimeOffset = nTimeOffset;
- AddTimeData(pfrom.addr, nTimeOffset);
+ if (!pfrom.IsInboundConn()) {
+ // Don't use timedata samples from inbound peers to make it
+ // harder for others to tamper with our adjusted time.
+ AddTimeData(pfrom.addr, nTimeOffset);
+ }

// Feeler connections exist only to verify if address is online.
if (pfrom.IsFeelerConn()) {
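This guard means only outbound connections feed samples into the node's adjusted-time calculation, since inbound connections are cheap for an attacker to open in bulk; the -maxtimeadjustment help text in src/init.cpp above is updated to match. A rough sketch of the idea, with simplified stand-in types that are not the actual Dash Core classes:

#include <algorithm>
#include <cstdint>
#include <vector>

struct Peer {
    int64_t time_offset{0}; // peer's reported time minus our local time, in seconds
    bool inbound{false};    // true if the peer initiated the connection
};

class TimeData {
    std::vector<int64_t> samples;
public:
    // Mirrors the gated AddTimeData() call: inbound peers are ignored, so an
    // attacker cannot skew the median simply by connecting to us repeatedly.
    void AddSample(const Peer& peer)
    {
        if (peer.inbound) return;
        samples.push_back(peer.time_offset);
    }

    // Median of the collected offsets; the real implementation additionally
    // caps the applied adjustment at the -maxtimeadjustment limit.
    int64_t MedianOffset() const
    {
        if (samples.empty()) return 0;
        std::vector<int64_t> s = samples;
        std::nth_element(s.begin(), s.begin() + s.size() / 2, s.end());
        return s[s.size() / 2];
    }
};

int main()
{
    TimeData td;
    td.AddSample({/*time_offset=*/5, /*inbound=*/false});   // counted
    td.AddSample({/*time_offset=*/9000, /*inbound=*/true}); // ignored
    return td.MedianOffset() == 5 ? 0 : 1;
}
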
2 changes: 1 addition & 1 deletion src/qt/forms/optionsdialog.ui
@@ -532,7 +532,7 @@
<number>2</number>
</property>
<property name="maximum">
- <number>2000</number>
+ <number>21000000</number>
</property>
<property name="singleStep">
<number>10</number>
6 changes: 3 additions & 3 deletions src/test/coins_tests.cpp
@@ -334,7 +334,7 @@ BOOST_AUTO_TEST_CASE(updatecoins_simulation_test)
tx.vout.resize(1);
tx.vout[0].nValue = i; //Keep txs unique unless intended to duplicate
tx.vout[0].scriptPubKey.assign(InsecureRand32() & 0x3F, 0); // Random sizes so we can test memory usage accounting
- unsigned int height = InsecureRand32();
+ const int height{int(InsecureRand32() >> 1)};
Coin old_coin;

// 2/20 times create a new coinbase
@@ -403,11 +403,11 @@ BOOST_AUTO_TEST_CASE(updatecoins_simulation_test)
// Update the expected result to know about the new output coins
assert(tx.vout.size() == 1);
const COutPoint outpoint(tx.GetHash(), 0);
- result[outpoint] = Coin(tx.vout[0], height, CTransaction(tx).IsCoinBase());
+ result[outpoint] = Coin(tx.vout[0], height, CTransaction{tx}.IsCoinBase());

// Call UpdateCoins on the top cache
CTxUndo undo;
- UpdateCoins(CTransaction(tx), *(stack.back()), undo, height);
+ UpdateCoins(CTransaction{tx}, *(stack.back()), undo, height);

// Update the utxo set for future spends
utxoset.insert(outpoint);
2 changes: 1 addition & 1 deletion src/test/fuzz/coins_view.cpp
@@ -214,7 +214,7 @@ FUZZ_TARGET_INIT(coins_view, initialize_coins_view)
return;
}
bool expected_code_path = false;
- const int height = fuzzed_data_provider.ConsumeIntegral<int>();
+ const int height{int(fuzzed_data_provider.ConsumeIntegral<uint32_t>() >> 1)};
const bool possible_overwrite = fuzzed_data_provider.ConsumeBool();
try {
AddCoins(coins_view_cache, transaction, height, possible_overwrite);
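Both height changes above use the same construction: draw a full 32-bit random value, shift it right by one so the result always lies in [0, 2^31 - 1], and only then convert it to int. The coin height can therefore never be negative and the conversion never flips the sign, which is presumably also why the implicit-integer-sign-change suppression for coins.h is dropped at the end of this commit. A small self-contained check of that invariant; std::mt19937 stands in here for InsecureRand32() and the fuzzer-provided value.

#include <cassert>
#include <cstdint>
#include <limits>
#include <random>

int main()
{
    // 0xFFFFFFFF >> 1 == 0x7FFFFFFF, exactly the largest value an int32_t can hold.
    static_assert((std::numeric_limits<uint32_t>::max() >> 1) ==
                  static_cast<uint32_t>(std::numeric_limits<int32_t>::max()),
                  "shifted value must fit in a signed 32-bit int");

    std::mt19937 rng{42}; // deterministic stand-in for the random/fuzzed source
    for (int i = 0; i < 1000; ++i) {
        const auto raw = static_cast<uint32_t>(rng());
        const int height{int(raw >> 1)}; // same construction as in the tests
        assert(height >= 0);             // never negative, no sign change
    }
    return 0;
}
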
37 changes: 16 additions & 21 deletions test/functional/feature_asset_locks.py
@@ -49,11 +49,11 @@

class AssetLocksTest(DashTestFramework):
def set_test_params(self):
- self.set_dash_test_params(4, 2, [[
+ self.set_dash_test_params(2, 0, [[
"-whitelist=127.0.0.1",
"-llmqtestinstantsenddip0024=llmq_test_instantsend",
"-testactivationheight=mn_rr@1400",
- ]] * 4, evo_count=2)
+ ]] * 2, evo_count=2)

def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
@@ -231,15 +231,14 @@ def generate_batch(self, count):
self.log.info(f"Generating batch of blocks {count} left")
batch = min(50, count)
count -= batch
- self.bump_mocktime(batch)
+ self.bump_mocktime(10 * 60 + 1)
self.generate(self.nodes[1], batch)

# This functional test intentionally setup only 2 MN and only 2 Evo nodes
# to ensure that corner case of quorum with minimum amount of nodes as possible
# does not cause any issues in Dash Core
- def mine_quorum_2_nodes(self, llmq_type_name, llmq_type):
- self.mine_quorum(llmq_type_name=llmq_type_name, expected_members=2, expected_connections=1, expected_contributions=2, expected_commitments=2, llmq_type=llmq_type)
-
+ def mine_quorum_2_nodes(self):
+ self.mine_quorum(llmq_type_name='llmq_test_platform', expected_members=2, expected_connections=1, expected_contributions=2, expected_commitments=2, llmq_type=106)

def run_test(self):
node_wallet = self.nodes[0]
@@ -250,17 +249,9 @@ def run_test(self):
self.activate_v20(expected_activation_height=900)
self.log.info("Activated v20 at height:" + str(node.getblockcount()))

- self.nodes[0].sporkupdate("SPORK_2_INSTANTSEND_ENABLED", 0)
- self.wait_for_sporks_same()
-
- self.mine_quorum_2_nodes(llmq_type_name='llmq_test_instantsend', llmq_type=104)
-
- for _ in range(2):
- self.dynamically_add_masternode(evo=True)
- self.generate(node, 8, sync_fun=lambda: self.sync_blocks())

self.set_sporks()
self.generate(node, 1)
self.mempool_size = 0

key = ECKey()
@@ -328,7 +319,7 @@ def test_asset_locks(self, node_wallet, node, pubkey):
self.create_and_check_block([extra_lock_tx], expected_error = 'bad-cbtx-assetlocked-amount')

self.log.info("Mine a quorum...")
- self.mine_quorum_2_nodes(llmq_type_name='llmq_test_platform', llmq_type=106)
+ self.mine_quorum_2_nodes()

self.validate_credit_pool_balance(locked_1)

@@ -350,6 +341,10 @@ def test_asset_unlocks(self, node_wallet, node, pubkey):
asset_unlock_tx_duplicate_index.vout[0].nValue += COIN
too_late_height = node.getblockcount() + HEIGHT_DIFF_EXPIRING

self.log.info("Mine block to empty mempool")
self.bump_mocktime(10 * 60 + 1)
self.generate(self.nodes[0], 1)

self.check_mempool_result(tx=asset_unlock_tx, result_expected={'allowed': True, 'fees': {'base': Decimal(str(tiny_amount / COIN))}})
self.check_mempool_result(tx=asset_unlock_tx_too_big_fee,
result_expected={'allowed': False, 'reject-reason' : 'max-fee-exceeded'})
@@ -417,7 +412,7 @@ def test_asset_unlocks(self, node_wallet, node, pubkey):
reason = "double copy")

self.log.info("Mining next quorum to check tx 'asset_unlock_tx_late' is still valid...")
- self.mine_quorum_2_nodes(llmq_type_name='llmq_test_platform', llmq_type=106)
+ self.mine_quorum_2_nodes()
self.log.info("Checking credit pool amount is same...")
self.validate_credit_pool_balance(locked - 1 * COIN)
self.check_mempool_result(tx=asset_unlock_tx_late, result_expected={'allowed': True, 'fees': {'base': Decimal(str(tiny_amount / COIN))}})
@@ -435,7 +430,7 @@ def test_asset_unlocks(self, node_wallet, node, pubkey):
result_expected={'allowed': False, 'reject-reason' : 'bad-assetunlock-too-late'})

self.log.info("Checking that two quorums later it is too late because quorum is not active...")
- self.mine_quorum_2_nodes(llmq_type_name='llmq_test_platform', llmq_type=106)
+ self.mine_quorum_2_nodes()
self.log.info("Expecting new reject-reason...")
assert not softfork_active(self.nodes[0], 'withdrawals')
self.check_mempool_result(tx=asset_unlock_tx_too_late,
@@ -513,7 +508,7 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey):

self.log.info("Fast forward to the next day to reset all current unlock limits...")
self.generate_batch(blocks_in_one_day)
- self.mine_quorum_2_nodes(llmq_type_name='llmq_test_platform', llmq_type=106)
+ self.mine_quorum_2_nodes()

total = self.get_credit_pool_balance()
coins = node_wallet.listunspent()
@@ -669,12 +664,12 @@ def test_withdrawal_fork(self, node_wallet, node, pubkey):

while quorumHash_str != node_wallet.quorum('list')['llmq_test_platform'][-1]:
self.log.info("Generate one more quorum until signing quorum becomes the last one in the list")
self.mine_quorum_2_nodes(llmq_type_name="llmq_test_platform", llmq_type=106)
self.mine_quorum_2_nodes()
self.check_mempool_result(tx=asset_unlock_tx, result_expected={'allowed': True, 'fees': {'base': Decimal(str(tiny_amount / COIN))}})

self.log.info("Generate one more quorum after which signing quorum is gone but Asset Unlock tx is still valid")
assert quorumHash_str in node_wallet.quorum('list')['llmq_test_platform']
self.mine_quorum_2_nodes(llmq_type_name="llmq_test_platform", llmq_type=106)
self.mine_quorum_2_nodes()
assert quorumHash_str not in node_wallet.quorum('list')['llmq_test_platform']

if asset_unlock_tx_payload.requestedHeight + HEIGHT_DIFF_EXPIRING > node_wallet.getblockcount():
@@ -686,7 +681,7 @@ def test_withdrawal_fork(self, node_wallet, node, pubkey):
index += 1

self.log.info("Generate one more quorum after which signing quorum becomes too old")
self.mine_quorum_2_nodes(llmq_type_name="llmq_test_platform", llmq_type=106)
self.mine_quorum_2_nodes()
self.check_mempool_result(tx=asset_unlock_tx, result_expected={'allowed': False, 'reject-reason': 'bad-assetunlock-too-old-quorum'})

asset_unlock_tx = self.create_assetunlock(520, 2000 * COIN + 1, pubkey)
15 changes: 2 additions & 13 deletions test/functional/feature_dip3_v19.py
@@ -75,18 +75,8 @@ def run_test(self):
self.log.info("pubkeyoperator should still be shown using legacy scheme")
assert_equal(pubkeyoperator_list_before, pubkeyoperator_list_after)

- self.move_to_next_cycle()
- self.log.info("Cycle H height:" + str(self.nodes[0].getblockcount()))
- self.move_to_next_cycle()
- self.log.info("Cycle H+C height:" + str(self.nodes[0].getblockcount()))
- self.move_to_next_cycle()
- self.log.info("Cycle H+2C height:" + str(self.nodes[0].getblockcount()))
-
- self.mine_cycle_quorum(llmq_type_name='llmq_test_dip0024', llmq_type=103)
-
evo_info_0 = self.dynamically_add_masternode(evo=True, rnd=7)
assert evo_info_0 is not None
- self.generate(self.nodes[0], 8, sync_fun=lambda: self.sync_blocks())

self.log.info("Checking that protxs with duplicate EvoNodes fields are rejected")
evo_info_1 = self.dynamically_add_masternode(evo=True, rnd=7, should_be_rejected=True)
@@ -96,7 +86,6 @@ def run_test(self):
assert evo_info_2 is None
evo_info_3 = self.dynamically_add_masternode(evo=True, rnd=9)
assert evo_info_3 is not None
- self.generate(self.nodes[0], 8, sync_fun=lambda: self.sync_blocks())
self.dynamically_evo_update_service(evo_info_0, 9, should_be_rejected=True)

revoke_protx = self.mninfo[-1].proTxHash
@@ -123,12 +112,12 @@ def run_test(self):
def test_revoke_protx(self, node_idx, revoke_protx, revoke_keyoperator):
funds_address = self.nodes[0].getnewaddress()
fund_txid = self.nodes[0].sendtoaddress(funds_address, 1)
- self.wait_for_instantlock(fund_txid, self.nodes[0])
+ self.bump_mocktime(10 * 60 + 1) # to make tx safe to include in block
tip = self.generate(self.nodes[0], 1)[0]
assert_equal(self.nodes[0].getrawtransaction(fund_txid, 1, tip)['confirmations'], 1)

protx_result = self.nodes[0].protx('revoke', revoke_protx, revoke_keyoperator, 1, funds_address)
- self.wait_for_instantlock(protx_result, self.nodes[0])
+ self.bump_mocktime(10 * 60 + 1) # to make tx safe to include in block
tip = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0]
assert_equal(self.nodes[0].getrawtransaction(protx_result, 1, tip)['confirmations'], 1)
# Revoking a MN results in disconnects. Wait for disconnects to actually happen
9 changes: 0 additions & 9 deletions test/functional/feature_llmq_evo.py
@@ -76,15 +76,6 @@ def run_test(self):
self.nodes[0].sporkupdate("SPORK_2_INSTANTSEND_ENABLED", 0)
self.wait_for_sporks_same()

- self.move_to_next_cycle()
- self.log.info("Cycle H height:" + str(self.nodes[0].getblockcount()))
- self.move_to_next_cycle()
- self.log.info("Cycle H+C height:" + str(self.nodes[0].getblockcount()))
- self.move_to_next_cycle()
- self.log.info("Cycle H+2C height:" + str(self.nodes[0].getblockcount()))
-
- self.mine_cycle_quorum(llmq_type_name='llmq_test_dip0024', llmq_type=103)
-
evo_protxhash_list = list()
for i in range(self.evo_count):
evo_info = self.dynamically_add_masternode(evo=True)
2 changes: 2 additions & 0 deletions test/functional/test_framework/messages.py
@@ -640,6 +640,8 @@ def serialize(self):
# Calculate the merkle root given a vector of transaction hashes
@staticmethod
def get_merkle_root(hashes):
+ if len(hashes) == 0:
+ return 0
while len(hashes) > 1:
newhashes = []
for i in range(0, len(hashes), 2):
8 changes: 4 additions & 4 deletions test/functional/test_framework/test_framework.py
@@ -1313,7 +1313,7 @@ def dynamically_prepare_masternode(self, idx, node_p2p_port, evo=False, rnd=None
collateral_amount = EVONODE_COLLATERAL if evo else MASTERNODE_COLLATERAL
outputs = {collateral_address: collateral_amount, funds_address: 1}
collateral_txid = self.nodes[0].sendmany("", outputs)
- self.wait_for_instantlock(collateral_txid, self.nodes[0])
+ self.bump_mocktime(10 * 60 + 1) # to make tx safe to include in block
tip = self.generate(self.nodes[0], 1)[0]

rawtx = self.nodes[0].getrawtransaction(collateral_txid, 1, tip)
@@ -1334,7 +1334,7 @@ def dynamically_prepare_masternode(self, idx, node_p2p_port, evo=False, rnd=None
else:
protx_result = self.nodes[0].protx("register", collateral_txid, collateral_vout, ipAndPort, owner_address, bls['public'], voting_address, operatorReward, reward_address, funds_address, True)

- self.wait_for_instantlock(protx_result, self.nodes[0])
+ self.bump_mocktime(10 * 60 + 1) # to make tx safe to include in block
tip = self.generate(self.nodes[0], 1)[0]

assert_equal(self.nodes[0].getrawtransaction(protx_result, 1, tip)['confirmations'], 1)
@@ -1356,14 +1356,14 @@ def dynamically_evo_update_service(self, evo_info, rnd=None, should_be_rejected=
platform_http_port = '%d' % (r + 2)

fund_txid = self.nodes[0].sendtoaddress(funds_address, 1)
- self.wait_for_instantlock(fund_txid, self.nodes[0])
+ self.bump_mocktime(10 * 60 + 1) # to make tx safe to include in block
tip = self.generate(self.nodes[0], 1)[0]
assert_equal(self.nodes[0].getrawtransaction(fund_txid, 1, tip)['confirmations'], 1)

protx_success = False
try:
protx_result = self.nodes[0].protx('update_service_evo', evo_info.proTxHash, evo_info.addr, evo_info.keyOperator, platform_node_id, platform_p2p_port, platform_http_port, operator_reward_address, funds_address)
- self.wait_for_instantlock(protx_result, self.nodes[0])
+ self.bump_mocktime(10 * 60 + 1) # to make tx safe to include in block
tip = self.generate(self.nodes[0], 1)[0]
assert_equal(self.nodes[0].getrawtransaction(protx_result, 1, tip)['confirmations'], 1)
self.log.info("Updated EvoNode %s: platformNodeID=%s, platformP2PPort=%s, platformHTTPPort=%s" % (evo_info.proTxHash, platform_node_id, platform_p2p_port, platform_http_port))
1 change: 0 additions & 1 deletion test/sanitizer_suppressions/ubsan
@@ -51,7 +51,6 @@ implicit-integer-sign-change:bech32.cpp
implicit-integer-sign-change:common/bloom.cpp
implicit-integer-sign-change:chain.cpp
implicit-integer-sign-change:chain.h
- implicit-integer-sign-change:coins.h
implicit-integer-sign-change:compat/stdin.cpp
implicit-integer-sign-change:compressor.h
implicit-integer-sign-change:crc32c/
