diff --git a/test/functional/feature_assumeutxo.py b/test/functional/feature_assumeutxo.py index 60dd751ff8..5c0bc6c705 100755 --- a/test/functional/feature_assumeutxo.py +++ b/test/functional/feature_assumeutxo.py @@ -45,9 +45,9 @@ MiniWallet, ) -START_HEIGHT = 199 -SNAPSHOT_BASE_HEIGHT = 299 -FINAL_HEIGHT = 399 +START_HEIGHT = 2099 +SNAPSHOT_BASE_HEIGHT = 2199 +FINAL_HEIGHT = 2299 COMPLETE_IDX = {'synced': True, 'best_block_height': FINAL_HEIGHT} @@ -96,7 +96,7 @@ def expected_error(log_msg="", rpc_details=""): f.write(valid_snapshot_contents[:32]) f.write((valid_num_coins + off).to_bytes(8, "little")) f.write(valid_snapshot_contents[32 + 8:]) - expected_error(log_msg=f"bad snapshot - coins left over after deserializing 298 coins" if off == -1 else f"bad snapshot format or truncated snapshot after deserializing 299 coins") + expected_error(log_msg=f"bad snapshot - coins left over after deserializing 2198 coins" if off == -1 else f"bad snapshot format or truncated snapshot after deserializing 2199 coins") self.log.info(" - snapshot file with alternated UTXO data") cases = [ @@ -111,7 +111,7 @@ def expected_error(log_msg="", rpc_details=""): f.write(valid_snapshot_contents[:(32 + 8 + offset)]) f.write(content) f.write(valid_snapshot_contents[(32 + 8 + offset + len(content)):]) - expected_error(log_msg=f"[snapshot] bad snapshot content hash: expected a4bf3407ccb2cc0145c49ebba8fa91199f8a3903daf0883875941497d2493c27, got {wrong_hash}") + expected_error(log_msg=f"[snapshot] bad snapshot content hash: expected a9e20f6c0c6531e44789f7a29df1939fa1c2e7d5c451b25c5201880628c57940, got {wrong_hash}") def test_invalid_chainstate_scenarios(self): self.log.info("Test different scenarios of invalid snapshot chainstate in datadir") @@ -197,8 +197,8 @@ def run_test(self): assert_equal( dump_output['txoutset_hash'], - "a4bf3407ccb2cc0145c49ebba8fa91199f8a3903daf0883875941497d2493c27") - assert_equal(dump_output["nchaintx"], 334) + 
"a9e20f6c0c6531e44789f7a29df1939fa1c2e7d5c451b25c5201880628c57940") + assert_equal(dump_output["nchaintx"], 2200) assert_equal(n0.getblockchaininfo()["blocks"], SNAPSHOT_BASE_HEIGHT) # Mine more blocks on top of the snapshot that n1 hasn't yet seen. This diff --git a/test/functional/feature_assumevalid.py b/test/functional/feature_assumevalid.py index 613d2eab14..6c59714203 100755 --- a/test/functional/feature_assumevalid.py +++ b/test/functional/feature_assumevalid.py @@ -29,6 +29,7 @@ block 200. node2 will reject block 102 since it's assumed valid, but it isn't buried by at least two weeks' work. """ +import time from test_framework.blocktools import ( COINBASE_MATURITY, @@ -52,6 +53,7 @@ from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal from test_framework.wallet_util import generate_keypair +import inspect class BaseNode(P2PInterface): @@ -85,6 +87,25 @@ def send_blocks_until_disconnected(self, p2p_conn): assert not p2p_conn.is_connected break + def assert_blockchain_height(self, node, height): + """Wait until the blockchain is no longer advancing and verify it's reached the expected height.""" + last_height = node.getblock(node.getbestblockhash())['height'] + timeout = 10 + while True: + if timeout < 0: + assert False, "blockchain too short after timeout: %d" % current_height + + time.sleep(0.25) + current_height = node.getblock(node.getbestblockhash())['height'] + if current_height > height: + assert False, "blockchain too long: %d" % current_height + elif current_height != last_height: + last_height = current_height + timeout = 10 # reset the timeout + elif current_height == height: + break + timeout = timeout - 0.25 + def run_test(self): # Build the blockchain self.tip = int(self.nodes[0].getbestblockhash(), 16) @@ -107,7 +128,7 @@ def run_test(self): height += 1 # Bury the block 100 deep so the coinbase output is spendable - for _ in range(100): + for _ in range(COINBASE_MATURITY): block = 
create_block(self.tip, create_coinbase(height), self.block_time) block.solve() self.blocks.append(block) @@ -130,7 +151,7 @@ def run_test(self): height += 1 # Bury the assumed valid block 2100 deep - for _ in range(2100): + for _ in range(10000): block = create_block(self.tip, create_coinbase(height), self.block_time) block.solve() self.blocks.append(block) @@ -139,36 +160,47 @@ def run_test(self): height += 1 # Start node1 and node2 with assumevalid so they accept a block with a bad signature. - self.start_node(1, extra_args=["-assumevalid=" + hex(block102.sha256)]) - self.start_node(2, extra_args=["-assumevalid=" + hex(block102.sha256)]) + self.start_node(1, extra_args=["-assumevalid=" + hex(block102.sha256)[2:]]) + self.start_node(2, extra_args=["-assumevalid=" + hex(block102.sha256)[2:]]) p2p0 = self.nodes[0].add_p2p_connection(BaseNode()) p2p0.send_header_for_blocks(self.blocks[0:2000]) - p2p0.send_header_for_blocks(self.blocks[2000:]) + p2p0.send_header_for_blocks(self.blocks[2000:4000]) + p2p0.send_header_for_blocks(self.blocks[4000:6000]) + p2p0.send_header_for_blocks(self.blocks[6000:8000]) + p2p0.send_header_for_blocks(self.blocks[8000:10000]) + p2p0.send_header_for_blocks(self.blocks[10000:]) # Send blocks to node0. Block 102 will be rejected. self.send_blocks_until_disconnected(p2p0) - self.wait_until(lambda: self.nodes[0].getblockcount() >= COINBASE_MATURITY + 1) - assert_equal(self.nodes[0].getblockcount(), COINBASE_MATURITY + 1) + self.assert_blockchain_height(self.nodes[0], COINBASE_MATURITY+1) p2p1 = self.nodes[1].add_p2p_connection(BaseNode()) p2p1.send_header_for_blocks(self.blocks[0:2000]) - p2p1.send_header_for_blocks(self.blocks[2000:]) + p2p1.send_header_for_blocks(self.blocks[2000:4000]) + p2p1.send_header_for_blocks(self.blocks[4000:6000]) + p2p1.send_header_for_blocks(self.blocks[6000:8000]) + p2p1.send_header_for_blocks(self.blocks[8000:10000]) + p2p1.send_header_for_blocks(self.blocks[10000:]) # Send all blocks to node1. 
All blocks will be accepted. - for i in range(2202): + # Send only a subset to speed this up + p2p1 = self.nodes[1].add_p2p_connection(BaseNode()) + for i in range(1000): p2p1.send_message(msg_block(self.blocks[i])) # Syncing 2200 blocks can take a while on slow systems. Give it plenty of time to sync. - p2p1.sync_with_ping(960) - assert_equal(self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'], 2202) + timeout = time.time() + 200 + while time.time() < timeout: + if self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'] == 1000: + break + assert_equal(self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'], 1000) p2p2 = self.nodes[2].add_p2p_connection(BaseNode()) p2p2.send_header_for_blocks(self.blocks[0:200]) # Send blocks to node2. Block 102 will be rejected. self.send_blocks_until_disconnected(p2p2) - self.wait_until(lambda: self.nodes[2].getblockcount() >= COINBASE_MATURITY + 1) - assert_equal(self.nodes[2].getblockcount(), COINBASE_MATURITY + 1) + self.assert_blockchain_height(self.nodes[2], COINBASE_MATURITY+1) if __name__ == '__main__': diff --git a/test/functional/feature_bip68_sequence.py b/test/functional/feature_bip68_sequence.py index 8768d4040d..948c80643d 100755 --- a/test/functional/feature_bip68_sequence.py +++ b/test/functional/feature_bip68_sequence.py @@ -5,8 +5,10 @@ """Test BIP68 implementation.""" import time +import random from test_framework.blocktools import ( + COINBASE_MATURITY, NORMAL_GBT_REQUEST_PARAMS, add_witness_commitment, create_block, @@ -72,11 +74,11 @@ def run_test(self): self.log.info("Running test sequence-lock-unconfirmed-inputs") self.test_sequence_lock_unconfirmed_inputs() - self.log.info("Running test BIP68 not consensus before activation") - self.test_bip68_not_consensus() + #self.log.info("Running test BIP68 not consensus before activation") + #self.test_bip68_not_consensus() - self.log.info("Activating BIP68 (and 112/113)") - self.activateCSV() + #self.log.info("Activating BIP68 
(and 112/113)") + #self.activateCSV() self.log.info("Verifying nVersion=2 transactions are standard.") self.log.info("Note that nVersion=2 transactions are always standard (independent of BIP68 activation status).") @@ -138,7 +140,7 @@ def test_sequence_lock_confirmed_inputs(self): import random num_outputs = random.randint(1, max_outputs) self.wallet.send_self_transfer_multi(from_node=self.nodes[0], num_outputs=num_outputs) - self.generate(self.wallet, 1) + self.generate(self.wallet, COINBASE_MATURITY + 1) utxos = self.wallet.get_utxos(include_immature_coinbase=False) diff --git a/test/functional/feature_block.py b/test/functional/feature_block.py index 8a95975184..e2f1be253c 100755 --- a/test/functional/feature_block.py +++ b/test/functional/feature_block.py @@ -56,6 +56,7 @@ from test_framework.wallet_util import generate_keypair from data import invalid_txs +from test_framework.qtumconfig import * # Use this class for tests that require behavior other than normal p2p behavior. # For now, it is used to serialize a bloated varint (b64). @@ -104,13 +105,13 @@ def run_test(self): self.block_heights[self.genesis_hash] = 0 self.spendable_outputs = [] - # Create a new block - b_dup_cb = self.next_block('dup_cb') - b_dup_cb.vtx[0].vin[0].scriptSig = DUPLICATE_COINBASE_SCRIPT_SIG - b_dup_cb.vtx[0].rehash() - duplicate_tx = b_dup_cb.vtx[0] - b_dup_cb = self.update_block('dup_cb', []) - self.send_blocks([b_dup_cb]) + # Create a new block -> QTUM: not possible since the coinbase script must strictly conform to bip34 from genesis + #b_dup_cb = self.next_block('dup_cb') + #b_dup_cb.vtx[0].vin[0].scriptSig = DUPLICATE_COINBASE_SCRIPT_SIG + #b_dup_cb.vtx[0].rehash() + #duplicate_tx = b_dup_cb.vtx[0] + #b_dup_cb = self.update_block('dup_cb', []) + #self.send_blocks([b_dup_cb]) b0 = self.next_block(0) self.save_spendable_output() @@ -118,15 +119,18 @@ def run_test(self): # These constants chosen specifically to trigger an immature coinbase spend # at a certain time below. 
- NUM_BUFFER_BLOCKS_TO_GENERATE = 99 + # NUM_BUFFER_BLOCKS_TO_GENERATE = 98-15+COINBASE_MATURITY + NUM_BUFFER_BLOCKS_TO_GENERATE = COINBASE_MATURITY-1 NUM_OUTPUTS_TO_COLLECT = 33 # Allow the block to mature blocks = [] for i in range(NUM_BUFFER_BLOCKS_TO_GENERATE): blocks.append(self.next_block(f"maturitybuffer.{i}")) - self.save_spendable_output() - self.send_blocks(blocks) + self.save_spendable_output() + for i in range(0, len(blocks), 100): + self.send_blocks(blocks[i:i+100]) + self.send_blocks(blocks[i:]) # collect spendable outputs now to avoid cluttering the code later on out = [] @@ -222,7 +226,7 @@ def run_test(self): self.log.info("Reject a block where the miner creates too much coinbase reward") self.move_tip(6) b9 = self.next_block(9, spend=out[4], additional_coinbase_value=1) - self.send_blocks([b9], success=False, reject_reason='bad-cb-amount', reconnect=True) + self.send_blocks([b9], success=False, reject_reason='Reward check failed', reconnect=True) # Create a fork that ends in a block with too much fee (the one that causes the reorg) # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) @@ -234,7 +238,7 @@ def run_test(self): self.send_blocks([b10], False) b11 = self.next_block(11, spend=out[4], additional_coinbase_value=1) - self.send_blocks([b11], success=False, reject_reason='bad-cb-amount', reconnect=True) + self.send_blocks([b11], success=False, reject_reason='Reward check failed', reconnect=True) # Try again, but with a valid fork first # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) @@ -247,7 +251,7 @@ def run_test(self): b13 = self.next_block(13, spend=out[4]) self.save_spendable_output() b14 = self.next_block(14, spend=out[5], additional_coinbase_value=1) - self.send_blocks([b12, b13, b14], success=False, reject_reason='bad-cb-amount', reconnect=True) + self.send_blocks([b12, b13, b14], success=False, reject_reason='Reward check failed', reconnect=True) # New tip should be b13. 
assert_equal(node.getbestblockhash(), b13.hash) @@ -332,7 +336,7 @@ def run_test(self): self.save_spendable_output() self.log.info("Reject a block of weight MAX_BLOCK_WEIGHT + 4") - self.move_tip(15) + self.move_tip(23) b24 = self.next_block(24, spend=out[6]) script_length = (MAX_BLOCK_WEIGHT - b24.get_weight() - 276) // 4 script_output = CScript([b'\x00' * (script_length + 1)]) @@ -340,6 +344,7 @@ def run_test(self): b24 = self.update_block(24, [tx]) assert_equal(b24.get_weight(), MAX_BLOCK_WEIGHT + 1 * 4) self.send_blocks([b24], success=False, reject_reason='bad-blk-length', reconnect=True) + self.move_tip(15) b25 = self.next_block(25, spend=out[7]) self.send_blocks([b25], False) @@ -379,7 +384,7 @@ def run_test(self): self.move_tip(23) b30 = self.next_block(30) b30.vtx[0].vin[0].scriptSig = bytes(b30.vtx[0].vin[0].scriptSig) # Convert CScript to raw bytes - b30.vtx[0].vin[0].scriptSig += b'\x00' * (100 - len(b30.vtx[0].vin[0].scriptSig)) # Fill with 0s + b30.vtx[0].vin[0].scriptSig += b'\x00' * 97 assert_equal(len(b30.vtx[0].vin[0].scriptSig), 100) b30.vtx[0].rehash() b30 = self.update_block(30, []) @@ -629,7 +634,7 @@ def run_test(self): self.tip = b46 assert 46 not in self.blocks self.blocks[46] = b46 - self.send_blocks([b46], success=False, reject_reason='bad-blk-length', reconnect=True) + self.send_blocks([b46], success=False, reject_reason='bad-cb-missing', reconnect=True) self.log.info("Reject a block with invalid work") self.move_tip(44) @@ -644,10 +649,10 @@ def run_test(self): self.log.info("Reject a block with a timestamp >2 hours in the future") self.move_tip(44) b48 = self.next_block(48) - b48.nTime = int(time.time()) + 60 * 60 * 3 + b48.nBits -= 1 # Header timestamp has changed. Re-solve the block. 
b48.solve() - self.send_blocks([b48], False, force_send=True, reject_reason='time-too-new') + self.send_blocks([b48], False, force_send=True, reconnect=True) self.log.info("Reject a block with invalid merkle hash") self.move_tip(44) @@ -689,9 +694,9 @@ def run_test(self): self.log.info("Reject a block with timestamp before MedianTimePast") b54 = self.next_block(54, spend=out[15]) - b54.nTime = b35.nTime - 1 + b54.nBits -= 1 b54.solve() - self.send_blocks([b54], False, force_send=True, reject_reason='time-too-old', reconnect=True) + self.send_blocks([b54], False, force_send=True, reconnect=True) # valid timestamp self.move_tip(53) @@ -707,14 +712,14 @@ def run_test(self): # -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) # \-> b54 (15) # -> b44 (14)\-> b48 () -> b48p () - self.log.info("Accept a previously rejected future block at a later time") - node.setmocktime(int(time.time()) + 2*60*60) - self.move_tip(48) - self.block_heights[b48.sha256] = self.block_heights[b44.sha256] + 1 # b48 is a parent of b44 - b48p = self.next_block("48p") - self.send_blocks([b48, b48p], success=True) # Reorg to the longer chain - node.invalidateblock(b48p.hash) # mark b48p as invalid - node.setmocktime(0) + # self.log.info("Accept a previously rejected future block at a later time") + # node.setmocktime(int(time.time()) + 2*60*60) + # self.move_tip(48) + # self.block_heights[b48.sha256] = self.block_heights[b44.sha256] + 1 # b48 is a parent of b44 + # b48p = self.next_block("48p") + # self.send_blocks([b48, b48p], success=True) # Reorg to the longer chain + # node.invalidateblock(b48p.hash) # mark b48p as invalid + # node.setmocktime(0) # Test Merkle tree malleability # @@ -800,16 +805,16 @@ def run_test(self): tx = CTransaction() assert len(out[17].vout) < 42 tx.vin.append(CTxIn(COutPoint(out[17].sha256, 42), CScript([OP_TRUE]), SEQUENCE_FINAL)) - tx.vout.append(CTxOut(0, b"")) + tx.vout.append(CTxOut(0, CScript([OP_TRUE]))) 
tx.calc_sha256() b58 = self.update_block(58, [tx]) self.send_blocks([b58], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True) # tx with output value > input value self.log.info("Reject a block with a transaction with outputs > inputs") - self.move_tip(57) + self.move_tip("57p2") self.next_block(59) - tx = self.create_and_sign_transaction(out[17], 51 * COIN) + tx = self.create_and_sign_transaction(out[17], int(INITIAL_BLOCK_REWARD+1) * COIN) b59 = self.update_block(59, [tx]) self.send_blocks([b59], success=False, reject_reason='bad-txns-in-belowout', reconnect=True) @@ -828,47 +833,50 @@ def run_test(self): # not-fully-spent transaction in the same chain. To test, make identical coinbases; # the second one should be rejected. See also CVE-2012-1909. # - self.log.info("Reject a block with a transaction with a duplicate hash of a previous transaction (BIP30)") - self.move_tip(60) - b61 = self.next_block(61) - b61.vtx[0].vin[0].scriptSig = DUPLICATE_COINBASE_SCRIPT_SIG - b61.vtx[0].rehash() - b61 = self.update_block(61, []) - assert_equal(duplicate_tx.serialize(), b61.vtx[0].serialize()) - # BIP30 is always checked on regtest, regardless of the BIP34 activation height - self.send_blocks([b61], success=False, reject_reason='bad-txns-BIP30', reconnect=True) + #self.log.info("Reject a block with a transaction with a duplicate hash of a previous transaction (BIP30)") + #self.move_tip(60) + # QTUM: Since we enable BIP34 from block 0, this BIP30 test is no longer relevant. This test has therefore been removed. + # QTUM: Since we enable BIP34 from block 0, this BIP30 test is no longer relevant. This test has therefore been removed. 
+ #self.log.info("Reject a block with a transaction with a duplicate hash of a previous transaction (BIP30)") + #self.move_tip(60) + #b61 = self.next_block(61, spend=out[18]) + #b61.vtx[0].vin[0].scriptSig = b60.vtx[0].vin[0].scriptSig # Equalize the coinbases + #b61.vtx[0].rehash() + #b61 = self.update_block(61, []) + #assert_equal(b60.vtx[0].serialize(), b61.vtx[0].serialize()) + #self.send_blocks([b61], success=False, reject_reason='bad-txns-BIP30', reconnect=True) # Test BIP30 (allow duplicate if spent) # # -> b57 (16) -> b60 () # \-> b_spend_dup_cb (b_dup_cb) -> b_dup_2 () # - self.move_tip(57) - self.next_block('spend_dup_cb') - tx = CTransaction() - tx.vin.append(CTxIn(COutPoint(duplicate_tx.sha256, 0))) - tx.vout.append(CTxOut(0, CScript([OP_TRUE]))) - self.sign_tx(tx, duplicate_tx) - tx.rehash() - b_spend_dup_cb = self.update_block('spend_dup_cb', [tx]) - - b_dup_2 = self.next_block('dup_2') - b_dup_2.vtx[0].vin[0].scriptSig = DUPLICATE_COINBASE_SCRIPT_SIG - b_dup_2.vtx[0].rehash() - b_dup_2 = self.update_block('dup_2', []) - assert_equal(duplicate_tx.serialize(), b_dup_2.vtx[0].serialize()) - assert_equal(self.nodes[0].gettxout(txid=duplicate_tx.hash, n=0)['confirmations'], 119) - self.send_blocks([b_spend_dup_cb, b_dup_2], success=True) - # The duplicate has less confirmations - assert_equal(self.nodes[0].gettxout(txid=duplicate_tx.hash, n=0)['confirmations'], 1) + # self.move_tip(57) + # b_spend_dup_cb = self.next_block('spend_dup_cb') + # tx = CTransaction() + # tx.vin.append(CTxIn(COutPoint(duplicate_tx.sha256, 0))) + # tx.vout.append(CTxOut(0, CScript([OP_TRUE]))) + # self.sign_tx(tx, duplicate_tx) + # tx.rehash() + # b_spend_dup_cb = self.update_block('spend_dup_cb', [tx]) + # + # b_dup_2 = self.next_block('dup_2') + # b_dup_2.vtx[0].vin[0].scriptSig = DUPLICATE_COINBASE_SCRIPT_SIG + # b_dup_2.vtx[0].rehash() + # b_dup_2 = self.update_block('dup_2', []) + # assert_equal(duplicate_tx.serialize(), b_dup_2.vtx[0].serialize()) + # 
assert_equal(self.nodes[0].gettxout(txid=duplicate_tx.hash, n=0)['confirmations'], 119) + # self.send_blocks([b_spend_dup_cb, b_dup_2], success=True) + # # The duplicate has less confirmations + # assert_equal(self.nodes[0].gettxout(txid=duplicate_tx.hash, n=0)['confirmations'], 1) # Test tx.isFinal is properly rejected (not an exhaustive tx.isFinal test, that should be in data-driven transaction tests) # - # -> b_spend_dup_cb (b_dup_cb) -> b_dup_2 () + # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) # \-> b62 (18) # self.log.info("Reject a block with a transaction with a nonfinal locktime") - self.move_tip('dup_2') + self.move_tip(60) self.next_block(62) tx = CTransaction() tx.nLockTime = 0xffffffff # this locktime is non-final @@ -885,7 +893,7 @@ def run_test(self): # \-> b63 (-) # self.log.info("Reject a block with a coinbase transaction with a nonfinal locktime") - self.move_tip('dup_2') + self.move_tip(60) b63 = self.next_block(63) b63.vtx[0].nLockTime = 0xffffffff b63.vtx[0].vin[0].nSequence = 0xDEADBEEF @@ -894,7 +902,7 @@ def run_test(self): self.send_blocks([b63], success=False, reject_reason='bad-txns-nonfinal', reconnect=True) # This checks that a block with a bloated VARINT between the block_header and the array of tx such that - # the block is > MAX_BLOCK_WEIGHT with the bloated varint, but <= MAX_BLOCK_WEIGHT without the bloated varint, + # the block is > MAX_BLOCK_BASE_SIZE with the bloated varint, but <= MAX_BLOCK_BASE_SIZE without the bloated varint, # does not cause a subsequent, identical block with canonical encoding to be rejected. The test does not # care whether the bloated block is accepted or rejected; it only cares that the second block is accepted. 
# @@ -908,7 +916,7 @@ def run_test(self): # b64 is a good block (same as b64 but w/ canonical varint) # self.log.info("Accept a valid block even if a bloated version of the block has previously been sent") - self.move_tip('dup_2') + self.move_tip(60) regular_block = self.next_block("64a", spend=out[18]) # make it a "broken_block," with non-canonical serialization @@ -934,7 +942,7 @@ def run_test(self): node.disconnect_p2ps() self.reconnect_p2p() - self.move_tip('dup_2') + self.move_tip(60) b64 = CBlock(b64a) b64.vtx = copy.deepcopy(b64a.vtx) assert_equal(b64.hash, b64a.hash) @@ -1001,7 +1009,7 @@ def run_test(self): self.next_block(68, additional_coinbase_value=10) tx = self.create_and_sign_transaction(out[20], out[20].vout[0].nValue - 9) b68 = self.update_block(68, [tx]) - self.send_blocks([b68], success=False, reject_reason='bad-cb-amount', reconnect=True) + self.send_blocks([b68], success=False, reject_reason='block-reward-invalid', reconnect=True) self.log.info("Accept a block claiming the correct subsidy in the coinbase transaction") self.move_tip(65) @@ -1270,7 +1278,7 @@ def run_test(self): self.log.info("Test a re-org of one week's worth of blocks (1088 blocks)") self.move_tip(88) - LARGE_REORG_SIZE = 1088 + LARGE_REORG_SIZE = 200 blocks = [] spend = out[32] for i in range(89, LARGE_REORG_SIZE + 89): @@ -1286,7 +1294,7 @@ def run_test(self): self.save_spendable_output() spend = self.get_spendable_output() - self.send_blocks(blocks, True, timeout=2440) + self.send_blocks(blocks, True, timeout=480) chain1_tip = i # now create alt chain of same length @@ -1294,18 +1302,18 @@ def run_test(self): blocks2 = [] for i in range(89, LARGE_REORG_SIZE + 89): blocks2.append(self.next_block("alt" + str(i))) - self.send_blocks(blocks2, False, force_send=False) + self.send_blocks(blocks2, False, force_send=True) # extend alt chain to trigger re-org block = self.next_block("alt" + str(chain1_tip + 1)) - self.send_blocks([block], True, timeout=2440) + 
self.send_blocks([block], True, timeout=480) # ... and re-org back to the first chain self.move_tip(chain1_tip) block = self.next_block(chain1_tip + 1) self.send_blocks([block], False, force_send=True) block = self.next_block(chain1_tip + 2) - self.send_blocks([block], True, timeout=2440) + self.send_blocks([block], True, timeout=480) self.log.info("Reject a block with an invalid block header version") b_v1 = self.next_block('b_v1', version=1) @@ -1317,7 +1325,7 @@ def run_test(self): b_cb34.vtx[0].rehash() b_cb34.hashMerkleRoot = b_cb34.calc_merkle_root() b_cb34.solve() - self.send_blocks([b_cb34], success=False, reject_reason='bad-cb-height', reconnect=True) + self.send_blocks([b_cb34], success=False, reject_reason='block height mismatch in coinbase', force_send=True, reconnect=True) # Helper methods ################ diff --git a/test/functional/feature_coinstatsindex.py b/test/functional/feature_coinstatsindex.py index d6c1567e64..abfc06da9f 100755 --- a/test/functional/feature_coinstatsindex.py +++ b/test/functional/feature_coinstatsindex.py @@ -55,7 +55,7 @@ def run_test(self): self._test_init_index_after_reorg() def block_sanity_check(self, block_info): - block_subsidy = 50 + block_subsidy = 20000 assert_equal( block_info['prevout_spent'] + block_subsidy, block_info['new_outputs_ex_coinbase'] + block_info['coinbase'] + block_info['unspendable'] @@ -97,7 +97,7 @@ def _test_coin_stats_index(self): for hash_option in index_hash_options: # Fetch old stats by height - res2 = index_node.gettxoutsetinfo(hash_option, 102) + res2 = index_node.gettxoutsetinfo(hash_option, 2002) del res2['block_info'], res2['total_unspendable_amount'] res2.pop('muhash', None) assert_equal(res0, res2) @@ -116,14 +116,14 @@ def _test_coin_stats_index(self): for hash_option in index_hash_options: # Genesis block is unspendable res4 = index_node.gettxoutsetinfo(hash_option, 0) - assert_equal(res4['total_unspendable_amount'], 50) + assert_equal(res4['total_unspendable_amount'], 20000) 
assert_equal(res4['block_info'], { - 'unspendable': 50, + 'unspendable': 20000, 'prevout_spent': 0, 'new_outputs_ex_coinbase': 0, 'coinbase': 0, 'unspendables': { - 'genesis_block': 50, + 'genesis_block': 20000, 'bip30': 0, 'scripts': 0, 'unclaimed_rewards': 0 @@ -132,18 +132,18 @@ def _test_coin_stats_index(self): self.block_sanity_check(res4['block_info']) # Test an older block height that included a normal tx - res5 = index_node.gettxoutsetinfo(hash_option, 102) - assert_equal(res5['total_unspendable_amount'], 50) + res5 = index_node.gettxoutsetinfo(hash_option, 2002) + assert_equal(res5['total_unspendable_amount'], 20000) assert_equal(res5['block_info'], { - 'unspendable': 0, - 'prevout_spent': 50, - 'new_outputs_ex_coinbase': Decimal('49.99968800'), - 'coinbase': Decimal('50.00031200'), + 'unspendable': Decimal('0E-8'), + 'prevout_spent': Decimal('20000.00000000'), + 'new_outputs_ex_coinbase': Decimal('19999.99688000'), + 'coinbase': Decimal('20000.00312000'), 'unspendables': { - 'genesis_block': 0, - 'bip30': 0, - 'scripts': 0, - 'unclaimed_rewards': 0, + 'genesis_block': Decimal('0E-8'), + 'bip30': Decimal('0E-8'), + 'scripts': Decimal('0E-8'), + 'unclaimed_rewards': Decimal('0E-8'), } }) self.block_sanity_check(res5['block_info']) @@ -170,26 +170,26 @@ def _test_coin_stats_index(self): for hash_option in index_hash_options: # Check all amounts were registered correctly - res6 = index_node.gettxoutsetinfo(hash_option, 108) - assert_equal(res6['total_unspendable_amount'], Decimal('70.99000000')) + res6 = index_node.gettxoutsetinfo(hash_option, 2008) + assert_equal(res6['total_unspendable_amount'], Decimal('20020.99000000')) assert_equal(res6['block_info'], { 'unspendable': Decimal('20.99000000'), - 'prevout_spent': 71, - 'new_outputs_ex_coinbase': Decimal('49.99999000'), - 'coinbase': Decimal('50.01001000'), + 'prevout_spent': Decimal('20021.00000000'), + 'new_outputs_ex_coinbase': Decimal('19999.99920000'), + 'coinbase': Decimal('20000.01080000'), 
'unspendables': { - 'genesis_block': 0, - 'bip30': 0, + 'genesis_block': Decimal('0E-8'), + 'bip30': Decimal('0E-8'), 'scripts': Decimal('20.99000000'), - 'unclaimed_rewards': 0, + 'unclaimed_rewards': Decimal('0E-8'), } }) self.block_sanity_check(res6['block_info']) # Create a coinbase that does not claim full subsidy and also # has two outputs - cb = create_coinbase(109, nValue=35) - cb.vout.append(CTxOut(5 * COIN, CScript([OP_FALSE]))) + cb = create_coinbase(2009, nValue=35) + cb.vout.append(CTxOut(5000 * COIN, CScript([OP_FALSE]))) cb.rehash() # Generate a block that includes previous coinbase @@ -201,18 +201,18 @@ def _test_coin_stats_index(self): self.sync_all() for hash_option in index_hash_options: - res7 = index_node.gettxoutsetinfo(hash_option, 109) - assert_equal(res7['total_unspendable_amount'], Decimal('80.99000000')) + res7 = index_node.gettxoutsetinfo(hash_option, 2009) + assert_equal(res7['total_unspendable_amount'], Decimal('35020.98999965')) assert_equal(res7['block_info'], { - 'unspendable': 10, + 'unspendable': Decimal('14999.99999965'), 'prevout_spent': 0, 'new_outputs_ex_coinbase': 0, - 'coinbase': 40, + 'coinbase': Decimal('5000.00000035'), 'unspendables': { 'genesis_block': 0, 'bip30': 0, 'scripts': 0, - 'unclaimed_rewards': 10 + 'unclaimed_rewards': Decimal('14999.99999965') } }) self.block_sanity_check(res7['block_info']) @@ -246,7 +246,7 @@ def _test_use_index_option(self): self.log.info("Test use_index option for nodes running the index") self.connect_nodes(0, 1) - self.nodes[0].waitforblockheight(110) + self.nodes[0].waitforblockheight(2010) res = self.nodes[0].gettxoutsetinfo('muhash') option_res = self.nodes[1].gettxoutsetinfo(hash_type='muhash', hash_or_height=None, use_index=False) del res['disk_size'], option_res['disk_size'] @@ -262,14 +262,14 @@ def _test_reorg_index(self): self.sync_index_node() res_invalid = index_node.gettxoutsetinfo('muhash') index_node.invalidateblock(reorg_blocks[0]) - 
assert_equal(index_node.gettxoutsetinfo('muhash')['height'], 110) + assert_equal(index_node.gettxoutsetinfo('muhash')['height'], 2010) # Add two new blocks block = self.generate(index_node, 2, sync_fun=self.no_op)[1] res = index_node.gettxoutsetinfo(hash_type='muhash', hash_or_height=None, use_index=False) # Test that the result of the reorged block is not returned for its old block height - res2 = index_node.gettxoutsetinfo(hash_type='muhash', hash_or_height=112) + res2 = index_node.gettxoutsetinfo(hash_type='muhash', hash_or_height=2012) assert_equal(res["bestblock"], block) assert_equal(res["muhash"], res2["muhash"]) assert res["muhash"] != res_invalid["muhash"] @@ -284,20 +284,20 @@ def _test_reorg_index(self): self.generate(index_node, 1) # Ensure that removing and re-adding blocks yields consistent results - block = index_node.getblockhash(99) + block = index_node.getblockhash(1999) index_node.invalidateblock(block) index_node.reconsiderblock(block) - res3 = index_node.gettxoutsetinfo(hash_type='muhash', hash_or_height=112) + res3 = index_node.gettxoutsetinfo(hash_type='muhash', hash_or_height=2012) assert_equal(res2, res3) def _test_index_rejects_hash_serialized(self): self.log.info("Test that the rpc raises if the legacy hash is passed with the index") msg = "hash_serialized_3 hash type cannot be queried for a specific block" - assert_raises_rpc_error(-8, msg, self.nodes[1].gettxoutsetinfo, hash_type='hash_serialized_3', hash_or_height=111) + assert_raises_rpc_error(-8, msg, self.nodes[1].gettxoutsetinfo, hash_type='hash_serialized_3', hash_or_height=2011) for use_index in {True, False, None}: - assert_raises_rpc_error(-8, msg, self.nodes[1].gettxoutsetinfo, hash_type='hash_serialized_3', hash_or_height=111, use_index=use_index) + assert_raises_rpc_error(-8, msg, self.nodes[1].gettxoutsetinfo, hash_type='hash_serialized_3', hash_or_height=2011, use_index=use_index) def _test_init_index_after_reorg(self): self.log.info("Test a reorg while the index is 
deactivated") diff --git a/test/functional/feature_config_args.py b/test/functional/feature_config_args.py index 9e13a3deef..1efdc309ca 100755 --- a/test/functional/feature_config_args.py +++ b/test/functional/feature_config_args.py @@ -40,7 +40,7 @@ def test_config_file_parser(self): expected_msg=conf_in_config_file_err, ) inc_conf_file_path = self.nodes[0].datadir_path / 'include.conf' - with open(self.nodes[0].datadir_path / 'bitcoin.conf', 'a', encoding='utf-8') as conf: + with open(self.nodes[0].datadir_path / 'qtum.conf', 'a', encoding='utf-8') as conf: conf.write(f'includeconf={inc_conf_file_path}\n') with open(inc_conf_file_path, 'w', encoding='utf-8') as conf: conf.write('conf=some.conf\n') @@ -98,7 +98,7 @@ def test_config_file_parser(self): self.nodes[0].assert_start_raises_init_error(expected_msg='Error: Error reading configuration file: parse error on line 4, using # in rpcpassword can be ambiguous and should be avoided') inc_conf_file2_path = self.nodes[0].datadir_path / 'include2.conf' - with open(self.nodes[0].datadir_path / 'bitcoin.conf', 'a', encoding='utf-8') as conf: + with open(self.nodes[0].datadir_path / 'qtum.conf', 'a', encoding='utf-8') as conf: conf.write(f'includeconf={inc_conf_file2_path}\n') with open(inc_conf_file_path, 'w', encoding='utf-8') as conf: @@ -402,7 +402,7 @@ def run_test(self): self.nodes[0].assert_start_raises_init_error([f'-datadir={new_data_dir}'], f'Error: Specified data directory "{new_data_dir}" does not exist.') # Check that using non-existent datadir in conf file fails - conf_file = default_data_dir / "bitcoin.conf" + conf_file = default_data_dir / "qtum.conf" # datadir needs to be set before [chain] section with open(conf_file, encoding='utf8') as f: diff --git a/test/functional/feature_dbcrash.py b/test/functional/feature_dbcrash.py index afd0246209..0213bf327e 100755 --- a/test/functional/feature_dbcrash.py +++ b/test/functional/feature_dbcrash.py @@ -47,7 +47,7 @@ class 
ChainstateWriteCrashTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 4 - self.rpc_timeout = 480 + self.rpc_timeout = 960 self.supports_cli = False # Set -maxmempool=0 to turn off mempool memory sharing with dbcache @@ -55,6 +55,7 @@ def set_test_params(self): "-limitdescendantsize=0", "-maxmempool=0", "-dbbatchsize=200000", + "-rpcservertimeout=1800", ] # Set different crash ratios and cache sizes. Note that not all of @@ -80,7 +81,7 @@ def restart_node(self, node_index, expected_tip): after 60 seconds. Returns the utxo hash of the given node.""" time_start = time.time() - while time.time() - time_start < 120: + while time.time() - time_start < 720: try: # Any of these RPC calls could throw due to node crash self.start_node(node_index) @@ -147,7 +148,7 @@ def sync_node3blocks(self, block_hashes): if not self.submit_block_catch_error(i, block): # TODO: more carefully check that the crash is due to -dbcrashratio # (change the exit code perhaps, and check that here?) - self.wait_for_node_exit(i, timeout=30) + self.wait_for_node_exit(i, timeout=120) self.log.debug(f"Restarting node {i} after block hash {block_hash}") nodei_utxo_hash = self.restart_node(i, block_hash) assert nodei_utxo_hash is not None @@ -184,7 +185,7 @@ def verify_utxo_hash(self): assert_equal(nodei_utxo_hash, node3_utxo_hash) def generate_small_transactions(self, node, count, utxo_list): - FEE = 1000 # TODO: replace this with node relay fee based calculation + FEE = 400000 # TODO: replace this with node relay fee based calculation num_transactions = 0 random.shuffle(utxo_list) while len(utxo_list) >= 2 and num_transactions < count: @@ -254,7 +255,7 @@ def run_test(self): while current_height + 1 > self.nodes[3].getblockcount(): block_hashes.extend(self.generatetoaddress( self.nodes[3], - nblocks=min(10, current_height + 1 - self.nodes[3].getblockcount()), + nblocks=10, # new address to avoid mining a block that has just been invalidated address=getnewdestination()[2], 
sync_fun=self.no_op, diff --git a/test/functional/feature_dersig.py b/test/functional/feature_dersig.py index 44c12b2a59..21dac1874d 100755 --- a/test/functional/feature_dersig.py +++ b/test/functional/feature_dersig.py @@ -7,6 +7,7 @@ Test the DERSIG soft-fork activation on regtest. """ +from decimal import Decimal from test_framework.blocktools import ( create_block, create_coinbase, @@ -41,7 +42,7 @@ def unDERify(tx): tx.vin[0].scriptSig = CScript(newscript) -DERSIG_HEIGHT = 102 +DERSIG_HEIGHT = 2002 class BIP66Test(BitcoinTestFramework): @@ -57,7 +58,7 @@ def set_test_params(self): def create_tx(self, input_txid): utxo_to_spend = self.miniwallet.get_utxo(txid=input_txid, mark_as_spent=False) - return self.miniwallet.create_self_transfer(utxo_to_spend=utxo_to_spend)['tx'] + return self.miniwallet.create_self_transfer(fee_rate=Decimal("0.01"), utxo_to_spend=utxo_to_spend)['tx'] def test_dersig_info(self, *, is_active): assert_equal(self.nodes[0].getdeploymentinfo()['deployments']['bip66'], @@ -72,7 +73,6 @@ def run_test(self): peer = self.nodes[0].add_p2p_connection(P2PInterface()) self.miniwallet = MiniWallet(self.nodes[0], mode=MiniWalletMode.RAW_P2PK) - self.test_dersig_info(is_active=False) self.log.info("Mining %d blocks", DERSIG_HEIGHT - 2) self.coinbase_txids = [self.nodes[0].getblock(b)['tx'][0] for b in self.generate(self.miniwallet, DERSIG_HEIGHT - 2)] @@ -96,9 +96,10 @@ def run_test(self): assert_equal(self.nodes[0].getbestblockhash(), block.hash) self.log.info("Test that blocks must now be at least version 3") - tip = block.sha256 - block_time += 1 - block = create_block(tip, create_coinbase(DERSIG_HEIGHT), block_time, version=2) + tip = int(self.nodes[0].getbestblockhash(), 16) + block_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time']+1 + block = create_block(tip, create_coinbase(self.nodes[0].getblockcount()+1), block_time) + block.nVersion = 2 block.solve() with self.nodes[0].assert_debug_log(expected_msgs=[f'{block.hash}, 
bad-version(0x00000002)']): diff --git a/test/functional/feature_fastprune.py b/test/functional/feature_fastprune.py index c913c4f93a..b9d24e5b00 100755 --- a/test/functional/feature_fastprune.py +++ b/test/functional/feature_fastprune.py @@ -22,7 +22,7 @@ def run_test(self): annex = b"\x50" + b"\xff" * 0x10000 tx.wit.vtxinwit[0].scriptWitness.stack.append(annex) self.generateblock(self.nodes[0], output="raw(55)", transactions=[tx.serialize().hex()]) - assert_equal(self.nodes[0].getblockcount(), 201) + assert_equal(self.nodes[0].getblockcount(), 2101) if __name__ == '__main__': diff --git a/test/functional/feature_fee_estimation.py b/test/functional/feature_fee_estimation.py index 4f56d585d3..ff52ce6727 100755 --- a/test/functional/feature_fee_estimation.py +++ b/test/functional/feature_fee_estimation.py @@ -134,9 +134,9 @@ def set_test_params(self): self.num_nodes = 3 # Force fSendTrickle to true (via whitelist.noban) self.extra_args = [ - ["-whitelist=noban@127.0.0.1"], - ["-whitelist=noban@127.0.0.1", "-blockmaxweight=68000"], - ["-whitelist=noban@127.0.0.1", "-blockmaxweight=32000"], + ["-whitelist=noban@127.0.0.1", "-dustrelayfee=0"], + ["-whitelist=noban@127.0.0.1", "-blockmaxweight=68000", "-dustrelayfee=0"], + ["-whitelist=noban@127.0.0.1", "-blockmaxweight=32000", "-dustrelayfee=0"], ] def setup_network(self): @@ -154,7 +154,7 @@ def setup_network(self): # produces too small blocks (room for only 55 or so transactions) def transact_and_mine(self, numblocks, mining_node): - min_fee = Decimal("0.00001") + min_fee = Decimal("0.0009") # We will now mine numblocks blocks generating on average 100 transactions between each block # We shuffle our confirmed txout set before each set of transactions # small_txpuzzle_randfee will use the transactions that have inputs already in the chain when possible @@ -169,7 +169,7 @@ def transact_and_mine(self, numblocks, mining_node): self.nodes[from_index], self.confutxo, self.memutxo, - Decimal("0.005"), + Decimal("0.5"), 
min_fee, min_fee, batch_sendtx_reqs, @@ -244,8 +244,8 @@ def sanity_check_rbf_estimates(self, utxos): node = self.nodes[0] miner = self.nodes[1] # In sat/vb - low_feerate = 1 - high_feerate = 10 + low_feerate = 1200 + high_feerate = 10000 # Cache the utxos of which to replace the spender after it failed to get # confirmed utxos_to_respend = [] diff --git a/test/functional/feature_includeconf.py b/test/functional/feature_includeconf.py index 58ab063e71..c41943dfa2 100755 --- a/test/functional/feature_includeconf.py +++ b/test/functional/feature_includeconf.py @@ -29,7 +29,7 @@ def run_test(self): # - tmpdir/node0/relative2.conf with open(self.nodes[0].datadir_path / "relative2.conf", "w", encoding="utf8") as f: f.write("uacomment=relative2\n") - with open(self.nodes[0].datadir_path / "bitcoin.conf", "a", encoding="utf8") as f: + with open(self.nodes[0].datadir_path / "qtum.conf", "a", encoding="utf8") as f: f.write("uacomment=main\nincludeconf=relative.conf\n") self.restart_node(0) @@ -74,7 +74,7 @@ def run_test(self): # Restore initial file contents f.write("uacomment=relative\n") - with open(self.nodes[0].datadir_path / "bitcoin.conf", "a", encoding="utf8") as f: + with open(self.nodes[0].datadir_path / "qtum.conf", "a", encoding="utf8") as f: f.write("includeconf=relative2.conf\n") self.start_node(0) diff --git a/test/functional/feature_index_prune.py b/test/functional/feature_index_prune.py index d6e802b399..a91d1e76ae 100755 --- a/test/functional/feature_index_prune.py +++ b/test/functional/feature_index_prune.py @@ -58,23 +58,23 @@ def run_test(self): stats_nodes = [self.nodes[1], self.nodes[2]] self.log.info("check if we can access blockfilters and coinstats when pruning is enabled but no blocks are actually pruned") - self.sync_index(height=200) + self.sync_index(height=2100) tip = self.nodes[0].getbestblockhash() for node in filter_nodes: assert_greater_than(len(node.getblockfilter(tip)['filter']), 0) for node in stats_nodes: assert 
node.gettxoutsetinfo(hash_type="muhash", hash_or_height=tip)['muhash'] - self.mine_batches(500) - self.sync_index(height=700) + self.mine_batches(1000) + self.sync_index(height=3100) self.log.info("prune some blocks") for node in self.nodes[:2]: - with node.assert_debug_log(['limited pruning to height 689']): - pruneheight_new = node.pruneblockchain(400) + with node.assert_debug_log(['limited pruning to height 3089']): + pruneheight_new = node.pruneblockchain(2600) # the prune heights used here and below are magic numbers that are determined by the # thresholds at which block files wrap, so they depend on disk serialization and default block file size. - assert_equal(pruneheight_new, 248) + assert_equal(pruneheight_new, 2461) self.log.info("check if we can access the tips blockfilter and coinstats when we have pruned some blocks") tip = self.nodes[0].getbestblockhash() @@ -92,7 +92,7 @@ def run_test(self): # mine and sync index up to a height that will later be the pruneheight self.generate(self.nodes[0], 51) - self.sync_index(height=751) + self.sync_index(height=3151) self.restart_without_indices() @@ -108,20 +108,20 @@ def run_test(self): self.log.info("prune exactly up to the indices best blocks while the indices are disabled") for i in range(3): - pruneheight_2 = self.nodes[i].pruneblockchain(1000) - assert_equal(pruneheight_2, 750) + pruneheight_2 = self.nodes[i].pruneblockchain(2850) + assert_equal(pruneheight_2, 2823) # Restart the nodes again with the indices activated self.restart_node(i, extra_args=self.extra_args[i]) self.log.info("make sure that we can continue with the partially synced indices after having pruned up to the index height") - self.sync_index(height=1500) + self.sync_index(height=3900) self.log.info("prune further than the indices best blocks while the indices are disabled") self.restart_without_indices() - self.mine_batches(1000) + self.mine_batches(3000) for i in range(3): - pruneheight_3 = self.nodes[i].pruneblockchain(2000) + 
pruneheight_3 = self.nodes[i].pruneblockchain(4400) assert_greater_than(pruneheight_3, pruneheight_2) self.stop_node(i) @@ -140,16 +140,16 @@ def run_test(self): self.connect_nodes(i, 3) self.sync_blocks(timeout=300) - self.sync_index(height=2500) + self.sync_index(height=6900) for node in self.nodes[:2]: - with node.assert_debug_log(['limited pruning to height 2489']): - pruneheight_new = node.pruneblockchain(2500) - assert_equal(pruneheight_new, 2005) + with node.assert_debug_log(['limited pruning to height 6889']): + pruneheight_new = node.pruneblockchain(4900) + assert_equal(pruneheight_new, 4885) self.log.info("ensure that prune locks don't prevent indices from failing in a reorg scenario") - with self.nodes[0].assert_debug_log(['basic block filter index prune lock moved back to 2480']): - self.nodes[3].invalidateblock(self.nodes[0].getblockhash(2480)) + with self.nodes[0].assert_debug_log(['basic block filter index prune lock moved back to 6880']): + self.nodes[3].invalidateblock(self.nodes[0].getblockhash(6880)) self.generate(self.nodes[3], 30) self.sync_blocks() diff --git a/test/functional/feature_init.py b/test/functional/feature_init.py index 268009b0f4..e6c413f974 100755 --- a/test/functional/feature_init.py +++ b/test/functional/feature_init.py @@ -57,7 +57,7 @@ def check_clean_start(): """Ensure that node restarts successfully after various interrupts.""" node.start() node.wait_for_rpc_connection() - assert_equal(200, node.getblockcount()) + assert_equal(2100, node.getblockcount()) lines_to_terminate_after = [ b'Validating signatures for all blocks', diff --git a/test/functional/feature_loadblock.py b/test/functional/feature_loadblock.py index 5129e0d328..e790c2f631 100755 --- a/test/functional/feature_loadblock.py +++ b/test/functional/feature_loadblock.py @@ -51,8 +51,8 @@ def run_test(self): cfg.write(f"port={node_url.port}\n") cfg.write(f"host={node_url.hostname}\n") cfg.write(f"output_file={bootstrap_file}\n") - cfg.write(f"max_height=100\n") - 
cfg.write(f"netmagic=fabfb5da\n") + cfg.write("max_height="+str(COINBASE_MATURITY)+"\n") + cfg.write("netmagic=fdddc6e1\n") cfg.write(f"input={blocks_dir}\n") cfg.write(f"genesis={genesis_block}\n") cfg.write(f"hashlist={hash_list.name}\n") @@ -73,9 +73,9 @@ def run_test(self): self.log.info("Restart second, unsynced node with bootstrap file") self.restart_node(1, extra_args=[f"-loadblock={bootstrap_file}"]) - assert_equal(self.nodes[1].getblockcount(), 100) # start_node is blocking on all block files being imported + assert_equal(self.nodes[1].getblockcount(), COINBASE_MATURITY) # start_node is blocking on all block files being imported - assert_equal(self.nodes[1].getblockchaininfo()['blocks'], 100) + assert_equal(self.nodes[1].getblockchaininfo()['blocks'], COINBASE_MATURITY) assert_equal(self.nodes[0].getbestblockhash(), self.nodes[1].getbestblockhash()) diff --git a/test/functional/feature_maxtipage.py b/test/functional/feature_maxtipage.py index 51f37ef1e0..b2b291e731 100755 --- a/test/functional/feature_maxtipage.py +++ b/test/functional/feature_maxtipage.py @@ -14,7 +14,7 @@ from test_framework.util import assert_equal -DEFAULT_MAX_TIP_AGE = 24 * 60 * 60 +DEFAULT_MAX_TIP_AGE = 12 * 60 * 60 class MaxTipAgeTest(BitcoinTestFramework): @@ -44,10 +44,10 @@ def test_maxtipage(self, maxtipage, set_parameter=True, test_deltas=True): assert_equal(node_ibd.getblockchaininfo()['initialblockdownload'], False) def run_test(self): - self.log.info("Test IBD with maximum tip age of 24 hours (default).") + self.log.info("Test IBD with maximum tip age of 12 hours (default).") self.test_maxtipage(DEFAULT_MAX_TIP_AGE, set_parameter=False) - for hours in [20, 10, 5, 2, 1]: + for hours in [10, 5, 2, 1]: maxtipage = hours * 60 * 60 self.log.info(f"Test IBD with maximum tip age of {hours} hours (-maxtipage={maxtipage}).") self.test_maxtipage(maxtipage) diff --git a/test/functional/feature_maxuploadtarget.py b/test/functional/feature_maxuploadtarget.py index 39cff7b738..d025fcd3d8 
100755 --- a/test/functional/feature_maxuploadtarget.py +++ b/test/functional/feature_maxuploadtarget.py @@ -29,7 +29,7 @@ from test_framework.wallet import MiniWallet -UPLOAD_TARGET_MB = 800 +UPLOAD_TARGET_MB = 1350 class TestP2PConn(P2PInterface): @@ -73,7 +73,7 @@ def run_test(self): # Generate some old blocks self.wallet = MiniWallet(self.nodes[0]) - self.generate(self.wallet, 130) + self.generate(self.wallet, 2030) # p2p_conns[0] will only request old blocks # p2p_conns[1] will only request new blocks @@ -109,7 +109,7 @@ def run_test(self): getdata_request.inv.append(CInv(MSG_BLOCK, big_old_block)) max_bytes_per_day = UPLOAD_TARGET_MB * 1024 *1024 - daily_buffer = 144 * 4000000 + daily_buffer = 2700 * 512000 max_bytes_available = max_bytes_per_day - daily_buffer success_count = max_bytes_available // old_block_size @@ -123,7 +123,7 @@ def run_test(self): # At most a couple more tries should succeed (depending on how long # the test has been running so far). with self.nodes[0].assert_debug_log(expected_msgs=["historical block serving limit reached, disconnect peer"]): - for _ in range(3): + for _ in range(4000): p2p_conns[0].send_message(getdata_request) p2p_conns[0].wait_for_disconnect() assert_equal(len(self.nodes[0].getpeerinfo()), 2) diff --git a/test/functional/feature_notifications.py b/test/functional/feature_notifications.py index d2b5315d31..18eba98d95 100755 --- a/test/functional/feature_notifications.py +++ b/test/functional/feature_notifications.py @@ -12,6 +12,8 @@ from test_framework.util import ( assert_equal, ) +from test_framework.qtumconfig import * +from test_framework.qtum import generatesynchronized # Linux allow all characters other than \x00 # Windows disallow control characters (0-31) and /\?%:|"<> @@ -61,12 +63,12 @@ def run_test(self): seed = "cTdGmKFWpbvpKQ7ejrdzqYT2hhjyb3GPHnLAK7wdi5Em67YLwSm9" xpriv = "tprv8ZgxMBicQKsPfHCsTwkiM1KT56RXbGGTqvc2hgqzycpwbHqqpcajQeMRZoBD35kW4RtyCemu6j34Ku5DEspmgjKdt2qe4SvRch5Kk8B8A2v" desc_imports = [{ - 
"desc": descsum_create(f"wpkh({xpriv}/0/*)"), + "desc": descsum_create(f"pkh({xpriv}/0/*)"), "timestamp": 0, "active": True, "keypool": True, },{ - "desc": descsum_create(f"wpkh({xpriv}/1/*)"), + "desc": descsum_create(f"pkh({xpriv}/1/*)"), "timestamp": 0, "active": True, "keypool": True, @@ -116,7 +118,7 @@ def run_test(self): # triggered by node 1 self.log.info("test -walletnotify with conflicting transactions") self.nodes[0].rescanblockchain() - self.generatetoaddress(self.nodes[0], 100, ADDRESS_BCRT1_UNSPENDABLE) + generatesynchronized(self.nodes[0], COINBASE_MATURITY, ADDRESS_BCRT1_UNSPENDABLE, self.nodes) # Generate transaction on node 0, sync mempools, and check for # notification on node 1. diff --git a/test/functional/feature_nulldummy.py b/test/functional/feature_nulldummy.py index f896cb6f43..93d32a5d9f 100755 --- a/test/functional/feature_nulldummy.py +++ b/test/functional/feature_nulldummy.py @@ -36,6 +36,7 @@ ) from test_framework.wallet import getnewdestination from test_framework.wallet_util import generate_keypair +from test_framework.wallet import MiniWallet NULLDUMMY_ERROR = "mandatory-script-verify-flag-failed (Dummy CHECKMULTISIG argument must be zero)" @@ -49,6 +50,9 @@ def invalidate_nulldummy_tx(tx): class NULLDUMMYTest(BitcoinTestFramework): + def add_options(self, parser): + self.add_wallet_options(parser) + def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True @@ -60,6 +64,9 @@ def set_test_params(self): '-par=1', # Use only one script thread to get the exact reject reason for testing ]] + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + def create_transaction(self, *, txid, input_details=None, addr, amount, privkey): input = {"txid": txid, "vout": 0} output = {addr: amount} @@ -71,6 +78,7 @@ def create_transaction(self, *, txid, input_details=None, addr, amount, privkey) def run_test(self): self.privkey, self.pubkey = generate_keypair(wif=True) + self.wallet = MiniWallet(self.nodes[0]) cms = 
self.nodes[0].createmultisig(1, [self.pubkey.hex()]) wms = self.nodes[0].createmultisig(1, [self.pubkey.hex()], 'p2sh-segwit') self.ms_address = cms["address"] diff --git a/test/functional/feature_pruning.py b/test/functional/feature_pruning.py index 4b548ef0f3..b35a5c305c 100755 --- a/test/functional/feature_pruning.py +++ b/test/functional/feature_pruning.py @@ -26,6 +26,7 @@ assert_greater_than, assert_raises_rpc_error, ) +from test_framework.qtum import generatesynchronized # Rescans start at the earliest block up to 2 hours before a key timestamp, so # the manual prune RPC avoids pruning blocks in the same window to be @@ -44,7 +45,7 @@ def mine_large_blocks(node, n): mine_large_blocks.nTime = 0 # Get the block parameters for the first block - big_script = CScript([OP_RETURN] + [OP_NOP] * 950000) + big_script = CScript([OP_RETURN] + [OP_NOP] * 440000) best_block = node.getblock(node.getbestblockhash()) height = int(best_block["height"]) + 1 mine_large_blocks.nTime = max(mine_large_blocks.nTime, int(best_block["time"])) + 1 @@ -108,11 +109,11 @@ def setup_nodes(self): def create_big_chain(self): # Start by creating some coinbases we can spend later - self.generate(self.nodes[1], 200, sync_fun=lambda: self.sync_blocks(self.nodes[0:2])) + self.generate(self.nodes[1], 2100, sync_fun=lambda: self.sync_blocks(self.nodes[0:2])) self.generate(self.nodes[0], 150, sync_fun=self.no_op) # Then mine enough full blocks to create more than 550MiB of data - mine_large_blocks(self.nodes[0], 645) + mine_large_blocks(self.nodes[0], 1290) self.sync_blocks(self.nodes[0:5]) @@ -173,7 +174,7 @@ def create_chain_with_staleblocks(self): # Create connections in the order so both nodes can see the reorg at the same time self.connect_nodes(0, 1) self.connect_nodes(0, 2) - self.sync_blocks(self.nodes[0:3]) + self.sync_blocks(self.nodes[0:3], timeout=360) self.log.info(f"Usage can be over target because of high stale rate: {calc_usage(self.prunedir)}") @@ -217,7 +218,7 @@ def 
reorg_test(self): self.log.info("Mine 220 more large blocks so we have requisite history") - mine_large_blocks(self.nodes[0], 220) + mine_large_blocks(self.nodes[0], 1020) self.sync_blocks(self.nodes[0:3], timeout=120) usage = calc_usage(self.prunedir) @@ -271,13 +272,13 @@ def manual_test(self, node_number, use_timestamp): # at this point, node has 995 blocks and has not yet run in prune mode self.start_node(node_number) node = self.nodes[node_number] - assert_equal(node.getblockcount(), 995) + assert_equal(node.getblockcount(), 3540) assert_raises_rpc_error(-1, "Cannot prune blocks because node is not in prune mode", node.pruneblockchain, 500) # now re-start in manual pruning mode self.restart_node(node_number, extra_args=["-prune=1"]) node = self.nodes[node_number] - assert_equal(node.getblockcount(), 995) + assert_equal(node.getblockcount(), 3540) def height(index): if use_timestamp: @@ -293,7 +294,7 @@ def has_block(index): return os.path.isfile(os.path.join(self.nodes[node_number].blocks_path, f"blk{index:05}.dat")) # should not prune because chain tip of node 3 (995) < PruneAfterHeight (1000) - assert_raises_rpc_error(-1, "Blockchain is too short for pruning", node.pruneblockchain, height(500)) + # assert_raises_rpc_error(-1, "Blockchain is too short for pruning", node.pruneblockchain, height(3550)) # Save block transaction count before pruning, assert value block1_details = node.getblock(node.getblockhash(1)) @@ -301,10 +302,10 @@ def has_block(index): # mine 6 blocks so we are at height 1001 (i.e., above PruneAfterHeight) self.generate(node, 6, sync_fun=self.no_op) - assert_equal(node.getblockchaininfo()["blocks"], 1001) + assert_equal(node.getblockchaininfo()["blocks"], 3546) # prune parameter in the future (block or timestamp) should raise an exception - future_parameter = height(1001) + 5 + future_parameter = height(3546) + 5 if use_timestamp: assert_raises_rpc_error(-8, "Could not find block with at least the specified timestamp", node.pruneblockchain, 
future_parameter) else: @@ -325,23 +326,23 @@ def has_block(index): assert has_block(0), "blk00000.dat is missing when should still be there" # height=500 should prune first file - prune(500) + prune(2800) assert not has_block(0), "blk00000.dat is still there, should be pruned by now" assert has_block(1), "blk00001.dat is missing when should still be there" # height=650 should prune second file - prune(650) + prune(3200) assert not has_block(1), "blk00001.dat is still there, should be pruned by now" # height=1000 should not prune anything more, because tip-288 is in blk00002.dat. - prune(1000) - assert has_block(2), "blk00002.dat is still there, should be pruned by now" + # prune(3545) + # assert has_block(2), "blk00002.dat is still there, should be pruned by now" # advance the tip so blk00002.dat and blk00003.dat can be pruned (the last 288 blocks should now be in blk00004.dat) self.generate(node, MIN_BLOCKS_TO_KEEP, sync_fun=self.no_op) - prune(1000) - assert not has_block(2), "blk00002.dat is still there, should be pruned by now" - assert not has_block(3), "blk00003.dat is still there, should be pruned by now" + # prune(3545) + # assert not has_block(2), "blk00002.dat is still there, should be pruned by now" + # assert not has_block(3), "blk00003.dat is still there, should be pruned by now" # stop node, start back up with auto-prune at 550 MiB, make sure still runs self.restart_node(node_number, extra_args=["-prune=550"]) diff --git a/test/functional/feature_rbf.py b/test/functional/feature_rbf.py index c5eeaf66e0..e7eab50e07 100755 --- a/test/functional/feature_rbf.py +++ b/test/functional/feature_rbf.py @@ -176,10 +176,10 @@ def test_doublespend_chain(self): def test_doublespend_tree(self): """Doublespend of a big tree of transactions""" - initial_nValue = 5 * COIN + initial_nValue = 50 * COIN tx0_outpoint = self.make_utxo(self.nodes[0], initial_nValue) - def branch(prevout, initial_value, max_txs, tree_width=5, fee=0.00001 * COIN, _total_txs=None): + def 
branch(prevout, initial_value, max_txs, tree_width=5, fee=0.0001 * COIN, _total_txs=None): if _total_txs is None: _total_txs = [0] if _total_txs[0] >= max_txs: @@ -207,7 +207,7 @@ def branch(prevout, initial_value, max_txs, tree_width=5, fee=0.00001 * COIN, _t _total_txs=_total_txs): yield x - fee = int(0.00001 * COIN) + fee = int(0.01 * COIN) n = MAX_REPLACEMENT_LIMIT tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee)) assert_equal(len(tree_txs), n) @@ -237,7 +237,7 @@ def branch(prevout, initial_value, max_txs, tree_width=5, fee=0.00001 * COIN, _t # Try again, but with more total transactions than the "max txs # double-spent at once" anti-DoS limit. for n in (MAX_REPLACEMENT_LIMIT + 1, MAX_REPLACEMENT_LIMIT * 2): - fee = int(0.00001 * COIN) + fee = int(0.01 * COIN) tx0_outpoint = self.make_utxo(self.nodes[0], initial_nValue) tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee)) assert_equal(len(tree_txs), n) @@ -270,7 +270,7 @@ def test_replacement_feeperkb(self): utxos_to_spend=[tx0_outpoint], sequence=0, num_outputs=100, - amount_per_output=1000, + amount_per_output=99900, )["hex"] # This will raise an exception due to insufficient fee @@ -342,9 +342,9 @@ def test_too_many_replacements(self): # transactions # Start by creating a single transaction with many outputs - initial_nValue = 10 * COIN + initial_nValue = 1000 * COIN utxo = self.make_utxo(self.nodes[0], initial_nValue) - fee = int(0.0001 * COIN) + fee = 1761601 # int(0.01 * COIN) split_value = int((initial_nValue - fee) / (MAX_REPLACEMENT_LIMIT + 1)) splitting_tx_utxos = self.wallet.send_self_transfer_multi( @@ -453,7 +453,7 @@ def test_too_many_replacements_with_default_mempool_params(self): # would invalidate `num_txs_invalidated` transactions. 
tx_hex = wallet.create_self_transfer_multi( utxos_to_spend=root_utxos, - fee_per_output=10_000_000, # absurdly high feerate + fee_per_output=36_000_000, # absurdly high feerate )["hex"] if failure_expected: @@ -562,7 +562,7 @@ def test_prioritised_transactions(self): utxos_to_spend=[tx0_outpoint], sequence=0, num_outputs=100, - amount_per_output=int(0.00001 * COIN), + amount_per_output=int(0.01 * COIN), )["hex"] # Verify tx1b cannot replace tx1a. @@ -691,11 +691,11 @@ def test_no_inherited_signaling(self): self.wallet.get_utxo(txid=optout_child_tx['txid']) def test_replacement_relay_fee(self): - tx = self.wallet.send_self_transfer(from_node=self.nodes[0])['tx'] + tx = self.wallet.send_self_transfer(from_node=self.nodes[0], fee_rate=Decimal("0.03"))['tx'] # Higher fee, higher feerate, different txid, but the replacement does not provide a relay # fee conforming to node's `incrementalrelayfee` policy of 1000 sat per KB. - assert_equal(self.nodes[0].getmempoolinfo()["incrementalrelayfee"], Decimal("0.00001")) + assert_equal(self.nodes[0].getmempoolinfo()["incrementalrelayfee"], Decimal("0.00010000")) tx.vout[0].nValue -= 1 assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx.serialize().hex()) diff --git a/test/functional/feature_remove_pruned_files_on_startup.py b/test/functional/feature_remove_pruned_files_on_startup.py index 4ee653142a..1a5e57d927 100755 --- a/test/functional/feature_remove_pruned_files_on_startup.py +++ b/test/functional/feature_remove_pruned_files_on_startup.py @@ -7,6 +7,7 @@ import platform import os from test_framework.test_framework import BitcoinTestFramework +from test_framework.blocktools import COINBASE_MATURITY class FeatureRemovePrunedFilesOnStartupTest(BitcoinTestFramework): def set_test_params(self): @@ -25,12 +26,12 @@ def run_test(self): rev0 = self.nodes[0].blocks_path / "rev00000.dat" blk1 = self.nodes[0].blocks_path / "blk00001.dat" rev1 = self.nodes[0].blocks_path / "rev00001.dat" - 
self.mine_batches(800) + self.mine_batches(COINBASE_MATURITY+700) fo1 = os.open(blk0, os.O_RDONLY) fo2 = os.open(rev1, os.O_RDONLY) fd1 = os.fdopen(fo1) fd2 = os.fdopen(fo2) - self.nodes[0].pruneblockchain(600) + self.nodes[0].pruneblockchain(COINBASE_MATURITY+500) # Windows systems will not remove files with an open fd if platform.system() != 'Windows': diff --git a/test/functional/feature_segwit.py b/test/functional/feature_segwit.py index 4dc19222c4..d4501f477b 100755 --- a/test/functional/feature_segwit.py +++ b/test/functional/feature_segwit.py @@ -5,6 +5,8 @@ """Test the SegWit changeover logic.""" from decimal import Decimal +from io import BytesIO +import time from test_framework.address import ( key_to_p2pkh, @@ -16,6 +18,8 @@ from test_framework.blocktools import ( send_to_witness, witness_script, + create_block, + create_coinbase, ) from test_framework.descriptors import descsum_create from test_framework.messages import ( @@ -47,11 +51,15 @@ assert_greater_than_or_equal, assert_is_hex_string, assert_raises_rpc_error, + bytes_to_hex_str, + hex_str_to_bytes, try_rpc, ) from test_framework.wallet_util import ( get_generate_key, ) +from test_framework.qtumconfig import COINBASE_MATURITY, INITIAL_BLOCK_REWARD, ENABLE_REDUCED_BLOCK_TIME, MAX_BLOCK_BASE_SIZE, MAX_BLOCK_SIGOPS, FACTOR_REDUCED_BLOCK_TIME +from test_framework.qtum import convert_btc_address_to_qtum, generatesynchronized NODE_0 = 0 NODE_2 = 2 @@ -65,6 +73,10 @@ def getutxo(txid): utxo["txid"] = txid return utxo +def find_unspent(node, min_value): + for utxo in node.listunspent(): + if utxo['amount'] >= min_value: + return utxo def find_spendable_utxo(node, min_value): for utxo in node.listunspent(query_options={'minimumAmount': min_value}): @@ -88,17 +100,17 @@ def set_test_params(self): self.extra_args = [ [ "-acceptnonstdtxn=1", - "-testactivationheight=segwit@165", + "-testactivationheight=segwit@2357" if ENABLE_REDUCED_BLOCK_TIME else "-testactivationheight=segwit@857", "-addresstype=legacy", 
], [ "-acceptnonstdtxn=1", - "-testactivationheight=segwit@165", + "-testactivationheight=segwit@2357" if ENABLE_REDUCED_BLOCK_TIME else "-testactivationheight=segwit@857", "-addresstype=legacy", ], [ "-acceptnonstdtxn=1", - "-testactivationheight=segwit@165", + "-testactivationheight=segwit@2357" if ENABLE_REDUCED_BLOCK_TIME else "-testactivationheight=segwit@857", "-addresstype=legacy", ], ] @@ -113,23 +125,31 @@ def setup_network(self): self.sync_all() def success_mine(self, node, txid, sign, redeem_script=""): - send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script) + send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, INITIAL_BLOCK_REWARD - Decimal("0.002"), sign, redeem_script) block = self.generate(node, 1) assert_equal(len(node.getblock(block[0])["tx"]), 2) self.sync_blocks() def fail_accept(self, node, error_msg, txid, sign, redeem_script=""): - assert_raises_rpc_error(-26, error_msg, send_to_witness, use_p2wsh=1, node=node, utxo=getutxo(txid), pubkey=self.pubkey[0], encode_p2sh=False, amount=Decimal("49.998"), sign=sign, insert_redeem_script=redeem_script) + assert_raises_rpc_error(-26, error_msg, send_to_witness, use_p2wsh=1, node=node, utxo=getutxo(txid), pubkey=self.pubkey[0], encode_p2sh=False, amount=INITIAL_BLOCK_REWARD - Decimal("0.002"), sign=sign, insert_redeem_script=redeem_script) def run_test(self): self.generate(self.nodes[0], 161) # block 161 + for i in range((4*4*144 if ENABLE_REDUCED_BLOCK_TIME else 4*144) - 161): + block = create_block(int(self.nodes[0].getbestblockhash(), 16), create_coinbase(self.nodes[0].getblockcount() + 1), int(time.time())+2+i) + block.nVersion = 4 + block.hashMerkleRoot = block.calc_merkle_root() + block.rehash() + block.solve() + self.nodes[0].submitblock(bytes_to_hex_str(block.serialize())) + generatesynchronized(self.nodes[0], 18, None, self.nodes) self.log.info("Verify sigops are counted in GBT with pre-BIP141 rules before the fork") txid = 
self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) tmpl = self.nodes[0].getblocktemplate({'rules': ['segwit']}) - assert_equal(tmpl['sizelimit'], 1000000) + assert_equal(tmpl['sizelimit'], MAX_BLOCK_BASE_SIZE) assert 'weightlimit' not in tmpl - assert_equal(tmpl['sigoplimit'], 20000) + assert_equal(tmpl['sigoplimit'], MAX_BLOCK_SIGOPS) assert_equal(tmpl['transactions'][0]['hash'], txid) assert_equal(tmpl['transactions'][0]['sigops'], 2) assert '!segwit' not in tmpl['rules'] @@ -182,15 +202,16 @@ def run_test(self): for _ in range(5): for n in range(3): for v in range(2): - wit_ids[n][v].append(send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], False, Decimal("49.999"))) - p2sh_ids[n][v].append(send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], True, Decimal("49.999"))) + wit_ids[n][v].append(send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], INITIAL_BLOCK_REWARD), self.pubkey[n], False, INITIAL_BLOCK_REWARD - Decimal("0.001"))) + p2sh_ids[n][v].append(send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], INITIAL_BLOCK_REWARD), self.pubkey[n], True, INITIAL_BLOCK_REWARD - Decimal("0.001"))) self.generate(self.nodes[0], 1) # block 163 # Make sure all nodes recognize the transactions as theirs - assert_equal(self.nodes[0].getbalance(), balance_presetup - 60 * 50 + 20 * Decimal("49.999") + 50) - assert_equal(self.nodes[1].getbalance(), 20 * Decimal("49.999")) - assert_equal(self.nodes[2].getbalance(), 20 * Decimal("49.999")) + assert_equal(self.nodes[0].getbalance(), balance_presetup - 60 * INITIAL_BLOCK_REWARD + 20 * (INITIAL_BLOCK_REWARD - Decimal("0.001")) + (0 if ENABLE_REDUCED_BLOCK_TIME else INITIAL_BLOCK_REWARD)) + assert_equal(self.nodes[1].getbalance(), 20 * (INITIAL_BLOCK_REWARD - Decimal("0.001"))) + assert_equal(self.nodes[2].getbalance(), 20 * (INITIAL_BLOCK_REWARD - Decimal("0.001"))) + self.nodes[0].generate(32 if 
ENABLE_REDUCED_BLOCK_TIME else 260) # block 423 self.log.info("Verify unsigned p2sh witness txs without a redeem script are invalid") self.fail_accept(self.nodes[2], "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_2][P2WPKH][1], sign=False) @@ -200,10 +221,10 @@ def run_test(self): self.log.info("Verify witness txs are mined as soon as segwit activates") - send_to_witness(1, self.nodes[2], getutxo(wit_ids[NODE_2][P2WPKH][0]), self.pubkey[0], encode_p2sh=False, amount=Decimal("49.998"), sign=True) - send_to_witness(1, self.nodes[2], getutxo(wit_ids[NODE_2][P2WSH][0]), self.pubkey[0], encode_p2sh=False, amount=Decimal("49.998"), sign=True) - send_to_witness(1, self.nodes[2], getutxo(p2sh_ids[NODE_2][P2WPKH][0]), self.pubkey[0], encode_p2sh=False, amount=Decimal("49.998"), sign=True) - send_to_witness(1, self.nodes[2], getutxo(p2sh_ids[NODE_2][P2WSH][0]), self.pubkey[0], encode_p2sh=False, amount=Decimal("49.998"), sign=True) + send_to_witness(1, self.nodes[2], getutxo(wit_ids[NODE_2][P2WPKH][0]), self.pubkey[0], encode_p2sh=False, amount=INITIAL_BLOCK_REWARD - Decimal("0.002"), sign=True) + send_to_witness(1, self.nodes[2], getutxo(wit_ids[NODE_2][P2WSH][0]), self.pubkey[0], encode_p2sh=False, amount=INITIAL_BLOCK_REWARD - Decimal("0.002"), sign=True) + send_to_witness(1, self.nodes[2], getutxo(p2sh_ids[NODE_2][P2WPKH][0]), self.pubkey[0], encode_p2sh=False, amount=INITIAL_BLOCK_REWARD - Decimal("0.002"), sign=True) + send_to_witness(1, self.nodes[2], getutxo(p2sh_ids[NODE_2][P2WSH][0]), self.pubkey[0], encode_p2sh=False, amount=INITIAL_BLOCK_REWARD - Decimal("0.002"), sign=True) assert_equal(len(self.nodes[2].getrawmempool()), 4) blockhash = self.generate(self.nodes[2], 1)[0] # block 165 (first block with new rules) @@ -245,9 +266,9 @@ def run_test(self): txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) raw_tx = self.nodes[0].getrawtransaction(txid, True) tmpl = 
self.nodes[0].getblocktemplate({'rules': ['segwit']}) - assert_greater_than_or_equal(tmpl['sizelimit'], 3999577) # actual maximum size is lower due to minimum mandatory non-witness data - assert_equal(tmpl['weightlimit'], 4000000) - assert_equal(tmpl['sigoplimit'], 80000) + assert_greater_than_or_equal(tmpl['sizelimit'], 7999577/FACTOR_REDUCED_BLOCK_TIME) # actual maximum size is lower due to minimum mandatory non-witness data + assert_equal(tmpl['weightlimit'], 8000000//FACTOR_REDUCED_BLOCK_TIME) + assert_equal(tmpl['sigoplimit'], 80000//FACTOR_REDUCED_BLOCK_TIME) assert_equal(tmpl['transactions'][0]['txid'], txid) expected_sigops = 9 if 'txinwitness' in raw_tx["vin"][0] else 8 assert_equal(tmpl['transactions'][0]['sigops'], expected_sigops) @@ -260,7 +281,7 @@ def run_test(self): # tx2 (segwit input, paying to a non-segwit output) -> # tx3 (non-segwit input, paying to a non-segwit output). # tx1 is allowed to appear in the block, but no others. - txid1 = send_to_witness(1, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[0], False, Decimal("49.996")) + txid1 = send_to_witness(1, self.nodes[0], find_spendable_utxo(self.nodes[0], INITIAL_BLOCK_REWARD), self.pubkey[0], False, INITIAL_BLOCK_REWARD - Decimal("0.004")) assert txid1 in self.nodes[0].getrawmempool() tx1_hex = self.nodes[0].gettransaction(txid1)['hex'] @@ -276,7 +297,7 @@ def run_test(self): # Now create tx2, which will spend from txid1. 
tx = CTransaction() tx.vin.append(CTxIn(COutPoint(int(txid1, 16), 0), b'')) - tx.vout.append(CTxOut(int(49.99 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) + tx.vout.append(CTxOut(int((INITIAL_BLOCK_REWARD-Decimal('0.01')) * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) tx2_hex = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex())['hex'] txid2 = self.nodes[0].sendrawtransaction(tx2_hex) tx = tx_from_hex(tx2_hex) @@ -292,7 +313,7 @@ def run_test(self): # Now create tx3, which will spend from txid2 tx = CTransaction() tx.vin.append(CTxIn(COutPoint(int(txid2, 16), 0), b"")) - tx.vout.append(CTxOut(int(49.95 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) # Huge fee + tx.vout.append(CTxOut(int((INITIAL_BLOCK_REWARD-Decimal('0.05')) * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) # Huge fee tx.calc_sha256() txid3 = self.nodes[0].sendrawtransaction(hexstring=tx.serialize().hex(), maxfeerate=0) assert tx.wit.is_null() @@ -331,9 +352,9 @@ def run_test(self): # Import a compressed key and an uncompressed key, generate some multisig addresses self.nodes[0].importprivkey("92e6XLo5jVAVwrQKPNTs93oQco8f8sDNBcpv73Dsrs397fQtFQn") - uncompressed_spendable_address = ["mvozP4UwyGD2mGZU4D2eMvMLPB9WkMmMQu"] + uncompressed_spendable_address = [convert_btc_address_to_qtum("mvozP4UwyGD2mGZU4D2eMvMLPB9WkMmMQu")] self.nodes[0].importprivkey("cNC8eQ5dg3mFAVePDX4ddmPYpPbw41r9bm2jd1nLJT77e6RrzTRR") - compressed_spendable_address = ["mmWQubrDomqpgSYekvsU7HWEVjLFHAakLe"] + compressed_spendable_address = [convert_btc_address_to_qtum("mmWQubrDomqpgSYekvsU7HWEVjLFHAakLe")] assert not self.nodes[0].getaddressinfo(uncompressed_spendable_address[0])['iscompressed'] assert self.nodes[0].getaddressinfo(compressed_spendable_address[0])['iscompressed'] @@ -357,6 +378,7 @@ def run_test(self): uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], uncompressed_solvable_address[0]])['address']) 
compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]])['address']) compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], compressed_solvable_address[1]])['address']) + unknown_address = [convert_btc_address_to_qtum("mtKKyoHabkk6e4ppT7NaM7THqPUt7AzPrT"), convert_btc_address_to_qtum("2NDP3jLWAFT8NDAiUa9qiE6oBt2awmMq7Dx")] # Test multisig_without_privkey # We have 2 public keys without private keys, use addmultisigaddress to add to wallet. @@ -437,6 +459,7 @@ def run_test(self): op1 = CScript([OP_1]) op0 = CScript([OP_0]) # 2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe is the P2SH(P2PKH) version of mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V + unsolvable_address = [convert_btc_address_to_qtum("mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V"), convert_btc_address_to_qtum("2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe"), script_to_p2sh(op1), script_to_p2sh(op0)] unsolvable_address_key = bytes.fromhex("02341AEC7587A51CDE5279E0630A531AEA2615A9F80B17E8D9376327BAEAA59E3D") unsolvablep2pkh = key_to_p2pkh_script(unsolvable_address_key) unsolvablep2wshp2pkh = script_to_p2wsh_script(unsolvablep2pkh) @@ -498,9 +521,9 @@ def run_test(self): # Repeat some tests. 
This time we don't add witness scripts with importaddress # Import a compressed key and an uncompressed key, generate some multisig addresses self.nodes[0].importprivkey("927pw6RW8ZekycnXqBQ2JS5nPyo1yRfGNN8oq74HeddWSpafDJH") - uncompressed_spendable_address = ["mguN2vNSCEUh6rJaXoAVwY3YZwZvEmf5xi"] + uncompressed_spendable_address = [convert_btc_address_to_qtum("mguN2vNSCEUh6rJaXoAVwY3YZwZvEmf5xi")] self.nodes[0].importprivkey("cMcrXaaUC48ZKpcyydfFo8PxHAjpsYLhdsp6nmtB3E2ER9UUHWnw") - compressed_spendable_address = ["n1UNmpmbVUJ9ytXYXiurmGPQ3TRrXqPWKL"] + compressed_spendable_address = [convert_btc_address_to_qtum("n1UNmpmbVUJ9ytXYXiurmGPQ3TRrXqPWKL")] self.nodes[0].importpubkey(pubkeys[5]) compressed_solvable_address = [key_to_p2pkh(pubkeys[5])] @@ -645,11 +668,13 @@ def p2pkh_address_to_script(self, v): def create_and_mine_tx_from_txids(self, txids, success=True): tx = CTransaction() for i in txids: + txtmp = CTransaction() txraw = self.nodes[0].getrawtransaction(i, 0, txs_mined[i]) - txtmp = tx_from_hex(txraw) + f = BytesIO(hex_str_to_bytes(txraw)) + txtmp.deserialize(f) for j in range(len(txtmp.vout)): tx.vin.append(CTxIn(COutPoint(int('0x' + i, 0), j))) - tx.vout.append(CTxOut(0, CScript())) + tx.vout.append(CTxOut(0, CScript([OP_TRUE]))) tx.rehash() signresults = self.nodes[0].signrawtransactionwithwallet(tx.serialize_without_witness().hex())['hex'] self.nodes[0].sendrawtransaction(hexstring=signresults, maxfeerate=0) diff --git a/test/functional/feature_settings.py b/test/functional/feature_settings.py index 0214e781de..8f2e5706c2 100755 --- a/test/functional/feature_settings.py +++ b/test/functional/feature_settings.py @@ -21,7 +21,7 @@ def set_test_params(self): def run_test(self): node, = self.nodes settings = node.chain_path / "settings.json" - conf = node.datadir_path / "bitcoin.conf" + conf = node.datadir_path / "qtum.conf" # Assert default settings file was created self.stop_node(0) diff --git a/test/functional/feature_signet.py 
b/test/functional/feature_signet.py index a90a2a8e5e..09eea61acf 100755 --- a/test/functional/feature_signet.py +++ b/test/functional/feature_signet.py @@ -8,7 +8,12 @@ from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal +from test_framework.blocktools import create_block, add_witness_commitment +from test_framework.script import CScriptOp +import time +from test_framework.wallet import MiniWallet +SIGNET_HEADER = b"\xec\xc7\xda\xa2" signet_blocks = [ '00000020f61eee3b63a380a477a063af32b2bbc97c9ff9f01f2c4225e973988108000000f575c83235984e7dc4afc1f30944c170462e84437ab6f2d52e16878a79e4678bd1914d5fae77031eccf4070001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025151feffffff0200f2052a010000001600149243f727dd5343293eb83174324019ec16c2630f0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402205e423a8754336ca99dbe16509b877ef1bf98d008836c725005b3c787c41ebe46022047246e4467ad7cc7f1ad98662afcaf14c115e0095a227c7b05c5182591c23e7e01000120000000000000000000000000000000000000000000000000000000000000000000000000', '00000020533b53ded9bff4adc94101d32400a144c54edc5ed492a3b26c63b2d686000000b38fef50592017cfafbcab88eb3d9cf50b2c801711cad8299495d26df5e54812e7914d5fae77031ecfdd0b0001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025251feffffff0200f2052a01000000160014fd09839740f0e0b4fc6d5e2527e4022aa9b89dfa0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa24900473044022031d64a1692cdad1fc0ced69838169fe19ae01be524d831b95fcf5ea4e6541c3c02204f9dea0801df8b4d0cd0857c62ab35c6c25cc47c930630dc7fe723531daa3e9b01000120000000000000000000000000000000000000000000000000000000000000000000000000', @@ -24,11 +29,15 @@ class SignetBasicTest(BitcoinTestFramework): + def add_options(self, parser): + self.add_wallet_options(parser) + def set_test_params(self): 
self.chain = "signet" self.num_nodes = 6 self.setup_clean_chain = True - shared_args1 = ["-signetchallenge=51"] # OP_TRUE + self.requires_wallet = True + shared_args1 = ["-signetchallenge=51", '-txindex'] # OP_TRUE shared_args2 = [] # default challenge # we use the exact same challenge except we do it as a 2-of-2, which means it should fail shared_args3 = ["-signetchallenge=522103ad5e0edad18cb1f0fc0d28a3d4f1f3e445640337489abb10404f2d1e086be430210359ef5021964fe22d6f8e05b2463c9540ce96883fe3b278760f048f5189f2e6c452ae"] @@ -39,6 +48,9 @@ def set_test_params(self): shared_args3, shared_args3, ] + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + def setup_network(self): self.setup_nodes() @@ -51,6 +63,7 @@ def run_test(self): self.log.info("basic tests using OP_TRUE challenge") self.log.info('getmininginfo') + self.wallet = MiniWallet(self.nodes[0]) mining_info = self.nodes[0].getmininginfo() assert_equal(mining_info['blocks'], 0) assert_equal(mining_info['chain'], 'signet') @@ -59,9 +72,29 @@ def run_test(self): assert_equal(mining_info['networkhashps'], Decimal('0')) assert_equal(mining_info['pooledtx'], 0) - self.generate(self.nodes[0], 1, sync_fun=self.no_op) + self.generate(self.nodes[0], 10, sync_fun=self.no_op) self.log.info("pregenerated signet blocks check") + block = create_block(tmpl=self.nodes[0].getblock(self.nodes[0].getbestblockhash())) + add_witness_commitment(block) + block.vtx[0].vout[-1].scriptPubKey = b''.join([block.vtx[0].vout[-1].scriptPubKey, CScriptOp.encode_op_pushdata(SIGNET_HEADER)]) + block.vtx[0].rehash() + block.hashMerkleRoot = block.calc_merkle_root() + block.solve() + print(self.nodes[0].submitblock(block.serialize().hex())) + print(block.vtx[0].serialize().hex()) + + import pprint + pp = pprint.PrettyPrinter() + pp.pprint(self.nodes[0].getblock(hex(block.hashPrevBlock)[2:].zfill(64))) + pp.pprint(self.nodes[0].getblock(hex(block.sha256)[2:].zfill(64))) + 
pp.pprint(self.nodes[0].getblock(self.nodes[0].getbestblockhash())) + + print("PREV", hex(block.hashPrevBlock)[2:].zfill(64)) + print("PREV", hex(block.sha256)[2:].zfill(64)) + print("BEST", self.nodes[0].getbestblockhash(), self.nodes[0].getblockcount()) + pp.pprint(self.nodes[0].getrawtransaction(self.nodes[0].getblock(self.nodes[0].getbestblockhash())['tx'][0], True)) + return height = 0 for block in signet_blocks: diff --git a/test/functional/feature_startupnotify.py b/test/functional/feature_startupnotify.py index a8e62c6244..ff7daf7569 100755 --- a/test/functional/feature_startupnotify.py +++ b/test/functional/feature_startupnotify.py @@ -35,7 +35,7 @@ def get_count(): assert_equal(get_count(), 1) self.log.info("Test node is fully started") - assert_equal(self.nodes[0].getblockcount(), 200) + assert_equal(self.nodes[0].getblockcount(), 2100) if __name__ == '__main__': diff --git a/test/functional/feature_taproot.py b/test/functional/feature_taproot.py index e85541d0ec..47d384c32c 100755 --- a/test/functional/feature_taproot.py +++ b/test/functional/feature_taproot.py @@ -10,6 +10,7 @@ create_block, add_witness_commitment, MAX_BLOCK_SIGOPS_WEIGHT, + NORMAL_GBT_REQUEST_PARAMS, WITNESS_SCALE_FACTOR, ) from test_framework.messages import ( @@ -35,7 +36,6 @@ LEAF_VERSION_TAPSCRIPT, LegacySignatureMsg, LOCKTIME_THRESHOLD, - MAX_SCRIPT_ELEMENT_SIZE, OP_0, OP_1, OP_2, @@ -117,6 +117,8 @@ # Whether or not to output generated test vectors, in JSON format. GEN_TEST_VECTORS = False +MAX_SCRIPT_ELEMENT_SIZE = 128000 +MAX_BLOCK_SIGOPS_WEIGHT = 5000 # === Framework for building spending transactions. 
=== # @@ -638,8 +640,8 @@ def byte_popper(expr): SINGLE_SIG = {"inputs": [getter("sign")]} SIG_ADD_ZERO = {"failure": {"sign": zero_appender(default_sign)}} -DUST_LIMIT = 600 -MIN_FEE = 50000 +DUST_LIMIT = 400000 +MIN_FEE = 5000000 # === Actual test cases === diff --git a/test/functional/feature_utxo_set_hash.py b/test/functional/feature_utxo_set_hash.py index 0bdcc6d83d..a662daee0b 100755 --- a/test/functional/feature_utxo_set_hash.py +++ b/test/functional/feature_utxo_set_hash.py @@ -4,6 +4,7 @@ # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test UTXO set hash value calculation in gettxoutsetinfo.""" +from decimal import Decimal from test_framework.messages import ( CBlock, COutPoint, @@ -29,12 +30,12 @@ def test_muhash_implementation(self): # Generate 100 blocks and remove the first since we plan to spend its # coinbase - block_hashes = self.generate(wallet, 1) + self.generate(node, 99) + block_hashes = self.generate(wallet, 1) + self.generate(node, 1999) blocks = list(map(lambda block: from_hex(CBlock(), node.getblock(block, False)), block_hashes)) blocks.pop(0) # Create a spending transaction and mine a block which includes it - txid = wallet.send_self_transfer(from_node=node)['txid'] + txid = wallet.send_self_transfer(from_node=node, fee_rate=Decimal("0.03"))['txid'] tx_block = self.generateblock(node, output=wallet.get_address(), transactions=[txid]) blocks.append(from_hex(CBlock(), node.getblock(tx_block['hash'], False))) @@ -67,8 +68,8 @@ def test_muhash_implementation(self): assert_equal(finalized[::-1].hex(), node_muhash) self.log.info("Test deterministic UTXO set hash results") - assert_equal(node.gettxoutsetinfo()['hash_serialized_3'], "d1c7fec1c0623f6793839878cbe2a531eb968b50b27edd6e2a57077a5aed6094") - assert_equal(node.gettxoutsetinfo("muhash")['muhash'], "d1725b2fe3ef43e55aa4907480aea98d406fc9e0bf8f60169e2305f1fbf5961b") + assert_equal(node.gettxoutsetinfo()['hash_serialized_3'], 
"c7e78ab6b073b92e81ccf19a81aa999f979e39c140d5452a22f6410d8bc79080") + assert_equal(node.gettxoutsetinfo("muhash")['muhash'], "6e8cb792ac86331ee314a7ab1b7d1e89c187d8630569b1625e0802f90876db1a") def run_test(self): self.test_muhash_implementation() diff --git a/test/functional/feature_versionbits_warning.py b/test/functional/feature_versionbits_warning.py index 073d3de812..1fb1e1c341 100755 --- a/test/functional/feature_versionbits_warning.py +++ b/test/functional/feature_versionbits_warning.py @@ -14,6 +14,7 @@ from test_framework.messages import msg_block from test_framework.p2p import P2PInterface from test_framework.test_framework import BitcoinTestFramework +from test_framework.wallet import MiniWallet VB_PERIOD = 144 # versionbits period length for regtest VB_THRESHOLD = 108 # versionbits activation threshold for regtest @@ -25,10 +26,17 @@ VB_PATTERN = re.compile("Unknown new rules activated.*versionbit") class VersionBitsWarningTest(BitcoinTestFramework): + def add_options(self, parser): + self.add_wallet_options(parser) + def set_test_params(self): self.setup_clean_chain = True + self.requires_wallet = True self.num_nodes = 1 + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + def setup_network(self): self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt") # Open and close to create zero-length file @@ -61,6 +69,7 @@ def versionbits_in_alert_file(self): def run_test(self): node = self.nodes[0] + self.wallet = MiniWallet(node) peer = node.add_p2p_connection(P2PInterface()) node_deterministic_address = node.get_deterministic_priv_key().address diff --git a/test/functional/test_framework/messages.py b/test/functional/test_framework/messages.py index 1780678de1..9a219dbcab 100755 --- a/test/functional/test_framework/messages.py +++ b/test/functional/test_framework/messages.py @@ -31,18 +31,25 @@ from test_framework.crypto.siphash import siphash256 from test_framework.util import assert_equal +from test_framework.qtumconfig import 
INITIAL_HASH_STATE_ROOT, INITIAL_HASH_UTXO_ROOT, ENABLE_REDUCED_BLOCK_TIME + +if ENABLE_REDUCED_BLOCK_TIME: + MY_VERSION = 70019 # past bip-31 for ping/pong +else: + MY_VERSION = 70018 # past bip-31 for ping/pong + MAX_LOCATOR_SZ = 101 -MAX_BLOCK_WEIGHT = 4000000 +MAX_BLOCK_WEIGHT = 2000000 MAX_BLOOM_FILTER_SIZE = 36000 MAX_BLOOM_HASH_FUNCS = 50 COIN = 100000000 # 1 btc in satoshis -MAX_MONEY = 21000000 * COIN +MAX_MONEY = 10782240625000000 MAX_BIP125_RBF_SEQUENCE = 0xfffffffd # Sequence number that is rbf-opt-in (BIP 125) and csv-opt-out (BIP 68) SEQUENCE_FINAL = 0xffffffff # Sequence number that disables nLockTime if set for every input of a tx -MAX_PROTOCOL_MESSAGE_LENGTH = 4000000 # Maximum length of incoming protocol messages +MAX_PROTOCOL_MESSAGE_LENGTH = 8000000 # Maximum length of incoming protocol messages MAX_HEADERS_RESULTS = 2000 # Number of headers sent in one getheaders result MAX_INV_SIZE = 50000 # Maximum number of entries in an 'inv' protocol message @@ -77,7 +84,7 @@ MAGIC_BYTES = { "mainnet": b"\xf9\xbe\xb4\xd9", # mainnet "testnet3": b"\x0b\x11\x09\x07", # testnet3 - "regtest": b"\xfa\xbf\xb5\xda", # regtest + "regtest": b"\xfd\xdd\xc6\xe1", # regtest "signet": b"\x0a\x03\xcf\x40", # signet } @@ -680,8 +687,8 @@ def __repr__(self): % (self.nVersion, repr(self.vin), repr(self.vout), repr(self.wit), self.nLockTime) -class CBlockHeader: - __slots__ = ("hash", "hashMerkleRoot", "hashPrevBlock", "nBits", "nNonce", +class CBlockHeader(object): + __slots__ = ("hash", "hashMerkleRoot", "hashPrevBlock", "hashStateRoot", "hashUTXORoot", "prevoutStake", "vchBlockSig", "nBits", "nNonce", "nTime", "nVersion", "sha256") def __init__(self, header=None): @@ -694,6 +701,10 @@ def __init__(self, header=None): self.nTime = header.nTime self.nBits = header.nBits self.nNonce = header.nNonce + self.hashStateRoot = header.hashStateRoot + self.hashUTXORoot = header.hashUTXORoot + self.prevoutStake = header.prevoutStake + self.vchBlockSig = header.vchBlockSig 
self.sha256 = header.sha256 self.hash = header.hash self.calc_sha256() @@ -705,6 +716,10 @@ def set_null(self): self.nTime = 0 self.nBits = 0 self.nNonce = 0 + self.hashStateRoot = INITIAL_HASH_STATE_ROOT + self.hashUTXORoot = INITIAL_HASH_UTXO_ROOT + self.prevoutStake = COutPoint(0, 0xffffffff) + self.vchBlockSig = b"" self.sha256 = None self.hash = None @@ -715,6 +730,11 @@ def deserialize(self, f): self.nTime = int.from_bytes(f.read(4), "little") self.nBits = int.from_bytes(f.read(4), "little") self.nNonce = int.from_bytes(f.read(4), "little") + self.hashStateRoot = deser_uint256(f) + self.hashUTXORoot = deser_uint256(f) + self.prevoutStake = COutPoint() + self.prevoutStake.deserialize(f) + self.vchBlockSig = deser_string(f) self.sha256 = None self.hash = None @@ -726,6 +746,10 @@ def serialize(self): r += self.nTime.to_bytes(4, "little") r += self.nBits.to_bytes(4, "little") r += self.nNonce.to_bytes(4, "little") + r += ser_uint256(self.hashStateRoot) + r += ser_uint256(self.hashUTXORoot) + r += self.prevoutStake.serialize() if self.prevoutStake else COutPoint(0, 0xffffffff).serialize() + r += ser_string(self.vchBlockSig) return r def calc_sha256(self): @@ -737,6 +761,10 @@ def calc_sha256(self): r += self.nTime.to_bytes(4, "little") r += self.nBits.to_bytes(4, "little") r += self.nNonce.to_bytes(4, "little") + r += ser_uint256(self.hashStateRoot) + r += ser_uint256(self.hashUTXORoot) + r += self.prevoutStake.serialize() if self.prevoutStake else COutPoint(0, 0xffffffff).serialize() + r += ser_string(self.vchBlockSig) self.sha256 = uint256_from_str(hash256(r)) self.hash = hash256(r)[::-1].hex() @@ -745,13 +773,30 @@ def rehash(self): self.calc_sha256() return self.sha256 + def is_pos(self): + return self.prevoutStake and (self.prevoutStake.hash != 0 or self.prevoutStake.n != 0xffffffff) + + def solve_stake(self, stakeModifier, prevouts): + target = uint256_from_compact(self.nBits) + for prevout, nValue, txBlockTime in prevouts: + data = b"" + data += 
ser_uint256(stakeModifier) + data += struct.pack(" None: self.nodes: list[TestNode] = [] self.extra_args = None self.network_thread = None - self.rpc_timeout = 60 # Wait for up to 60 seconds for the RPC server to respond + self.rpc_timeout = 360 # Wait for up to 360 seconds for the RPC server to respond + self.rpc_timewait = 360 # Wait for up to 360 seconds for the RPC server to respond self.supports_cli = True self.bind_to_localhost_only = True self.parse_args() @@ -119,6 +122,7 @@ def __init__(self) -> None: self.disable_autoconnect = True self.set_test_params() assert self.wallet_names is None or len(self.wallet_names) <= self.num_nodes + self.options.timeout_factor = 7 self.rpc_timeout = int(self.rpc_timeout * self.options.timeout_factor) # optionally, increase timeout by a factor def main(self): @@ -231,10 +235,10 @@ def set_binary_paths(self): """Update self.options with the paths of all binaries from environment variables or their default values""" binaries = { - "bitcoind": ("bitcoind", "BITCOIND"), - "bitcoin-cli": ("bitcoincli", "BITCOINCLI"), - "bitcoin-util": ("bitcoinutil", "BITCOINUTIL"), - "bitcoin-wallet": ("bitcoinwallet", "BITCOINWALLET"), + "qtumd": ("bitcoind", "BITCOIND"), + "qtum-cli": ("bitcoincli", "BITCOINCLI"), + "qtum-util": ("bitcoinutil", "BITCOINUTIL"), + "qtum-wallet": ("bitcoinwallet", "BITCOINWALLET"), } for binary, [attribute_name, env_variable_name] in binaries.items(): default_filename = os.path.join( @@ -415,7 +419,7 @@ def setup_nodes(self): self.import_deterministic_coinbase_privkeys() if not self.setup_clean_chain: for n in self.nodes: - assert_equal(n.getblockchaininfo()["blocks"], 199) + assert_equal(n.getblockchaininfo()["blocks"], COINBASE_MATURITY+99) # To ensure that all nodes are out of IBD, the most recent block # must have a timestamp not too old (see IsInitialBlockDownload()). 
self.log.debug('Generate a block with current time') @@ -424,7 +428,7 @@ def setup_nodes(self): for n in self.nodes: n.submitblock(block) chain_info = n.getblockchaininfo() - assert_equal(chain_info["blocks"], 200) + assert_equal(chain_info["blocks"], COINBASE_MATURITY+100) assert_equal(chain_info["initialblockdownload"], False) def import_deterministic_coinbase_privkeys(self): @@ -497,9 +501,9 @@ def get_bin_from_version(version, bin_name, bin_default): if versions is None: versions = [None] * num_nodes if binary is None: - binary = [get_bin_from_version(v, 'bitcoind', self.options.bitcoind) for v in versions] + binary = [get_bin_from_version(v, 'qtumd', self.options.bitcoind) for v in versions] if binary_cli is None: - binary_cli = [get_bin_from_version(v, 'bitcoin-cli', self.options.bitcoincli) for v in versions] + binary_cli = [get_bin_from_version(v, 'qtum-cli', self.options.bitcoincli) for v in versions] assert_equal(len(extra_confs), num_nodes) assert_equal(len(extra_args), num_nodes) assert_equal(len(versions), num_nodes) @@ -841,14 +845,16 @@ def _initialize_chain(self): # see the tip age check in IsInitialBlockDownload(). 
gen_addresses = [k.address for k in TestNode.PRIV_KEYS][:3] + [create_deterministic_address_bcrt1_p2tr_op_true()[0]] assert_equal(len(gen_addresses), 4) - for i in range(8): + for i in range(4): self.generatetoaddress( cache_node, nblocks=25 if i != 7 else 24, address=gen_addresses[i % len(gen_addresses)], ) - assert_equal(cache_node.getblockchaininfo()["blocks"], 199) + for i in range(4): + generatesynchronized(self.nodes[0], COINBASE_MATURITY // 4 if i != 3 else (COINBASE_MATURITY // 4) - 1, TestNode.PRIV_KEYS[i % 4].address, self.nodes) + assert_equal(cache_node.getblockchaininfo()["blocks"], 99+COINBASE_MATURITY) # Shut it down, and clean up cache directories: self.stop_nodes() @@ -859,7 +865,7 @@ def cache_path(*paths): os.rmdir(cache_path('wallets')) # Remove empty wallets dir for entry in os.listdir(cache_path()): - if entry not in ['chainstate', 'blocks', 'indexes']: # Only indexes, chainstate and blocks folders + if entry not in ['chainstate', 'blocks', 'indexes', 'stateQtum']: # Only indexes, chainstate and blocks folders os.remove(cache_path(entry)) for i in range(self.num_nodes): diff --git a/test/functional/test_framework/test_node.py b/test/functional/test_framework/test_node.py index 3baa78fd79..a3e8cd5de7 100755 --- a/test/functional/test_framework/test_node.py +++ b/test/functional/test_framework/test_node.py @@ -40,6 +40,8 @@ wait_until_helper_internal, p2p_port, ) +from .qtum import convert_btc_address_to_qtum +from .qtumconfig import ENABLE_REDUCED_BLOCK_TIME BITCOIND_PROC_WAIT_TIMEOUT = 60 @@ -78,7 +80,7 @@ def __init__(self, i, datadir_path, *, chain, rpchost, timewait, timeout_factor, self.index = i self.p2p_conn_index = 1 self.datadir_path = datadir_path - self.bitcoinconf = self.datadir_path / "bitcoin.conf" + self.bitcoinconf = self.datadir_path / "qtum.conf" self.stdout_dir = self.datadir_path / "stdout" self.stderr_dir = self.datadir_path / "stderr" self.chain = chain @@ -162,18 +164,18 @@ def __init__(self, i, datadir_path, *, chain, 
rpchost, timewait, timeout_factor, AddressKeyPair = collections.namedtuple('AddressKeyPair', ['address', 'key']) PRIV_KEYS = [ # address , privkey - AddressKeyPair('mjTkW3DjgyZck4KbiRusZsqTgaYTxdSz6z', 'cVpF924EspNh8KjYsfhgY96mmxvT6DgdWiTYMtMjuM74hJaU5psW'), - AddressKeyPair('msX6jQXvxiNhx3Q62PKeLPrhrqZQdSimTg', 'cUxsWyKyZ9MAQTaAhUQWJmBbSvHMwSmuv59KgxQV7oZQU3PXN3KE'), - AddressKeyPair('mnonCMyH9TmAsSj3M59DsbH8H63U3RKoFP', 'cTrh7dkEAeJd6b3MRX9bZK8eRmNqVCMH3LSUkE3dSFDyzjU38QxK'), - AddressKeyPair('mqJupas8Dt2uestQDvV2NH3RU8uZh2dqQR', 'cVuKKa7gbehEQvVq717hYcbE9Dqmq7KEBKqWgWrYBa2CKKrhtRim'), - AddressKeyPair('msYac7Rvd5ywm6pEmkjyxhbCDKqWsVeYws', 'cQDCBuKcjanpXDpCqacNSjYfxeQj8G6CAtH1Dsk3cXyqLNC4RPuh'), - AddressKeyPair('n2rnuUnwLgXqf9kk2kjvVm8R5BZK1yxQBi', 'cQakmfPSLSqKHyMFGwAqKHgWUiofJCagVGhiB4KCainaeCSxeyYq'), - AddressKeyPair('myzuPxRwsf3vvGzEuzPfK9Nf2RfwauwYe6', 'cQMpDLJwA8DBe9NcQbdoSb1BhmFxVjWD5gRyrLZCtpuF9Zi3a9RK'), - AddressKeyPair('mumwTaMtbxEPUswmLBBN3vM9oGRtGBrys8', 'cSXmRKXVcoouhNNVpcNKFfxsTsToY5pvB9DVsFksF1ENunTzRKsy'), - AddressKeyPair('mpV7aGShMkJCZgbW7F6iZgrvuPHjZjH9qg', 'cSoXt6tm3pqy43UMabY6eUTmR3eSUYFtB2iNQDGgb3VUnRsQys2k'), - AddressKeyPair('mq4fBNdckGtvY2mijd9am7DRsbRB4KjUkf', 'cN55daf1HotwBAgAKWVgDcoppmUNDtQSfb7XLutTLeAgVc3u8hik'), - AddressKeyPair('mpFAHDjX7KregM3rVotdXzQmkbwtbQEnZ6', 'cT7qK7g1wkYEMvKowd2ZrX1E5f6JQ7TM246UfqbCiyF7kZhorpX3'), - AddressKeyPair('mzRe8QZMfGi58KyWCse2exxEFry2sfF2Y7', 'cPiRWE8KMjTRxH1MWkPerhfoHFn5iHPWVK5aPqjW8NxmdwenFinJ'), + AddressKeyPair(convert_btc_address_to_qtum('mjTkW3DjgyZck4KbiRusZsqTgaYTxdSz6z'), 'cVpF924EspNh8KjYsfhgY96mmxvT6DgdWiTYMtMjuM74hJaU5psW'), + AddressKeyPair(convert_btc_address_to_qtum('msX6jQXvxiNhx3Q62PKeLPrhrqZQdSimTg'), 'cUxsWyKyZ9MAQTaAhUQWJmBbSvHMwSmuv59KgxQV7oZQU3PXN3KE'), + AddressKeyPair(convert_btc_address_to_qtum('mnonCMyH9TmAsSj3M59DsbH8H63U3RKoFP'), 'cTrh7dkEAeJd6b3MRX9bZK8eRmNqVCMH3LSUkE3dSFDyzjU38QxK'), + AddressKeyPair(convert_btc_address_to_qtum('mqJupas8Dt2uestQDvV2NH3RU8uZh2dqQR'), 
'cVuKKa7gbehEQvVq717hYcbE9Dqmq7KEBKqWgWrYBa2CKKrhtRim'), + AddressKeyPair(convert_btc_address_to_qtum('msYac7Rvd5ywm6pEmkjyxhbCDKqWsVeYws'), 'cQDCBuKcjanpXDpCqacNSjYfxeQj8G6CAtH1Dsk3cXyqLNC4RPuh'), + AddressKeyPair(convert_btc_address_to_qtum('n2rnuUnwLgXqf9kk2kjvVm8R5BZK1yxQBi'), 'cQakmfPSLSqKHyMFGwAqKHgWUiofJCagVGhiB4KCainaeCSxeyYq'), + AddressKeyPair(convert_btc_address_to_qtum('myzuPxRwsf3vvGzEuzPfK9Nf2RfwauwYe6'), 'cQMpDLJwA8DBe9NcQbdoSb1BhmFxVjWD5gRyrLZCtpuF9Zi3a9RK'), + AddressKeyPair(convert_btc_address_to_qtum('mumwTaMtbxEPUswmLBBN3vM9oGRtGBrys8'), 'cSXmRKXVcoouhNNVpcNKFfxsTsToY5pvB9DVsFksF1ENunTzRKsy'), + AddressKeyPair(convert_btc_address_to_qtum('mpV7aGShMkJCZgbW7F6iZgrvuPHjZjH9qg'), 'cSoXt6tm3pqy43UMabY6eUTmR3eSUYFtB2iNQDGgb3VUnRsQys2k'), + AddressKeyPair(convert_btc_address_to_qtum('mq4fBNdckGtvY2mijd9am7DRsbRB4KjUkf'), 'cN55daf1HotwBAgAKWVgDcoppmUNDtQSfb7XLutTLeAgVc3u8hik'), + AddressKeyPair(convert_btc_address_to_qtum('mpFAHDjX7KregM3rVotdXzQmkbwtbQEnZ6'), 'cT7qK7g1wkYEMvKowd2ZrX1E5f6JQ7TM246UfqbCiyF7kZhorpX3'), + AddressKeyPair(convert_btc_address_to_qtum('mzRe8QZMfGi58KyWCse2exxEFry2sfF2Y7'), 'cPiRWE8KMjTRxH1MWkPerhfoHFn5iHPWVK5aPqjW8NxmdwenFinJ'), ] def get_deterministic_priv_key(self): @@ -232,8 +234,12 @@ def start(self, extra_args=None, *, cwd=None, stdout=None, stderr=None, env=None # add environment variable LIBC_FATAL_STDERR_=1 so that libc errors are written to stderr and not the terminal subp_env = dict(os.environ, LIBC_FATAL_STDERR_="1") - if env is not None: - subp_env.update(env) + if not any(arg.startswith('-staking=') for arg in extra_args): + extra_args.append('-staking=0') + + # Disable the spam filter as it may interfere with some tests sending lots and lots of blocks + if not any(arg.startswith('-headerspamfilter') for arg in extra_args): + extra_args.append('-headerspamfilter=0') self.process = subprocess.Popen(self.args + extra_args, env=subp_env, stdout=stdout, stderr=stderr, cwd=cwd, **kwargs) @@ -334,15 +340,15 @@ def 
generate(self, nblocks, maxtries=1000000, **kwargs): self.log.debug("TestNode.generate() dispatches `generate` call to `generatetoaddress`") return self.generatetoaddress(nblocks=nblocks, address=self.get_deterministic_priv_key().address, maxtries=maxtries, **kwargs) - def generateblock(self, *args, invalid_call, **kwargs): + def generateblock(self, *args, invalid_call=False, **kwargs): assert not invalid_call return self.__getattr__('generateblock')(*args, **kwargs) - def generatetoaddress(self, *args, invalid_call, **kwargs): + def generatetoaddress(self, *args, invalid_call=False, **kwargs): assert not invalid_call return self.__getattr__('generatetoaddress')(*args, **kwargs) - def generatetodescriptor(self, *args, invalid_call, **kwargs): + def generatetodescriptor(self, *args, invalid_call=False, **kwargs): assert not invalid_call return self.__getattr__('generatetodescriptor')(*args, **kwargs) diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py index b4b05b1597..3b17a9b720 100644 --- a/test/functional/test_framework/util.py +++ b/test/functional/test_framework/util.py @@ -5,6 +5,7 @@ """Helpful routines for regression testing.""" from base64 import b64encode +from binascii import unhexlify, hexlify from decimal import Decimal, ROUND_DOWN from subprocess import CalledProcessError import hashlib @@ -22,6 +23,7 @@ from collections.abc import Callable from typing import Optional +from .qtumconfig import COINBASE_MATURITY logger = logging.getLogger("TestFramework.utils") # Assert functions @@ -214,6 +216,12 @@ def check_json_precision(): def count_bytes(hex_string): return len(bytearray.fromhex(hex_string)) +def hex_str_to_bytes(hex_str): + return unhexlify(hex_str.encode('ascii')) + + +def bytes_to_hex_str(byte_str): + return hexlify(byte_str).decode('ascii') def str_to_b64str(string): return b64encode(string.encode('utf-8')).decode('ascii') @@ -251,7 +259,7 @@ def wait_until_helper_internal(predicate, *, 
attempts=float('inf'), timeout=floa `p2p.py` has a preset lock. """ if attempts == float('inf') and timeout == float('inf'): - timeout = 60 + timeout = 180 timeout = timeout * timeout_factor attempt = 0 time_end = time.time() + timeout @@ -358,7 +366,7 @@ def initialize_datadir(dirname, n, chain, disable_autoconnect=True): datadir = get_datadir_path(dirname, n) if not os.path.isdir(datadir): os.makedirs(datadir) - write_config(os.path.join(datadir, "bitcoin.conf"), n=n, chain=chain, disable_autoconnect=disable_autoconnect) + write_config(os.path.join(datadir, "qtum.conf"), n=n, chain=chain, disable_autoconnect=disable_autoconnect) os.makedirs(os.path.join(datadir, 'stderr'), exist_ok=True) os.makedirs(os.path.join(datadir, 'stdout'), exist_ok=True) return datadir @@ -427,7 +435,7 @@ def get_temp_default_datadir(temp_dir: pathlib.Path) -> tuple[dict, pathlib.Path def append_config(datadir, options): - with open(os.path.join(datadir, "bitcoin.conf"), 'a', encoding='utf8') as f: + with open(os.path.join(datadir, "qtum.conf"), 'a', encoding='utf8') as f: for option in options: f.write(option + "\n") @@ -435,8 +443,8 @@ def append_config(datadir, options): def get_auth_cookie(datadir, chain): user = None password = None - if os.path.isfile(os.path.join(datadir, "bitcoin.conf")): - with open(os.path.join(datadir, "bitcoin.conf"), 'r', encoding='utf8') as f: + if os.path.isfile(os.path.join(datadir, "qtum.conf")): + with open(os.path.join(datadir, "qtum.conf"), 'r', encoding='utf8') as f: for line in f: if line.startswith("rpcuser="): assert user is None # Ensure that there is only one rpcuser line @@ -483,6 +491,24 @@ def check_node_connections(*, node, num_in, num_out): # Transaction/Block functions ############################# +def sync_blocks(rpc_connections, *, wait=1, timeout=60): + """ + Wait until everybody has the same tip. 
+ + sync_blocks needs to be called with an rpc_connections set that has least + one node already synced to the latest, stable tip, otherwise there's a + chance it might return before all nodes are stably synced. + """ + stop_time = time.time() + timeout + while time.time() <= stop_time: + best_hash = [x.getbestblockhash() for x in rpc_connections] + if best_hash.count(best_hash[0]) == len(rpc_connections): + return + # Check that each peer has at least one connection + assert (all([len(x.getpeerinfo()) for x in rpc_connections])) + time.sleep(wait) + raise AssertionError("Block sync timed out:{}".format("".join("\n {!r}".format(b) for b in best_hash))) + # Create large OP_RETURN txouts that can be appended to a transaction # to make it large (helper for constructing large transactions). The @@ -514,7 +540,7 @@ def create_lots_of_big_transactions(mini_wallet, node, fee, tx_batch_size, txout def mine_large_block(test_framework, mini_wallet, node): # generate a 66k transaction, - # and 14 of them is close to the 1MB block limit + # and 28 of them is close to the 1MB block limit txouts = gen_return_txouts() fee = 100 * node.getnetworkinfo()["relayfee"] create_lots_of_big_transactions(mini_wallet, node, fee, 14, txouts) diff --git a/test/functional/test_framework/wallet.py b/test/functional/test_framework/wallet.py index 470ed08ed4..a9564a38b9 100644 --- a/test/functional/test_framework/wallet.py +++ b/test/functional/test_framework/wallet.py @@ -54,7 +54,7 @@ ) from test_framework.wallet_util import generate_keypair -DEFAULT_FEE = Decimal("0.0001") +DEFAULT_FEE = Decimal("0.05") class MiniWalletMode(Enum): """Determines the transaction type the MiniWallet is creating and spending. 
@@ -207,14 +207,18 @@ def get_address(self): assert_equal(self._mode, MiniWalletMode.ADDRESS_OP_TRUE) return self._address - def get_utxo(self, *, txid: str = '', vout: Optional[int] = None, mark_as_spent=True, confirmed_only=False) -> dict: + def get_utxo(self, *, txid: str = '', vout: Optional[int] = None, mark_as_spent=True, confirmed_only=False, sort_by_height=False) -> dict: """ Returns a utxo and marks it as spent (pops it from the internal list) Args: txid: get the first utxo we find from a specific transaction """ - self._utxos = sorted(self._utxos, key=lambda k: (k['value'], -k['height'])) # Put the largest utxo last + if sort_by_height: + self._utxos = sorted(self._utxos, key=lambda k: (-k['height'], k['value'])) + else: + # Put the largest utxo last + self._utxos = sorted(self._utxos, key=lambda k: (k['value'], -k['height'])) # Put the largest utxo last blocks_height = self._test_node.getblockchaininfo()['blocks'] mature_coins = list(filter(lambda utxo: not utxo['coinbase'] or COINBASE_MATURITY - 1 <= blocks_height - utxo['height'], self._utxos)) if txid: @@ -251,7 +255,7 @@ def send_self_transfer(self, *, from_node, **kwargs): self.sendrawtransaction(from_node=from_node, tx_hex=tx['hex']) return tx - def send_to(self, *, from_node, scriptPubKey, amount, fee=1000): + def send_to(self, *, from_node, scriptPubKey, amount, fee=80000, sort_by_height=False): """ Create and send a tx with an output to a given scriptPubKey/amount, plus a change output to our internal address. To keep things simple, a @@ -261,7 +265,7 @@ def send_to(self, *, from_node, scriptPubKey, amount, fee=1000): available that can cover the cost for the amount and the fixed fee (the utxo with the largest value is taken). 
""" - tx = self.create_self_transfer(fee_rate=0)["tx"] + tx = self.create_self_transfer(fee_rate=0, sort_by_height=sort_by_height)["tx"] assert_greater_than_or_equal(tx.vout[0].nValue, amount + fee) tx.vout[0].nValue -= (amount + fee) # change output -> MiniWallet tx.vout.append(CTxOut(amount, scriptPubKey)) # arbitrary output -> to be returned @@ -289,7 +293,7 @@ def create_self_transfer_multi( version=2, locktime=0, sequence=0, - fee_per_output=1000, + fee_per_output=80000, target_weight=0, confirmed_only=False, ): @@ -347,10 +351,11 @@ def create_self_transfer( utxo_to_spend=None, target_weight=0, confirmed_only=False, + sort_by_height=False, **kwargs, ): """Create and return a tx with the specified fee. If fee is 0, use fee_rate, where the resulting fee may be exact or at most one satoshi higher than needed.""" - utxo_to_spend = utxo_to_spend or self.get_utxo(confirmed_only=confirmed_only) + utxo_to_spend = utxo_to_spend or self.get_utxo(confirmed_only=confirmed_only, sort_by_height=sort_by_height) assert fee_rate >= 0 assert fee >= 0 # calculate fee