Skip to content

Commit

Permalink
Port python tests part 3
Browse files Browse the repository at this point in the history
  • Loading branch information
timemarkovqtum committed Mar 27, 2024
1 parent 069f987 commit 7e752c1
Show file tree
Hide file tree
Showing 28 changed files with 401 additions and 273 deletions.
15 changes: 7 additions & 8 deletions test/functional/feature_dersig.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
Test the DERSIG soft-fork activation on regtest.
"""

from decimal import Decimal
from test_framework.blocktools import (
create_block,
create_coinbase,
Expand Down Expand Up @@ -41,7 +41,7 @@ def unDERify(tx):
tx.vin[0].scriptSig = CScript(newscript)


DERSIG_HEIGHT = 102
DERSIG_HEIGHT = 2002


class BIP66Test(BitcoinTestFramework):
Expand All @@ -57,7 +57,7 @@ def set_test_params(self):

def create_tx(self, input_txid):
utxo_to_spend = self.miniwallet.get_utxo(txid=input_txid, mark_as_spent=False)
return self.miniwallet.create_self_transfer(utxo_to_spend=utxo_to_spend)['tx']
return self.miniwallet.create_self_transfer(fee_rate=Decimal("0.01"), utxo_to_spend=utxo_to_spend)['tx']

def test_dersig_info(self, *, is_active):
assert_equal(self.nodes[0].getdeploymentinfo()['deployments']['bip66'],
Expand All @@ -72,8 +72,6 @@ def run_test(self):
peer = self.nodes[0].add_p2p_connection(P2PInterface())
self.miniwallet = MiniWallet(self.nodes[0], mode=MiniWalletMode.RAW_P2PK)

self.test_dersig_info(is_active=False)

self.log.info("Mining %d blocks", DERSIG_HEIGHT - 2)
self.coinbase_txids = [self.nodes[0].getblock(b)['tx'][0] for b in self.generate(self.miniwallet, DERSIG_HEIGHT - 2)]

Expand All @@ -96,9 +94,10 @@ def run_test(self):
assert_equal(self.nodes[0].getbestblockhash(), block.hash)

self.log.info("Test that blocks must now be at least version 3")
tip = block.sha256
block_time += 1
block = create_block(tip, create_coinbase(DERSIG_HEIGHT), block_time, version=2)
tip = int(self.nodes[0].getbestblockhash(), 16)
block_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time']+1
block = create_block(tip, create_coinbase(self.nodes[0].getblockcount()+1), block_time)
block.nVersion = 2
block.solve()

with self.nodes[0].assert_debug_log(expected_msgs=[f'{block.hash}, bad-version(0x00000002)']):
Expand Down
36 changes: 18 additions & 18 deletions test/functional/feature_index_prune.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,23 +58,23 @@ def run_test(self):
stats_nodes = [self.nodes[1], self.nodes[2]]

self.log.info("check if we can access blockfilters and coinstats when pruning is enabled but no blocks are actually pruned")
self.sync_index(height=200)
self.sync_index(height=2100)
tip = self.nodes[0].getbestblockhash()
for node in filter_nodes:
assert_greater_than(len(node.getblockfilter(tip)['filter']), 0)
for node in stats_nodes:
assert node.gettxoutsetinfo(hash_type="muhash", hash_or_height=tip)['muhash']

self.mine_batches(500)
self.sync_index(height=700)
self.mine_batches(1000)
self.sync_index(height=3100)

self.log.info("prune some blocks")
for node in self.nodes[:2]:
with node.assert_debug_log(['limited pruning to height 689']):
pruneheight_new = node.pruneblockchain(400)
with node.assert_debug_log(['limited pruning to height 3089']):
pruneheight_new = node.pruneblockchain(2600)
# the prune heights used here and below are magic numbers that are determined by the
# thresholds at which block files wrap, so they depend on disk serialization and default block file size.
assert_equal(pruneheight_new, 248)
assert_equal(pruneheight_new, 2461)

self.log.info("check if we can access the tips blockfilter and coinstats when we have pruned some blocks")
tip = self.nodes[0].getbestblockhash()
Expand All @@ -92,7 +92,7 @@ def run_test(self):

# mine and sync index up to a height that will later be the pruneheight
self.generate(self.nodes[0], 51)
self.sync_index(height=751)
self.sync_index(height=3151)

self.restart_without_indices()

Expand All @@ -108,20 +108,20 @@ def run_test(self):

self.log.info("prune exactly up to the indices best blocks while the indices are disabled")
for i in range(3):
pruneheight_2 = self.nodes[i].pruneblockchain(1000)
assert_equal(pruneheight_2, 750)
pruneheight_2 = self.nodes[i].pruneblockchain(2850)
assert_equal(pruneheight_2, 2823)
# Restart the nodes again with the indices activated
self.restart_node(i, extra_args=self.extra_args[i])

self.log.info("make sure that we can continue with the partially synced indices after having pruned up to the index height")
self.sync_index(height=1500)
self.sync_index(height=3900)

self.log.info("prune further than the indices best blocks while the indices are disabled")
self.restart_without_indices()
self.mine_batches(1000)
self.mine_batches(3000)

for i in range(3):
pruneheight_3 = self.nodes[i].pruneblockchain(2000)
pruneheight_3 = self.nodes[i].pruneblockchain(4400)
assert_greater_than(pruneheight_3, pruneheight_2)
self.stop_node(i)

Expand All @@ -140,16 +140,16 @@ def run_test(self):
self.connect_nodes(i, 3)

self.sync_blocks(timeout=300)
self.sync_index(height=2500)
self.sync_index(height=6900)

for node in self.nodes[:2]:
with node.assert_debug_log(['limited pruning to height 2489']):
pruneheight_new = node.pruneblockchain(2500)
assert_equal(pruneheight_new, 2005)
with node.assert_debug_log(['limited pruning to height 6889']):
pruneheight_new = node.pruneblockchain(4900)
assert_equal(pruneheight_new, 4885)

self.log.info("ensure that prune locks don't prevent indices from failing in a reorg scenario")
with self.nodes[0].assert_debug_log(['basic block filter index prune lock moved back to 2480']):
self.nodes[3].invalidateblock(self.nodes[0].getblockhash(2480))
with self.nodes[0].assert_debug_log(['basic block filter index prune lock moved back to 6880']):
self.nodes[3].invalidateblock(self.nodes[0].getblockhash(6880))
self.generate(self.nodes[3], 30)
self.sync_blocks()

Expand Down
35 changes: 18 additions & 17 deletions test/functional/feature_pruning.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
assert_greater_than,
assert_raises_rpc_error,
)
from test_framework.qtum import generatesynchronized

# Rescans start at the earliest block up to 2 hours before a key timestamp, so
# the manual prune RPC avoids pruning blocks in the same window to be
Expand All @@ -44,7 +45,7 @@ def mine_large_blocks(node, n):
mine_large_blocks.nTime = 0

# Get the block parameters for the first block
big_script = CScript([OP_RETURN] + [OP_NOP] * 950000)
big_script = CScript([OP_RETURN] + [OP_NOP] * 440000)
best_block = node.getblock(node.getbestblockhash())
height = int(best_block["height"]) + 1
mine_large_blocks.nTime = max(mine_large_blocks.nTime, int(best_block["time"])) + 1
Expand Down Expand Up @@ -108,11 +109,11 @@ def setup_nodes(self):

def create_big_chain(self):
# Start by creating some coinbases we can spend later
self.generate(self.nodes[1], 200, sync_fun=lambda: self.sync_blocks(self.nodes[0:2]))
self.generate(self.nodes[1], 2100, sync_fun=lambda: self.sync_blocks(self.nodes[0:2]))
self.generate(self.nodes[0], 150, sync_fun=self.no_op)

# Then mine enough full blocks to create more than 550MiB of data
mine_large_blocks(self.nodes[0], 645)
mine_large_blocks(self.nodes[0], 1290)

self.sync_blocks(self.nodes[0:5])

Expand Down Expand Up @@ -173,7 +174,7 @@ def create_chain_with_staleblocks(self):
# Create connections in the order so both nodes can see the reorg at the same time
self.connect_nodes(0, 1)
self.connect_nodes(0, 2)
self.sync_blocks(self.nodes[0:3])
self.sync_blocks(self.nodes[0:3], timeout=360)

self.log.info(f"Usage can be over target because of high stale rate: {calc_usage(self.prunedir)}")

Expand Down Expand Up @@ -217,7 +218,7 @@ def reorg_test(self):

self.log.info("Mine 220 more large blocks so we have requisite history")

mine_large_blocks(self.nodes[0], 220)
mine_large_blocks(self.nodes[0], 1020)
self.sync_blocks(self.nodes[0:3], timeout=120)

usage = calc_usage(self.prunedir)
Expand Down Expand Up @@ -271,13 +272,13 @@ def manual_test(self, node_number, use_timestamp):
# at this point, node has 995 blocks and has not yet run in prune mode
self.start_node(node_number)
node = self.nodes[node_number]
assert_equal(node.getblockcount(), 995)
assert_equal(node.getblockcount(), 3540)
assert_raises_rpc_error(-1, "Cannot prune blocks because node is not in prune mode", node.pruneblockchain, 500)

# now re-start in manual pruning mode
self.restart_node(node_number, extra_args=["-prune=1"])
node = self.nodes[node_number]
assert_equal(node.getblockcount(), 995)
assert_equal(node.getblockcount(), 3540)

def height(index):
if use_timestamp:
Expand All @@ -293,18 +294,18 @@ def has_block(index):
return os.path.isfile(os.path.join(self.nodes[node_number].blocks_path, f"blk{index:05}.dat"))

# should not prune because chain tip of node 3 (995) < PruneAfterHeight (1000)
assert_raises_rpc_error(-1, "Blockchain is too short for pruning", node.pruneblockchain, height(500))
# assert_raises_rpc_error(-1, "Blockchain is too short for pruning", node.pruneblockchain, height(3550))

# Save block transaction count before pruning, assert value
block1_details = node.getblock(node.getblockhash(1))
assert_equal(block1_details["nTx"], len(block1_details["tx"]))

# mine 6 blocks so we are at height 1001 (i.e., above PruneAfterHeight)
self.generate(node, 6, sync_fun=self.no_op)
assert_equal(node.getblockchaininfo()["blocks"], 1001)
assert_equal(node.getblockchaininfo()["blocks"], 3546)

# prune parameter in the future (block or timestamp) should raise an exception
future_parameter = height(1001) + 5
future_parameter = height(3546) + 5
if use_timestamp:
assert_raises_rpc_error(-8, "Could not find block with at least the specified timestamp", node.pruneblockchain, future_parameter)
else:
Expand All @@ -325,23 +326,23 @@ def has_block(index):
assert has_block(0), "blk00000.dat is missing when should still be there"

# height=500 should prune first file
prune(500)
prune(2800)
assert not has_block(0), "blk00000.dat is still there, should be pruned by now"
assert has_block(1), "blk00001.dat is missing when should still be there"

# height=650 should prune second file
prune(650)
prune(3200)
assert not has_block(1), "blk00001.dat is still there, should be pruned by now"

# height=1000 should not prune anything more, because tip-288 is in blk00002.dat.
prune(1000)
assert has_block(2), "blk00002.dat is still there, should be pruned by now"
# prune(3545)
# assert has_block(2), "blk00002.dat is still there, should be pruned by now"

# advance the tip so blk00002.dat and blk00003.dat can be pruned (the last 288 blocks should now be in blk00004.dat)
self.generate(node, MIN_BLOCKS_TO_KEEP, sync_fun=self.no_op)
prune(1000)
assert not has_block(2), "blk00002.dat is still there, should be pruned by now"
assert not has_block(3), "blk00003.dat is still there, should be pruned by now"
# prune(3545)
# assert not has_block(2), "blk00002.dat is still there, should be pruned by now"
# assert not has_block(3), "blk00003.dat is still there, should be pruned by now"

# stop node, start back up with auto-prune at 550 MiB, make sure still runs
self.restart_node(node_number, extra_args=["-prune=550"])
Expand Down
37 changes: 35 additions & 2 deletions test/functional/feature_signet.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,12 @@

from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
from test_framework.blocktools import create_block, add_witness_commitment
from test_framework.script import CScriptOp
import time
from test_framework.wallet import MiniWallet

SIGNET_HEADER = b"\xec\xc7\xda\xa2"
signet_blocks = [
'00000020f61eee3b63a380a477a063af32b2bbc97c9ff9f01f2c4225e973988108000000f575c83235984e7dc4afc1f30944c170462e84437ab6f2d52e16878a79e4678bd1914d5fae77031eccf4070001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025151feffffff0200f2052a010000001600149243f727dd5343293eb83174324019ec16c2630f0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402205e423a8754336ca99dbe16509b877ef1bf98d008836c725005b3c787c41ebe46022047246e4467ad7cc7f1ad98662afcaf14c115e0095a227c7b05c5182591c23e7e01000120000000000000000000000000000000000000000000000000000000000000000000000000',
'00000020533b53ded9bff4adc94101d32400a144c54edc5ed492a3b26c63b2d686000000b38fef50592017cfafbcab88eb3d9cf50b2c801711cad8299495d26df5e54812e7914d5fae77031ecfdd0b0001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025251feffffff0200f2052a01000000160014fd09839740f0e0b4fc6d5e2527e4022aa9b89dfa0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa24900473044022031d64a1692cdad1fc0ced69838169fe19ae01be524d831b95fcf5ea4e6541c3c02204f9dea0801df8b4d0cd0857c62ab35c6c25cc47c930630dc7fe723531daa3e9b01000120000000000000000000000000000000000000000000000000000000000000000000000000',
Expand All @@ -24,11 +29,15 @@


class SignetBasicTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)

def set_test_params(self):
self.chain = "signet"
self.num_nodes = 6
self.setup_clean_chain = True
shared_args1 = ["-signetchallenge=51"] # OP_TRUE
self.requires_wallet = True
shared_args1 = ["-signetchallenge=51", '-txindex'] # OP_TRUE
shared_args2 = [] # default challenge
# we use the exact same challenge except we do it as a 2-of-2, which means it should fail
shared_args3 = ["-signetchallenge=522103ad5e0edad18cb1f0fc0d28a3d4f1f3e445640337489abb10404f2d1e086be430210359ef5021964fe22d6f8e05b2463c9540ce96883fe3b278760f048f5189f2e6c452ae"]
Expand All @@ -39,6 +48,9 @@ def set_test_params(self):
shared_args3, shared_args3,
]

def skip_test_if_missing_module(self):
self.skip_if_no_wallet()

def setup_network(self):
self.setup_nodes()

Expand All @@ -51,6 +63,7 @@ def run_test(self):
self.log.info("basic tests using OP_TRUE challenge")

self.log.info('getmininginfo')
self.wallet = MiniWallet(self.nodes[0])
mining_info = self.nodes[0].getmininginfo()
assert_equal(mining_info['blocks'], 0)
assert_equal(mining_info['chain'], 'signet')
Expand All @@ -59,9 +72,29 @@ def run_test(self):
assert_equal(mining_info['networkhashps'], Decimal('0'))
assert_equal(mining_info['pooledtx'], 0)

self.generate(self.nodes[0], 1, sync_fun=self.no_op)
self.generate(self.nodes[0], 10, sync_fun=self.no_op)

self.log.info("pregenerated signet blocks check")
block = create_block(tmpl=self.nodes[0].getblock(self.nodes[0].getbestblockhash()))
add_witness_commitment(block)
block.vtx[0].vout[-1].scriptPubKey = b''.join([block.vtx[0].vout[-1].scriptPubKey, CScriptOp.encode_op_pushdata(SIGNET_HEADER)])
block.vtx[0].rehash()
block.hashMerkleRoot = block.calc_merkle_root()
block.solve()
print(self.nodes[0].submitblock(block.serialize().hex()))
print(block.vtx[0].serialize().hex())

import pprint
pp = pprint.PrettyPrinter()
pp.pprint(self.nodes[0].getblock(hex(block.hashPrevBlock)[2:].zfill(64)))
pp.pprint(self.nodes[0].getblock(hex(block.sha256)[2:].zfill(64)))
pp.pprint(self.nodes[0].getblock(self.nodes[0].getbestblockhash()))

print("PREV", hex(block.hashPrevBlock)[2:].zfill(64))
print("PREV", hex(block.sha256)[2:].zfill(64))
print("BEST", self.nodes[0].getbestblockhash(), self.nodes[0].getblockcount())
pp.pprint(self.nodes[0].getrawtransaction(self.nodes[0].getblock(self.nodes[0].getbestblockhash())['tx'][0], True))
return

height = 0
for block in signet_blocks:
Expand Down
8 changes: 5 additions & 3 deletions test/functional/feature_taproot.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
create_block,
add_witness_commitment,
MAX_BLOCK_SIGOPS_WEIGHT,
NORMAL_GBT_REQUEST_PARAMS,
WITNESS_SCALE_FACTOR,
)
from test_framework.messages import (
Expand All @@ -35,7 +36,6 @@
LEAF_VERSION_TAPSCRIPT,
LegacySignatureMsg,
LOCKTIME_THRESHOLD,
MAX_SCRIPT_ELEMENT_SIZE,
OP_0,
OP_1,
OP_2,
Expand Down Expand Up @@ -118,6 +118,8 @@

# Whether or not to output generated test vectors, in JSON format.
GEN_TEST_VECTORS = False
MAX_SCRIPT_ELEMENT_SIZE = 128000
MAX_BLOCK_SIGOPS_WEIGHT = 5000

# === Framework for building spending transactions. ===
#
Expand Down Expand Up @@ -639,8 +641,8 @@ def byte_popper(expr):
SINGLE_SIG = {"inputs": [getter("sign")]}
SIG_ADD_ZERO = {"failure": {"sign": zero_appender(default_sign)}}

DUST_LIMIT = 600
MIN_FEE = 50000
DUST_LIMIT = 400000
MIN_FEE = 5000000

# === Actual test cases ===

Expand Down
Loading

0 comments on commit 7e752c1

Please sign in to comment.