From dcb876c6fc5abb67de5e513d17bfac4d526d6a7d Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Tue, 30 Jul 2024 15:30:03 -0300 Subject: [PATCH 01/61] fix: fix timestamps on gen-genesis CLI (#1094) --- hathor/transaction/genesis.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/hathor/transaction/genesis.py b/hathor/transaction/genesis.py index 3ec107aac..ec2793496 100644 --- a/hathor/transaction/genesis.py +++ b/hathor/transaction/genesis.py @@ -68,21 +68,21 @@ def generate_new_genesis( weight=min_block_weight, outputs=[TxOutput(tokens, output_script)], ) - mining_service.start_mining(block) + mining_service.start_mining(block, update_time=False) block.update_hash() tx1 = Transaction( timestamp=block_timestamp + 1, weight=min_tx_weight, ) - mining_service.start_mining(tx1) + mining_service.start_mining(tx1, update_time=False) tx1.update_hash() tx2 = Transaction( timestamp=block_timestamp + 2, weight=min_tx_weight, ) - mining_service.start_mining(tx2) + mining_service.start_mining(tx2, update_time=False) tx2.update_hash() return block, tx1, tx2 From f3481ab1b2bd774653f5be58eccd35270a9fd4b5 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Tue, 6 Aug 2024 12:12:42 -0300 Subject: [PATCH 02/61] feat: add sync-v2 benchmark to CI (#1083) --- .github/actions/setup-hathor-env/action.yml | 38 +++++++++++++++++ .github/workflows/base_benchmarks.yml | 34 ++++++++++++++++ .github/workflows/main.yml | 23 ++--------- .github/workflows/pr_benchmarks.yml | 41 +++++++++++++++++++ extras/benchmarking/benchmark_sync_v2.sh | 45 +++++++++++++++++++++ 5 files changed, 162 insertions(+), 19 deletions(-) create mode 100644 .github/actions/setup-hathor-env/action.yml create mode 100644 .github/workflows/base_benchmarks.yml create mode 100644 .github/workflows/pr_benchmarks.yml create mode 100755 extras/benchmarking/benchmark_sync_v2.sh diff --git a/.github/actions/setup-hathor-env/action.yml b/.github/actions/setup-hathor-env/action.yml new file mode 100644 index 000000000..46df73176 --- /dev/null +++ b/.github/actions/setup-hathor-env/action.yml @@ -0,0 +1,38 @@ +name: setup-hathor-env +description: Setup Hathor node environment +inputs: + python: + description: The python version + os: + description: The OS name +runs: + using: composite + steps: + - name: Install Poetry + shell: bash + run: pipx install poetry + + - name: Set up Python ${{ inputs.python }} + uses: actions/setup-python@v4 + with: + python-version: ${{ inputs.python }} + cache: 'poetry' + + - name: Install Ubuntu dependencies + if: startsWith(inputs.os, 'ubuntu') + run: | + sudo apt-get -qy update + sudo apt-get -qy install graphviz librocksdb-dev libsnappy-dev liblz4-dev + shell: bash + + - name: Install macOS dependencies + if: startsWith(inputs.os, 'macos') + run: | + brew cleanup -q + # brew update -q + brew install -q graphviz rocksdb pkg-config + shell: bash + + - name: Install Poetry dependencies + run: poetry install -n --no-root + shell: bash diff --git a/.github/workflows/base_benchmarks.yml b/.github/workflows/base_benchmarks.yml new file mode 100644 index 000000000..a2e59a68d --- /dev/null +++ b/.github/workflows/base_benchmarks.yml @@ -0,0 +1,34 @@ +# yamllint disable rule:line-length +name: benchmarking +on: # yamllint disable-line rule:truthy + push: + branches: + - master + +jobs: + benchmark_base_branch: + name: Continuous Benchmarking base branch + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + - uses: bencherdev/bencher@main + - name: Install hyperfine + run: | + wget 
https://github.com/sharkdp/hyperfine/releases/download/v1.12.0/hyperfine_1.12.0_amd64.deb + sudo dpkg -i hyperfine_1.12.0_amd64.deb + - uses: ./.github/actions/setup-hathor-env + name: Setup Hathor node environment + with: + python: 3.11 + os: ubuntu-22.04 + - name: Track base branch benchmarks with Bencher + run: | + bencher run \ + --project hathor-core \ + --token '${{ secrets.BENCHER_API_TOKEN }}' \ + --branch master \ + --testbed ubuntu-22.04 \ + --adapter shell_hyperfine \ + --err \ + --file bench_results.json \ + './extras/benchmarking/benchmark_sync_v2.sh' diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 8e82414ef..befb9bc11 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -63,26 +63,11 @@ jobs: steps: - name: Checkout uses: actions/checkout@v3 - - name: Install Poetry - run: pipx install poetry - - name: Set up Python ${{ matrix.python }} - uses: actions/setup-python@v4 + - uses: ./.github/actions/setup-hathor-env + name: Setup Hathor node environment with: - python-version: ${{ matrix.python }} - cache: 'poetry' - - name: Install Ubuntu dependencies - if: startsWith(matrix.os, 'ubuntu') - run: | - sudo apt-get -qy update - sudo apt-get -qy install graphviz librocksdb-dev libsnappy-dev liblz4-dev - - name: Install macOS dependencies - if: startsWith(matrix.os, 'macos') - run: | - brew cleanup -q - # brew update -q - brew install -q graphviz rocksdb pkg-config - - name: Install Poetry dependencies - run: poetry install -n --no-root + python: ${{ matrix.python }} + os: ${{ matrix.os }} - name: Cache mypy uses: actions/cache@v3 with: diff --git a/.github/workflows/pr_benchmarks.yml b/.github/workflows/pr_benchmarks.yml new file mode 100644 index 000000000..2aa2e16a4 --- /dev/null +++ b/.github/workflows/pr_benchmarks.yml @@ -0,0 +1,41 @@ +# yamllint disable rule:line-length +name: benchmarking +on: # yamllint disable-line rule:truthy + pull_request: + branches: + - master + +jobs: + benchmark_pr_branch: + name: Continuous Benchmarking PRs + # DO NOT REMOVE: For handling Fork PRs see Pull Requests from Forks + if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository + permissions: + pull-requests: write + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + - uses: bencherdev/bencher@main + - name: Install hyperfine + run: | + wget https://github.com/sharkdp/hyperfine/releases/download/v1.12.0/hyperfine_1.12.0_amd64.deb + sudo dpkg -i hyperfine_1.12.0_amd64.deb + - uses: ./.github/actions/setup-hathor-env + name: Setup Hathor node environment + with: + python: 3.11 + os: ubuntu-22.04 + - name: Track PR Benchmarks with Bencher + run: | + bencher run \ + --project hathor-core \ + --token '${{ secrets.BENCHER_API_TOKEN }}' \ + --branch '${{ github.head_ref }}' \ + --branch-start-point '${{ github.base_ref }}' \ + --branch-start-point-hash '${{ github.event.pull_request.base.sha }}' \ + --testbed ubuntu-22.04 \ + --adapter shell_hyperfine \ + --err \ + --github-actions '${{ secrets.GITHUB_TOKEN }}' \ + --file bench_results.json \ + './extras/benchmarking/benchmark_sync_v2.sh' diff --git a/extras/benchmarking/benchmark_sync_v2.sh b/extras/benchmarking/benchmark_sync_v2.sh new file mode 100755 index 000000000..9df5b87c4 --- /dev/null +++ b/extras/benchmarking/benchmark_sync_v2.sh @@ -0,0 +1,45 @@ +N_BLOCKS=20000 +CACHE_SIZE=100000 +TESTNET_DATA_DIR=server-data +TCP_PORT=40403 +AWAIT_INIT_DELAY=10 +N_RUNS=2 +BENCH_FILE=bench_results.json +BENCH_DATA_DIR=bench-data + 
+BLUE='\033[0;34m' +NO_COLOR='\033[0m' + +echo "${BLUE}Downloading testnet data...${NO_COLOR}" +mkdir $TESTNET_DATA_DIR +poetry run hathor-cli quick_test --testnet --data $TESTNET_DATA_DIR --quit-after-n-blocks $N_BLOCKS > /dev/null 2>&1 + +echo "${BLUE}Running server node in the background...${NO_COLOR}" +poetry run hathor-cli run_node \ + --testnet \ + --data $TESTNET_DATA_DIR \ + --cache \ + --cache-size $CACHE_SIZE \ + --x-localhost-only \ + --listen tcp:$TCP_PORT \ + > /dev/null 2>&1 & + +# Await initialization +sleep $AWAIT_INIT_DELAY + +echo "${BLUE}Running benchmark...${NO_COLOR}" +hyperfine \ + --runs $N_RUNS \ + --export-json $BENCH_FILE \ + --command-name "sync-v2 (up to $N_BLOCKS blocks)" \ + --prepare "rm -rf $BENCH_DATA_DIR && mkdir $BENCH_DATA_DIR" \ + " + poetry run hathor-cli quick_test \ + --testnet \ + --data $BENCH_DATA_DIR \ + --cache \ + --cache-size $CACHE_SIZE \ + --x-localhost-only \ + --bootstrap tcp://localhost:$TCP_PORT \ + --quit-after-n-blocks $N_BLOCKS + " From 5e999598ea27550920a8b6a510249914dac96cbb Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Mon, 12 Aug 2024 18:21:55 -0300 Subject: [PATCH 03/61] feat(event-queue): add tx data to VERTEX_REMOVED (#1106) --- hathor/cli/events_simulator/scenario.py | 5 +---- hathor/consensus/consensus.py | 2 +- hathor/event/model/event_data.py | 15 +++++---------- hathor/event/model/event_type.py | 4 ++-- hathor/pubsub.py | 4 +--- hathor/transaction/resources/transaction.py | 9 +++++++-- tests/event/test_event_simulation_scenarios.py | 4 ++-- tests/event/websocket/test_protocol.py | 4 ++-- 8 files changed, 21 insertions(+), 26 deletions(-) diff --git a/hathor/cli/events_simulator/scenario.py b/hathor/cli/events_simulator/scenario.py index cf03f9cce..315a633d2 100644 --- a/hathor/cli/events_simulator/scenario.py +++ b/hathor/cli/events_simulator/scenario.py @@ -171,9 +171,6 @@ def simulate_invalid_mempool_transaction(simulator: 'Simulator', manager: 'Hatho assert manager.propagate_tx(b0, fails_silently=False) simulator.run(60) - # the transaction should have been removed from the mempool + # the transaction should have been removed from the mempool and the storage after the re-org assert tx not in manager.tx_storage.iter_mempool_from_best_index() - - # additionally the transaction should have been marked as invalid and removed from the storage after the re-org - assert tx.get_metadata().validation.is_invalid() assert not manager.tx_storage.transaction_exists(tx.hash) diff --git a/hathor/consensus/consensus.py b/hathor/consensus/consensus.py index fb27e9895..8bdbb4e4a 100644 --- a/hathor/consensus/consensus.py +++ b/hathor/consensus/consensus.py @@ -159,7 +159,7 @@ def _unsafe_update(self, base: BaseTransaction) -> None: # And emit events for txs that were removed for tx_removed in txs_to_remove: - context.pubsub.publish(HathorEvents.CONSENSUS_TX_REMOVED, vertex_id=tx_removed.hash) + context.pubsub.publish(HathorEvents.CONSENSUS_TX_REMOVED, tx=tx_removed) # and also emit the reorg finished event if needed if context.reorg_common_block is not None: diff --git a/hathor/event/model/event_data.py b/hathor/event/model/event_data.py index a0faee890..b9f2b6da6 100644 --- a/hathor/event/model/event_data.py +++ b/hathor/event/model/event_data.py @@ -97,7 +97,7 @@ def from_event_arguments(cls, args: EventArguments) -> 'EmptyData': return cls() -class TxData(BaseEventData, extra=Extra.ignore): +class TxDataWithoutMeta(BaseEventData, extra=Extra.ignore): """Class that represents transaction data on an event.""" hash: str nonce: 
Optional[int] = None @@ -112,11 +112,10 @@ class TxData(BaseEventData, extra=Extra.ignore): # TODO: Token name and symbol could be in a different class because they're only used by TokenCreationTransaction token_name: Optional[str] token_symbol: Optional[str] - metadata: 'TxMetadata' aux_pow: Optional[str] = None @classmethod - def from_event_arguments(cls, args: EventArguments) -> 'TxData': + def from_event_arguments(cls, args: EventArguments) -> Self: from hathor.transaction.resources.transaction import get_tx_extra_data tx_extra_data_json = get_tx_extra_data(args.tx, detail_tokens=False) tx_json = tx_extra_data_json['tx'] @@ -138,12 +137,8 @@ def from_event_arguments(cls, args: EventArguments) -> 'TxData': return cls(**tx_json) -class VertexIdData(BaseEventData): - vertex_id: str - - @classmethod - def from_event_arguments(cls, args: EventArguments) -> Self: - return cls(vertex_id=args.vertex_id.hex()) +class TxData(TxDataWithoutMeta): + metadata: 'TxMetadata' class ReorgData(BaseEventData): @@ -164,4 +159,4 @@ def from_event_arguments(cls, args: EventArguments) -> 'ReorgData': # Union type to encompass BaseEventData polymorphism -EventData: TypeAlias = EmptyData | TxData | ReorgData | VertexIdData +EventData: TypeAlias = EmptyData | TxData | TxDataWithoutMeta | ReorgData diff --git a/hathor/event/model/event_type.py b/hathor/event/model/event_type.py index c251e704c..38e968427 100644 --- a/hathor/event/model/event_type.py +++ b/hathor/event/model/event_type.py @@ -14,7 +14,7 @@ from enum import Enum -from hathor.event.model.event_data import BaseEventData, EmptyData, ReorgData, TxData, VertexIdData +from hathor.event.model.event_data import BaseEventData, EmptyData, ReorgData, TxData, TxDataWithoutMeta from hathor.pubsub import HathorEvents @@ -56,6 +56,6 @@ def data_type(self) -> type[BaseEventData]: EventType.REORG_STARTED: ReorgData, EventType.REORG_FINISHED: EmptyData, EventType.VERTEX_METADATA_CHANGED: TxData, - EventType.VERTEX_REMOVED: VertexIdData, + EventType.VERTEX_REMOVED: TxDataWithoutMeta, EventType.FULL_NODE_CRASHED: EmptyData, } diff --git a/hathor/pubsub.py b/hathor/pubsub.py index 6a6b28f74..8a4e25d4a 100644 --- a/hathor/pubsub.py +++ b/hathor/pubsub.py @@ -21,7 +21,6 @@ from twisted.python.threadable import isInIOThread from hathor.reactor import ReactorProtocol as Reactor -from hathor.types import VertexId from hathor.utils.zope import verified_cast if TYPE_CHECKING: @@ -62,7 +61,7 @@ class HathorEvents(Enum): CONSENSUS_TX_REMOVED: Triggered when a tx is removed because it became invalid (due to a reward lock check) - Publishes the tx hash + Publishes the tx object WALLET_OUTPUT_RECEIVED: Triggered when a wallet receives a new output @@ -146,7 +145,6 @@ class EventArguments: # XXX: add these as needed, these attributes don't always exist, but when they do these are their types tx: 'BaseTransaction' - vertex_id: VertexId reorg_size: int old_best_block: 'Block' new_best_block: 'Block' diff --git a/hathor/transaction/resources/transaction.py b/hathor/transaction/resources/transaction.py index 39a677784..2a530b74d 100644 --- a/hathor/transaction/resources/transaction.py +++ b/hathor/transaction/resources/transaction.py @@ -47,7 +47,12 @@ def update_serialized_tokens_array(tx: BaseTransaction, serialized: dict[str, An serialized['tokens'] = [h.hex() for h in tx.tokens] -def get_tx_extra_data(tx: BaseTransaction, *, detail_tokens: bool = True) -> dict[str, Any]: +def get_tx_extra_data( + tx: BaseTransaction, + *, + detail_tokens: bool = True, + force_reload_metadata: bool 
= True, +) -> dict[str, Any]: """ Get the data of a tx to be returned to the frontend Returns success, tx serializes, metadata and spent outputs """ @@ -61,7 +66,7 @@ def get_tx_extra_data(tx: BaseTransaction, *, detail_tokens: bool = True) -> dic # Update tokens array update_serialized_tokens_array(tx, serialized) - meta = tx.get_metadata(force_reload=True) + meta = tx.get_metadata(force_reload=force_reload_metadata) # To get the updated accumulated weight just need to call the # TransactionAccumulatedWeightResource (/transaction_acc_weight) diff --git a/tests/event/test_event_simulation_scenarios.py b/tests/event/test_event_simulation_scenarios.py index a65ff3aed..d1a5cd171 100644 --- a/tests/event/test_event_simulation_scenarios.py +++ b/tests/event/test_event_simulation_scenarios.py @@ -20,10 +20,10 @@ ReorgData, SpentOutput, TxData, + TxDataWithoutMeta, TxInput, TxMetadata, TxOutput, - VertexIdData, ) from hathor.event.model.event_type import EventType from hathor.event.websocket.request import StartStreamRequest @@ -340,7 +340,7 @@ def test_invalid_mempool(self) -> None: EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', 
'0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 # One VERTEX_REMOVED for the tx above - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, timestamp=0, type=EventType.VERTEX_REMOVED, data=VertexIdData(vertex_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, timestamp=0, type=EventType.VERTEX_REMOVED, data=TxDataWithoutMeta(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 # REORG_FINISHED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=40, timestamp=0, type=EventType.REORG_FINISHED, data=EmptyData(), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for the block that caused the reorg diff --git a/tests/event/websocket/test_protocol.py b/tests/event/websocket/test_protocol.py index 426d74778..01a614e50 100644 --- a/tests/event/websocket/test_protocol.py +++ b/tests/event/websocket/test_protocol.py @@ -101,10 +101,10 @@ def test_send_event_response() -> None: b'"timestamp":123.0,"type":"VERTEX_METADATA_CHANGED","data":{"hash":"abc","nonce":123,' b'"timestamp":456,"signal_bits":0,"version":1,"weight":10.0,"inputs":[],"outputs":[],' b'"parents":[],' - b'"tokens":[],"token_name":null,"token_symbol":null,"metadata":{"hash":"abc",' + b'"tokens":[],"token_name":null,"token_symbol":null,"aux_pow":null,"metadata":{"hash":"abc",' b'"spent_outputs":[],"conflict_with":[],"voided_by":[],"received_by":[],"children":[],' b'"twins":[],"accumulated_weight":10.0,"score":20.0,"first_block":null,"height":100,' - 
b'"validation":"validation"},"aux_pow":null},"group_id":null},"latest_event_id":10,' + b'"validation":"validation"}},"group_id":null},"latest_event_id":10,' b'"stream_id":"stream_id"}') protocol.sendMessage.assert_called_once_with(expected_payload) From 47473dd1efe839f7fde085ab98a9edd2a186118f Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Mon, 4 Mar 2024 16:18:18 -0300 Subject: [PATCH 04/61] feat(mergedmining): support dummy mining on coordinator --- hathor/cli/merged_mining.py | 25 +++++++-- hathor/client.py | 3 +- hathor/merged_mining/coordinator.py | 79 ++++++++++++++++++++++------- 3 files changed, 81 insertions(+), 26 deletions(-) diff --git a/hathor/cli/merged_mining.py b/hathor/cli/merged_mining.py index 14061ecb7..3fa11db06 100644 --- a/hathor/cli/merged_mining.py +++ b/hathor/cli/merged_mining.py @@ -30,8 +30,13 @@ def create_parser() -> ArgumentParser: parser.add_argument('--debug-listen', help='Port to listen for Debug API', type=int, required=False) parser.add_argument('--hathor-api', help='Endpoint of the Hathor API (without version)', type=str, required=True) parser.add_argument('--hathor-address', help='Hathor address to send funds to', type=str, required=False) - parser.add_argument('--bitcoin-rpc', help='Endpoint of the Bitcoin RPC', type=str, required=True) + rpc = parser.add_mutually_exclusive_group(required=True) + rpc.add_argument('--bitcoin-rpc', help='Endpoint of the Bitcoin RPC', type=str) parser.add_argument('--bitcoin-address', help='Bitcoin address to send funds to', type=str, required=False) + rpc.add_argument('--x-dummy-merged-mining', help='Use zeroed bits to simulate a dummy merged mining', + action='store_true') + parser.add_argument('--x-dummy-merkle-len', help='Merkle path length to simulate when doing dummy merged mining', + type=int, required=False) parser.add_argument('--min-diff', help='Minimum difficulty to set for jobs', type=int, required=False) return parser @@ -45,7 +50,14 @@ def execute(args: Namespace) -> None: loop = asyncio.get_event_loop() - bitcoin_rpc = BitcoinRPC(args.bitcoin_rpc) + bitcoin_rpc: BitcoinRPC | None + if args.bitcoin_rpc is not None: + # XXX: plain assert because argparse should already ensure it's correct + assert not args.x_dummy_merged_mining + bitcoin_rpc = BitcoinRPC(args.bitcoin_rpc) + else: + assert args.x_dummy_merged_mining + bitcoin_rpc = None hathor_client = HathorClient(args.hathor_api) # TODO: validate addresses? 
merged_mining = MergedMiningCoordinator( @@ -55,9 +67,11 @@ def execute(args: Namespace) -> None: payback_address_bitcoin=args.bitcoin_address, address_from_login=not (args.hathor_address and args.bitcoin_address), min_difficulty=args.min_diff, + dummy_merkle_path_len=args.x_dummy_merkle_len, ) - logger.info('start Bitcoin RPC', url=args.bitcoin_rpc) - loop.run_until_complete(bitcoin_rpc.start()) + if bitcoin_rpc is not None: + logger.info('start Bitcoin RPC', url=args.bitcoin_rpc) + loop.run_until_complete(bitcoin_rpc.start()) logger.info('start Hathor Client', url=args.hathor_api) loop.run_until_complete(hathor_client.start()) logger.info('start Merged Mining Server', listen=f'0.0.0.0:{args.port}') @@ -89,7 +103,8 @@ def execute(args: Namespace) -> None: loop.run_until_complete(mm_server.wait_closed()) loop.run_until_complete(merged_mining.stop()) loop.run_until_complete(hathor_client.stop()) - loop.run_until_complete(bitcoin_rpc.stop()) + if bitcoin_rpc is not None: + loop.run_until_complete(bitcoin_rpc.stop()) loop.close() logger.info('bye') diff --git a/hathor/client.py b/hathor/client.py index 1c68f6787..f6b1ece41 100644 --- a/hathor/client.py +++ b/hathor/client.py @@ -251,8 +251,7 @@ async def submit(self, block: Block) -> Optional[BlockTemplate]: resp: Union[bool, dict] = await self._do_request('mining.submit', { 'hexdata': bytes(block).hex(), }) - if resp: - assert isinstance(resp, dict) + if isinstance(resp, dict): error = resp.get('error') if error: raise APIError(error) diff --git a/hathor/merged_mining/coordinator.py b/hathor/merged_mining/coordinator.py index f65a181da..88905692a 100644 --- a/hathor/merged_mining/coordinator.py +++ b/hathor/merged_mining/coordinator.py @@ -71,6 +71,9 @@ # PROPAGATION_FAILED = {'code': 33, 'message': 'Solution propagation failed'} DUPLICATE_SOLUTION = {'code': 34, 'message': 'Solution already submitted'} +ZEROED_4: bytes = b'\0' * 4 +ZEROED_32: bytes = b'\0' * 32 + class HathorCoordJob(NamedTuple): """ Data class used to send a job's work to Hathor Stratum. @@ -688,6 +691,8 @@ async def submit_to_bitcoin(self, job: SingleMinerJob, work: SingleMinerWork) -> """ Submit work to Bitcoin RPC. """ bitcoin_rpc = self.coordinator.bitcoin_rpc + if bitcoin_rpc is None: + return # bitcoin_block = job.build_bitcoin_block(work) # XXX: too expensive for now bitcoin_block_header = job.build_bitcoin_block_header(work) block_hash = Hash(bitcoin_block_header.hash) @@ -797,6 +802,29 @@ class BitcoinCoordJob(NamedTuple): witness_commitment: Optional[bytes] = None append_to_input: bool = True + @classmethod + def create_dummy(cls, merkle_path_len: int = 0) -> 'BitcoinCoordJob': + """ Creates a dummy instance with zeroed values and optionally a merkle path with the given length. + """ + if merkle_path_len > 0: + transactions = [BitcoinRawTransaction(ZEROED_32, ZEROED_32, b'')] * (2 ** merkle_path_len - 1) + merkle_path = tuple(build_merkle_path_for_coinbase([t.txid for t in transactions])) + else: + transactions = [] + merkle_path = tuple() + return cls( + version=0, + previous_block_hash=ZEROED_32, + coinbase_value=0, + target=ZEROED_32, + min_time=0, + size_limit=0, + bits=ZEROED_4, + height=0, + transactions=transactions, + merkle_path=merkle_path, + ) + @classmethod def from_dict(cls, params: dict) -> 'BitcoinCoordJob': r""" Convert from dict of the properties returned from Bitcoin RPC. 
@@ -968,7 +996,7 @@ def make_coinbase_transaction(self, hathor_block_hash: bytes, payback_script_bit if self.witness_commitment is not None: segwit_output = BitcoinTransactionOutput(0, self.witness_commitment) outputs.append(segwit_output) - coinbase_input.script_witness.append(b'\0' * 32) + coinbase_input.script_witness.append(ZEROED_32) # append now because segwit presence may change this inputs.append(coinbase_input) @@ -1110,10 +1138,17 @@ class MergedMiningCoordinator: MAX_XNONCE1 = 2**XNONCE1_SIZE - 1 MAX_RECONNECT_BACKOFF = 30 - def __init__(self, bitcoin_rpc: IBitcoinRPC, hathor_client: IHathorClient, - payback_address_bitcoin: Optional[str], payback_address_hathor: Optional[str], - address_from_login: bool = True, min_difficulty: Optional[int] = None, - sequential_xnonce1: bool = False, rng: Optional[Random] = None): + def __init__(self, + bitcoin_rpc: IBitcoinRPC | None, + hathor_client: IHathorClient, + payback_address_bitcoin: str | None, + payback_address_hathor: str | None, + address_from_login: bool = True, + min_difficulty: int | None = None, + sequential_xnonce1: bool = False, + rng: Random | None = None, + dummy_merkle_path_len: int | None = None, + ): self.log = logger.new() if rng is None: rng = Random() @@ -1144,6 +1179,7 @@ def __init__(self, bitcoin_rpc: IBitcoinRPC, hathor_client: IHathorClient, self.started_at = 0.0 self.strip_all_transactions = False self.strip_segwit_transactions = False + self.dummy_merkle_path_len = dummy_merkle_path_len or 0 @property def uptime(self) -> float: @@ -1185,21 +1221,24 @@ async def start(self) -> None: """ loop = asyncio.get_event_loop() self.started_at = time.time() - self.update_bitcoin_block_task = loop.create_task(self.update_bitcoin_block()) + if self.bitcoin_rpc is not None: + self.update_bitcoin_block_task = loop.create_task(self.update_bitcoin_block()) + else: + self.bitcoin_coord_job = BitcoinCoordJob.create_dummy(self.dummy_merkle_path_len) self.update_hathor_block_task = loop.create_task(self.update_hathor_block()) async def stop(self) -> None: """ Stops the client, interrupting mining processes, stoping supervisor loop, and sending finished jobs. 
""" - assert self.update_bitcoin_block_task is not None - self.update_bitcoin_block_task.cancel() + finals = [] + if self.update_bitcoin_block_task is not None: + self.update_bitcoin_block_task.cancel() + finals.append(self.update_bitcoin_block_task) assert self.update_hathor_block_task is not None self.update_hathor_block_task.cancel() + finals.append(self.update_hathor_block_task) try: - await asyncio.gather( - self.update_bitcoin_block_task, - self.update_hathor_block_task, - ) + await asyncio.gather(*finals) except asyncio.CancelledError: pass except Exception: @@ -1210,6 +1249,7 @@ async def stop(self) -> None: async def update_bitcoin_block(self) -> None: """ Task that continuously polls block templates from bitcoin.get_block_template """ + assert self.bitcoin_rpc is not None backoff = 1 longpoll_id = None while True: @@ -1348,13 +1388,14 @@ async def update_merged_block(self) -> None: merkle_root = build_merkle_root(list(tx.txid for tx in block_proposal.transactions)) if merkle_root != block_proposal.header.merkle_root: self.log.warn('bad merkle root', expected=merkle_root.hex(), got=block_proposal.header.merkle_root.hex()) - error = await self.bitcoin_rpc.verify_block_proposal(block=bytes(block_proposal)) - if error is not None: - self.log.warn('proposed block is invalid, skipping update', error=error) - else: - self.next_merged_job = merged_job - self.update_jobs() - self.log.debug('merged job updated') + if self.bitcoin_rpc is not None: + error = await self.bitcoin_rpc.verify_block_proposal(block=bytes(block_proposal)) + if error is not None: + self.log.warn('proposed block is invalid, skipping update', error=error) + return + self.next_merged_job = merged_job + self.update_jobs() + self.log.debug('merged job updated') def status(self) -> dict[Any, Any]: """ Build status dict with useful metrics for use in MM Status API. 
From 445be1c841cf9ae2c1c8e4c33b7e54ecb88b6fdc Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Wed, 14 Aug 2024 15:34:23 -0300 Subject: [PATCH 05/61] feat(metadata): create basic static metadata structures (#1013) --- hathor/builder/builder.py | 7 +- hathor/builder/cli_builder.py | 6 +- hathor/cli/quick_test.py | 5 +- hathor/feature_activation/feature_service.py | 2 +- hathor/transaction/base_transaction.py | 44 +++- hathor/transaction/block.py | 10 +- hathor/transaction/static_metadata.py | 64 ++++++ hathor/transaction/storage/cache_storage.py | 10 + hathor/transaction/storage/memory_storage.py | 15 +- hathor/transaction/storage/rocksdb_storage.py | 14 ++ .../storage/transaction_storage.py | 17 +- hathor/transaction/transaction.py | 13 +- hathor/verification/verification_service.py | 12 +- .../test_feature_service.py | 215 ++++++++---------- tests/others/test_metrics.py | 2 + tests/poa/test_poa_simulation.py | 2 +- tests/tx/test_block.py | 44 ++-- 17 files changed, 318 insertions(+), 164 deletions(-) create mode 100644 hathor/transaction/static_metadata.py diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index de8410bb2..455384852 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -563,7 +563,12 @@ def _get_or_create_verification_service(self) -> VerificationService: if self._verification_service is None: settings = self._get_or_create_settings() verifiers = self._get_or_create_vertex_verifiers() - self._verification_service = VerificationService(settings=settings, verifiers=verifiers) + storage = self._get_or_create_tx_storage() + self._verification_service = VerificationService( + settings=settings, + verifiers=verifiers, + tx_storage=storage, + ) return self._verification_service diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index 33acca41b..3f3304f2a 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -295,7 +295,11 @@ def create_manager(self, reactor: Reactor) -> HathorManager: daa=daa, feature_service=self.feature_service ) - verification_service = VerificationService(settings=settings, verifiers=vertex_verifiers) + verification_service = VerificationService( + settings=settings, + verifiers=vertex_verifiers, + tx_storage=tx_storage, + ) cpu_mining_service = CpuMiningService() diff --git a/hathor/cli/quick_test.py b/hathor/cli/quick_test.py index 30f8852ac..1ba6fd0ff 100644 --- a/hathor/cli/quick_test.py +++ b/hathor/cli/quick_test.py @@ -30,7 +30,8 @@ def create_parser(cls) -> ArgumentParser: return parser def prepare(self, *, register_resources: bool = True) -> None: - from hathor.transaction import BaseTransaction, Block + from hathor.transaction import Block + from hathor.transaction.base_transaction import GenericVertex super().prepare(register_resources=False) self._no_wait = self._args.no_wait @@ -47,7 +48,7 @@ def patched_on_new_tx(*args: Any, **kwargs: Any) -> bool: else: vertex = args[0] should_quit = False - assert isinstance(vertex, BaseTransaction) + assert isinstance(vertex, GenericVertex) if isinstance(vertex, Block): should_quit = vertex.get_height() >= self._args.quit_after_n_blocks diff --git a/hathor/feature_activation/feature_service.py b/hathor/feature_activation/feature_service.py index caadb62fb..9fa7ceb0b 100644 --- a/hathor/feature_activation/feature_service.py +++ b/hathor/feature_activation/feature_service.py @@ -222,7 +222,7 @@ def _get_ancestor_at_height(self, *, block: 'Block', ancestor_height: int) -> 'B if parent_block.get_height() == 
ancestor_height: return parent_block - if not parent_metadata.voided_by and (ancestor := self._tx_storage.get_transaction_by_height(ancestor_height)): + if not parent_metadata.voided_by and (ancestor := self._tx_storage.get_block_by_height(ancestor_height)): from hathor.transaction import Block assert isinstance(ancestor, Block) return ancestor diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index 607ddb539..d2bb14e50 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -24,13 +24,14 @@ from itertools import chain from math import inf, isfinite, log from struct import error as StructError, pack -from typing import TYPE_CHECKING, Any, ClassVar, Iterator, Optional +from typing import TYPE_CHECKING, Any, ClassVar, Generic, Iterator, Optional, TypeAlias, TypeVar, cast from structlog import get_logger from hathor.checkpoint import Checkpoint from hathor.conf.get_settings import get_global_settings from hathor.transaction.exceptions import InvalidOutputValue, WeightError +from hathor.transaction.static_metadata import VertexStaticMetadata from hathor.transaction.transaction_metadata import TransactionMetadata from hathor.transaction.util import VerboseCallback, int_to_bytes, unpack, unpack_len from hathor.transaction.validation_state import ValidationState @@ -123,9 +124,11 @@ def get_cls(self) -> type['BaseTransaction']: _base_transaction_log = logger.new() +StaticMetadataT = TypeVar('StaticMetadataT', bound=VertexStaticMetadata, covariant=True) -class BaseTransaction(ABC): - """Hathor base transaction""" + +class GenericVertex(ABC, Generic[StaticMetadataT]): + """Hathor generic vertex""" # Even though nonce is serialized with different sizes for tx and blocks # the same size is used for hashes to enable mining algorithm compatibility @@ -134,6 +137,7 @@ class BaseTransaction(ABC): HEX_BASE = 16 _metadata: Optional[TransactionMetadata] + _static_metadata: StaticMetadataT | None # Bits extracted from the first byte of the version field. They carry extra information that may be interpreted # differently by each subclass of BaseTransaction. @@ -178,6 +182,7 @@ def __init__( self.parents = parents or [] self.storage = storage self._hash: VertexId | None = hash # Stored as bytes. 
+ self._static_metadata = None @classproperty def log(cls): @@ -260,7 +265,7 @@ def __eq__(self, other: object) -> bool: :raises NotImplement: when one of the transactions do not have a calculated hash """ - if not isinstance(other, BaseTransaction): + if not isinstance(other, GenericVertex): return NotImplemented if self._hash and other._hash: return self.hash == other.hash @@ -762,7 +767,7 @@ def _update_feature_activation_bit_counts(self) -> None: from hathor.transaction import Block assert isinstance(self, Block) # This method lazily calculates and stores the value in metadata - self.get_feature_activation_bit_counts() + cast(Block, self).get_feature_activation_bit_counts() def _update_initial_accumulated_weight(self) -> None: """Update the vertex initial accumulated_weight.""" @@ -875,6 +880,8 @@ def clone(self, *, include_metadata: bool = True, include_storage: bool = True) :return: Transaction or Block copy """ new_tx = self.create_from_struct(self.get_struct()) + # static_metadata can be safely copied as it is a frozen dataclass + new_tx.set_static_metadata(self._static_metadata) if hasattr(self, '_metadata') and include_metadata: assert self._metadata is not None # FIXME: is this actually true or do we have to check if not None new_tx._metadata = self._metadata.clone() @@ -897,6 +904,33 @@ def is_ready_for_validation(self) -> bool: return False return True + @property + def static_metadata(self) -> StaticMetadataT: + """Get this vertex's static metadata. Assumes it has been initialized.""" + assert self._static_metadata is not None + return self._static_metadata + + @abstractmethod + def init_static_metadata_from_storage(self, storage: 'TransactionStorage') -> None: + """Initialize this vertex's static metadata using dependencies from a storage. This can be called multiple + times, provided the dependencies don't change.""" + raise NotImplementedError + + def set_static_metadata(self, static_metadata: StaticMetadataT | None) -> None: + """Set this vertex's static metadata. After it's set, it can only be set again to the same value.""" + assert not self._static_metadata or self._static_metadata == static_metadata, ( + 'trying to set static metadata with different values' + ) + self._static_metadata = static_metadata + + +""" +Type aliases for easily working with `GenericVertex`. A `Vertex` is a superclass that includes all specific +vertex subclasses, and a `BaseTransaction` is simply an alias to `Vertex` for backwards compatibility. 
+""" +Vertex: TypeAlias = GenericVertex[VertexStaticMetadata] +BaseTransaction: TypeAlias = Vertex + class TxInput: _tx: BaseTransaction # XXX: used for caching on hathor.transaction.Transaction.get_spent_tx diff --git a/hathor/transaction/block.py b/hathor/transaction/block.py index 22e6d61ac..617458a8a 100644 --- a/hathor/transaction/block.py +++ b/hathor/transaction/block.py @@ -20,13 +20,15 @@ from struct import pack from typing import TYPE_CHECKING, Any, Iterator, Optional -from typing_extensions import Self +from typing_extensions import Self, override from hathor.checkpoint import Checkpoint from hathor.feature_activation.feature import Feature from hathor.feature_activation.model.feature_state import FeatureState from hathor.transaction import BaseTransaction, TxOutput, TxVersion +from hathor.transaction.base_transaction import GenericVertex from hathor.transaction.exceptions import CheckpointError +from hathor.transaction.static_metadata import BlockStaticMetadata from hathor.transaction.util import VerboseCallback, int_to_bytes, unpack, unpack_len from hathor.util import not_none from hathor.utils.int import get_bit_list @@ -42,7 +44,7 @@ _SIGHASH_ALL_FORMAT_STRING = '!BBBB' -class Block(BaseTransaction): +class Block(GenericVertex[BlockStaticMetadata]): SERIALIZATION_NONCE_SIZE = 16 def __init__( @@ -427,3 +429,7 @@ def iter_transactions_in_this_block(self) -> Iterator[BaseTransaction]: bfs.skip_neighbors(tx) continue yield tx + + @override + def init_static_metadata_from_storage(self, storage: 'TransactionStorage') -> None: + raise NotImplementedError('this will be implemented') diff --git a/hathor/transaction/static_metadata.py b/hathor/transaction/static_metadata.py new file mode 100644 index 000000000..43114c0fb --- /dev/null +++ b/hathor/transaction/static_metadata.py @@ -0,0 +1,64 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import dataclasses +from abc import ABC +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from hathor.util import json_dumpb, json_loadb + +if TYPE_CHECKING: + from hathor.transaction import BaseTransaction + + +@dataclass(slots=True, frozen=True, kw_only=True) +class VertexStaticMetadata(ABC): + """ + Static Metadata represents vertex attributes that are not intrinsic to the vertex data, but can be calculated from + only the vertex itself and its dependencies, and whose values never change. + + This class is an abstract base class for all static metadata types that includes attributes common to all vertex + types. 
+ """ + min_height: int + + def to_bytes(self) -> bytes: + """Convert this static metadata instance to a json bytes representation.""" + return json_dumpb(dataclasses.asdict(self)) + + @classmethod + def from_bytes(cls, data: bytes, *, target: 'BaseTransaction') -> 'VertexStaticMetadata': + """Create a static metadata instance from a json bytes representation, with a known vertex type target.""" + from hathor.transaction import Block, Transaction + json_dict = json_loadb(data) + + if isinstance(target, Block): + return BlockStaticMetadata(**json_dict) + + if isinstance(target, Transaction): + return TransactionStaticMetadata(**json_dict) + + raise NotImplementedError + + +@dataclass(slots=True, frozen=True, kw_only=True) +class BlockStaticMetadata(VertexStaticMetadata): + height: int + feature_activation_bit_counts: list[int] + + +@dataclass(slots=True, frozen=True, kw_only=True) +class TransactionStaticMetadata(VertexStaticMetadata): + pass diff --git a/hathor/transaction/storage/cache_storage.py b/hathor/transaction/storage/cache_storage.py index 63b9af6b3..8f9536358 100644 --- a/hathor/transaction/storage/cache_storage.py +++ b/hathor/transaction/storage/cache_storage.py @@ -16,11 +16,13 @@ from typing import Any, Iterator, Optional from twisted.internet import threads +from typing_extensions import override from hathor.conf.settings import HathorSettings from hathor.indexes import IndexesManager from hathor.reactor import ReactorProtocol as Reactor from hathor.transaction import BaseTransaction +from hathor.transaction.static_metadata import VertexStaticMetadata from hathor.transaction.storage.migrations import MigrationState from hathor.transaction.storage.transaction_storage import BaseTransactionStorage from hathor.transaction.storage.tx_allow_scope import TxAllowScope @@ -164,6 +166,14 @@ def save_transaction(self, tx: 'BaseTransaction', *, only_metadata: bool = False # call super which adds to index if needed super().save_transaction(tx, only_metadata=only_metadata) + @override + def _save_static_metadata(self, tx: BaseTransaction) -> None: + self.store._save_static_metadata(tx) + + @override + def _get_static_metadata(self, vertex: BaseTransaction) -> VertexStaticMetadata | None: + return self.store._get_static_metadata(vertex) + def get_all_genesis(self) -> set[BaseTransaction]: return self.store.get_all_genesis() diff --git a/hathor/transaction/storage/memory_storage.py b/hathor/transaction/storage/memory_storage.py index efb47b1ba..861e19e65 100644 --- a/hathor/transaction/storage/memory_storage.py +++ b/hathor/transaction/storage/memory_storage.py @@ -14,12 +14,15 @@ from typing import Any, Iterator, Optional, TypeVar +from typing_extensions import override + from hathor.conf.settings import HathorSettings from hathor.indexes import IndexesManager +from hathor.transaction import BaseTransaction +from hathor.transaction.static_metadata import VertexStaticMetadata from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.transaction.storage.migrations import MigrationState from hathor.transaction.storage.transaction_storage import BaseTransactionStorage -from hathor.transaction.transaction import BaseTransaction from hathor.transaction.transaction_metadata import TransactionMetadata _Clonable = TypeVar('_Clonable', BaseTransaction, TransactionMetadata) @@ -40,6 +43,7 @@ def __init__( """ self.transactions: dict[bytes, BaseTransaction] = {} self.metadata: dict[bytes, TransactionMetadata] = {} + self._static_metadata: dict[bytes, VertexStaticMetadata] 
= {} # Store custom key/value attributes self.attributes: dict[str, Any] = {} self._clone_if_needed = _clone_if_needed @@ -71,6 +75,7 @@ def remove_transaction(self, tx: BaseTransaction) -> None: super().remove_transaction(tx) self.transactions.pop(tx.hash, None) self.metadata.pop(tx.hash, None) + self._static_metadata.pop(tx.hash, None) def save_transaction(self, tx: 'BaseTransaction', *, only_metadata: bool = False) -> None: super().save_transaction(tx, only_metadata=only_metadata) @@ -83,6 +88,14 @@ def _save_transaction(self, tx: BaseTransaction, *, only_metadata: bool = False) if meta: self.metadata[tx.hash] = self._clone(meta) + @override + def _save_static_metadata(self, tx: BaseTransaction) -> None: + self._static_metadata[tx.hash] = tx.static_metadata + + @override + def _get_static_metadata(self, vertex: BaseTransaction) -> VertexStaticMetadata | None: + return self._static_metadata.get(vertex.hash) + def transaction_exists(self, hash_bytes: bytes) -> bool: return hash_bytes in self.transactions diff --git a/hathor/transaction/storage/rocksdb_storage.py b/hathor/transaction/storage/rocksdb_storage.py index 50c7be615..faa97b590 100644 --- a/hathor/transaction/storage/rocksdb_storage.py +++ b/hathor/transaction/storage/rocksdb_storage.py @@ -15,10 +15,12 @@ from typing import TYPE_CHECKING, Iterator, Optional from structlog import get_logger +from typing_extensions import override from hathor.conf.settings import HathorSettings from hathor.indexes import IndexesManager from hathor.storage import RocksDBStorage +from hathor.transaction.static_metadata import VertexStaticMetadata from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.transaction.storage.migrations import MigrationState from hathor.transaction.storage.transaction_storage import BaseTransactionStorage @@ -35,6 +37,7 @@ _DB_NAME = 'data_v2.db' _CF_NAME_TX = b'tx' _CF_NAME_META = b'meta' +_CF_NAME_STATIC_META = b'static-meta' _CF_NAME_ATTR = b'attr' _CF_NAME_MIGRATIONS = b'migrations' @@ -55,6 +58,7 @@ def __init__( ) -> None: self._cf_tx = rocksdb_storage.get_or_create_column_family(_CF_NAME_TX) self._cf_meta = rocksdb_storage.get_or_create_column_family(_CF_NAME_META) + self._cf_static_meta = rocksdb_storage.get_or_create_column_family(_CF_NAME_STATIC_META) self._cf_attr = rocksdb_storage.get_or_create_column_family(_CF_NAME_ATTR) self._cf_migrations = rocksdb_storage.get_or_create_column_family(_CF_NAME_MIGRATIONS) @@ -93,6 +97,7 @@ def remove_transaction(self, tx: 'BaseTransaction') -> None: super().remove_transaction(tx) self._db.delete((self._cf_tx, tx.hash)) self._db.delete((self._cf_meta, tx.hash)) + self._db.delete((self._cf_static_meta, tx.hash)) self._remove_from_weakref(tx) def save_transaction(self, tx: 'BaseTransaction', *, only_metadata: bool = False) -> None: @@ -108,6 +113,15 @@ def _save_transaction(self, tx: 'BaseTransaction', *, only_metadata: bool = Fals meta_data = self._meta_to_bytes(tx.get_metadata(use_storage=False)) self._db.put((self._cf_meta, key), meta_data) + @override + def _save_static_metadata(self, tx: 'BaseTransaction') -> None: + self._db.put((self._cf_static_meta, tx.hash), tx.static_metadata.to_bytes()) + + @override + def _get_static_metadata(self, vertex: 'BaseTransaction') -> VertexStaticMetadata | None: + data = self._db.get((self._cf_static_meta, vertex.hash)) + return VertexStaticMetadata.from_bytes(data, target=vertex) if data else None + def transaction_exists(self, hash_bytes: bytes) -> bool: may_exist, _ = self._db.key_may_exist((self._cf_tx, 
hash_bytes)) if not may_exist: diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index 85a978b2c..32326c02c 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -32,6 +32,7 @@ from hathor.transaction.base_transaction import BaseTransaction, TxOutput from hathor.transaction.block import Block from hathor.transaction.exceptions import RewardLocked +from hathor.transaction.static_metadata import VertexStaticMetadata from hathor.transaction.storage.exceptions import ( TransactionDoesNotExist, TransactionIsNotABlock, @@ -425,6 +426,11 @@ def save_transaction(self: 'TransactionStorage', tx: BaseTransaction, *, only_me meta = tx.get_metadata() self.pre_save_validation(tx, meta) + @abstractmethod + def _save_static_metadata(self, vertex: BaseTransaction) -> None: + """Save a vertex's static metadata to this storage.""" + raise NotImplementedError + def pre_save_validation(self, tx: BaseTransaction, tx_meta: TransactionMetadata) -> None: """ Must be run before every save, will raise AssertionError or TransactionNotInAllowedScopeError @@ -545,12 +551,12 @@ def get_transaction(self, hash_bytes: bytes) -> BaseTransaction: self.post_get_validation(tx) return tx - def get_transaction_by_height(self, height: int) -> Optional[BaseTransaction]: - """Returns a transaction from the height index. This is fast.""" + def get_block_by_height(self, height: int) -> Optional[Block]: + """Return a block in the best blockchain from the height index. This is fast.""" assert self.indexes is not None ancestor_hash = self.indexes.height.get(height) - return None if ancestor_hash is None else self.get_transaction(ancestor_hash) + return None if ancestor_hash is None else self.get_block(ancestor_hash) def get_metadata(self, hash_bytes: bytes) -> Optional[TransactionMetadata]: """Returns the transaction metadata with hash `hash_bytes`. @@ -564,6 +570,11 @@ def get_metadata(self, hash_bytes: bytes) -> Optional[TransactionMetadata]: except TransactionDoesNotExist: return None + @abstractmethod + def _get_static_metadata(self, vertex: BaseTransaction) -> VertexStaticMetadata | None: + """Get a vertex's static metadata from this storage.""" + raise NotImplementedError + def get_all_transactions(self) -> Iterator[BaseTransaction]: """Return all vertices (transactions and blocks) within the allowed scope. 
""" diff --git a/hathor/transaction/transaction.py b/hathor/transaction/transaction.py index a787339d8..a826b6a6b 100644 --- a/hathor/transaction/transaction.py +++ b/hathor/transaction/transaction.py @@ -19,12 +19,15 @@ from struct import pack from typing import TYPE_CHECKING, Any, NamedTuple, Optional +from typing_extensions import override + from hathor.checkpoint import Checkpoint from hathor.exception import InvalidNewTransaction from hathor.reward_lock import iter_spent_rewards -from hathor.transaction import BaseTransaction, TxInput, TxOutput, TxVersion -from hathor.transaction.base_transaction import TX_HASH_SIZE +from hathor.transaction import TxInput, TxOutput, TxVersion +from hathor.transaction.base_transaction import TX_HASH_SIZE, GenericVertex from hathor.transaction.exceptions import InvalidToken +from hathor.transaction.static_metadata import TransactionStaticMetadata from hathor.transaction.util import VerboseCallback, unpack, unpack_len from hathor.types import TokenUid, VertexId from hathor.util import not_none @@ -51,7 +54,7 @@ class RewardLockedInfo(NamedTuple): blocks_needed: int -class Transaction(BaseTransaction): +class Transaction(GenericVertex[TransactionStaticMetadata]): SERIALIZATION_NONCE_SIZE = 4 @@ -389,3 +392,7 @@ def is_spending_voided_tx(self) -> bool: if meta.voided_by: return True return False + + @override + def init_static_metadata_from_storage(self, storage: 'TransactionStorage') -> None: + raise NotImplementedError('this will be implemented') diff --git a/hathor/verification/verification_service.py b/hathor/verification/verification_service.py index 3f75c0b9b..09b3563f9 100644 --- a/hathor/verification/verification_service.py +++ b/hathor/verification/verification_service.py @@ -18,6 +18,7 @@ from hathor.profiler import get_cpu_profiler from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion from hathor.transaction.poa import PoaBlock +from hathor.transaction.storage import TransactionStorage from hathor.transaction.token_creation_tx import TokenCreationTransaction from hathor.transaction.transaction import TokenInfo from hathor.transaction.validation_state import ValidationState @@ -28,11 +29,18 @@ class VerificationService: - __slots__ = ('_settings', 'verifiers') + __slots__ = ('_settings', 'verifiers', '_tx_storage') - def __init__(self, *, settings: HathorSettings, verifiers: VertexVerifiers) -> None: + def __init__( + self, + *, + settings: HathorSettings, + verifiers: VertexVerifiers, + tx_storage: TransactionStorage | None = None, + ) -> None: self._settings = settings self.verifiers = verifiers + self._tx_storage = tx_storage def validate_basic(self, vertex: BaseTransaction, *, skip_block_weight_verification: bool = False) -> bool: """ Run basic validations (all that are possible without dependencies) and update the validation state. diff --git a/tests/feature_activation/test_feature_service.py b/tests/feature_activation/test_feature_service.py index 60c76d8bc..b81a6c812 100644 --- a/tests/feature_activation/test_feature_service.py +++ b/tests/feature_activation/test_feature_service.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import cast from unittest.mock import Mock, patch import pytest @@ -29,15 +28,18 @@ from hathor.feature_activation.model.feature_description import FeatureDescription from hathor.feature_activation.model.feature_state import FeatureState from hathor.feature_activation.settings import Settings as FeatureSettings -from hathor.transaction import Block, TransactionMetadata -from hathor.transaction.storage import TransactionStorage +from hathor.indexes import MemoryIndexesManager +from hathor.transaction import Block +from hathor.transaction.storage import TransactionMemoryStorage, TransactionStorage from hathor.transaction.validation_state import ValidationState +from hathor.util import not_none -def _get_blocks_and_storage() -> tuple[list[Block], TransactionStorage]: +@pytest.fixture +def storage() -> TransactionStorage: settings = get_global_settings() - genesis_hash = settings.GENESIS_BLOCK_HASH - blocks: list[Block] = [] + indexes = MemoryIndexesManager() + storage = TransactionMemoryStorage(indexes=indexes, settings=settings) feature_activation_bits = [ 0b0000, # 0: boundary block 0b0010, @@ -72,37 +74,18 @@ def _get_blocks_and_storage() -> tuple[list[Block], TransactionStorage]: 0b0000, # 24: boundary block 0b0000, ] - storage = Mock() for height, bits in enumerate(feature_activation_bits): - block_hash = genesis_hash if height == 0 else int.to_bytes(height, length=1, byteorder='big') - block = Block(hash=block_hash, storage=storage, signal_bits=bits) - blocks.append(block) - parent_hash = blocks[height - 1].hash - assert parent_hash is not None - block.parents = [parent_hash] - block._metadata = TransactionMetadata(height=height) - block._metadata.validation = ValidationState.FULL - - block_by_hash = {block.hash: block for block in blocks} - storage.get_transaction = Mock(side_effect=lambda hash_bytes: block_by_hash[hash_bytes]) - storage.get_transaction_by_height = Mock(side_effect=lambda h: blocks[h]) + if height == 0: + continue + parent = not_none(storage.get_block_by_height(height - 1)) + block = Block(signal_bits=bits, parents=[parent.hash], storage=storage) + block.update_hash() + block.get_metadata().validation = ValidationState.FULL + storage.save_transaction(block) + indexes.height.add_new(height, block.hash, block.timestamp) - return blocks, storage - - -@pytest.fixture -def block_mocks() -> list[Block]: - blocks, _ = _get_blocks_and_storage() - - return blocks - - -@pytest.fixture -def tx_storage() -> TransactionStorage: - _, tx_storage = _get_blocks_and_storage() - - return tx_storage + return storage @pytest.fixture @@ -114,26 +97,26 @@ def feature_settings() -> FeatureSettings: @pytest.fixture -def service(feature_settings: FeatureSettings, tx_storage: TransactionStorage) -> FeatureService: +def service(feature_settings: FeatureSettings, storage: TransactionStorage) -> FeatureService: service = FeatureService( feature_settings=feature_settings, - tx_storage=tx_storage + tx_storage=storage ) service.bit_signaling_service = Mock() return service -def test_get_state_genesis(block_mocks: list[Block], service: FeatureService) -> None: - block = block_mocks[0] +def test_get_state_genesis(storage: TransactionStorage, service: FeatureService) -> None: + block = not_none(storage.get_block_by_height(0)) result = service.get_state(block=block, feature=Mock()) assert result == FeatureState.DEFINED @pytest.mark.parametrize('block_height', [0, 1, 2, 3]) -def test_get_state_first_interval(block_mocks: list[Block], service: FeatureService, block_height: int) -> None: - block 
= block_mocks[block_height] +def test_get_state_first_interval(storage: TransactionStorage, service: FeatureService, block_height: int) -> None: + block = not_none(storage.get_block_by_height(block_height)) result = service.get_state(block=block, feature=Mock()) assert result == FeatureState.DEFINED @@ -149,8 +132,7 @@ def test_get_state_first_interval(block_mocks: list[Block], service: FeatureServ ] ) def test_get_state_from_defined( - block_mocks: list[Block], - tx_storage: TransactionStorage, + storage: TransactionStorage, block_height: int, start_height: int, expected_state: FeatureState @@ -168,10 +150,10 @@ def test_get_state_from_defined( ) service = FeatureService( feature_settings=feature_settings, - tx_storage=tx_storage + tx_storage=storage ) service.bit_signaling_service = Mock() - block = block_mocks[block_height] + block = not_none(storage.get_block_by_height(block_height)) result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -181,8 +163,7 @@ def test_get_state_from_defined( @pytest.mark.parametrize('block_height', [12, 13, 14, 15, 16, 17]) @pytest.mark.parametrize('timeout_height', [8, 12]) def test_get_state_from_started_to_failed( - block_mocks: list[Block], - tx_storage: TransactionStorage, + storage: TransactionStorage, block_height: int, timeout_height: int, ) -> None: @@ -200,10 +181,10 @@ def test_get_state_from_started_to_failed( ) service = FeatureService( feature_settings=feature_settings, - tx_storage=tx_storage + tx_storage=storage ) service.bit_signaling_service = Mock() - block = block_mocks[block_height] + block = not_none(storage.get_block_by_height(block_height)) result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -213,8 +194,7 @@ def test_get_state_from_started_to_failed( @pytest.mark.parametrize('block_height', [8, 9, 10, 11]) @pytest.mark.parametrize('timeout_height', [8, 12]) def test_get_state_from_started_to_must_signal_on_timeout( - block_mocks: list[Block], - tx_storage: TransactionStorage, + storage: TransactionStorage, block_height: int, timeout_height: int, ) -> None: @@ -232,10 +212,10 @@ def test_get_state_from_started_to_must_signal_on_timeout( ) service = FeatureService( feature_settings=feature_settings, - tx_storage=tx_storage + tx_storage=storage ) service.bit_signaling_service = Mock() - block = block_mocks[block_height] + block = not_none(storage.get_block_by_height(block_height)) result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -246,8 +226,7 @@ def test_get_state_from_started_to_must_signal_on_timeout( @pytest.mark.parametrize('block_height', [8, 9, 10, 11]) @pytest.mark.parametrize('default_threshold', [0, 1, 2, 3]) def test_get_state_from_started_to_locked_in_on_default_threshold( - block_mocks: list[Block], - tx_storage: TransactionStorage, + storage: TransactionStorage, block_height: int, default_threshold: int ) -> None: @@ -266,10 +245,10 @@ def test_get_state_from_started_to_locked_in_on_default_threshold( ) service = FeatureService( feature_settings=feature_settings, - tx_storage=tx_storage + tx_storage=storage ) service.bit_signaling_service = Mock() - block = block_mocks[block_height] + block = not_none(storage.get_block_by_height(block_height)) result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -279,8 +258,7 @@ def test_get_state_from_started_to_locked_in_on_default_threshold( @pytest.mark.parametrize('block_height', [8, 9, 10, 11]) @pytest.mark.parametrize('custom_threshold', [0, 1, 2, 3]) def 
test_get_state_from_started_to_locked_in_on_custom_threshold( - block_mocks: list[Block], - tx_storage: TransactionStorage, + storage: TransactionStorage, block_height: int, custom_threshold: int ) -> None: @@ -298,10 +276,10 @@ def test_get_state_from_started_to_locked_in_on_custom_threshold( ) service = FeatureService( feature_settings=feature_settings, - tx_storage=tx_storage + tx_storage=storage ) service.bit_signaling_service = Mock() - block = block_mocks[block_height] + block = not_none(storage.get_block_by_height(block_height)) result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -318,8 +296,7 @@ def test_get_state_from_started_to_locked_in_on_custom_threshold( ] ) def test_get_state_from_started_to_started( - block_mocks: list[Block], - tx_storage: TransactionStorage, + storage: TransactionStorage, block_height: int, lock_in_on_timeout: bool, timeout_height: int, @@ -338,10 +315,10 @@ def test_get_state_from_started_to_started( ) service = FeatureService( feature_settings=feature_settings, - tx_storage=tx_storage + tx_storage=storage ) service.bit_signaling_service = Mock() - block = block_mocks[block_height] + block = not_none(storage.get_block_by_height(block_height)) result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -350,8 +327,7 @@ def test_get_state_from_started_to_started( @pytest.mark.parametrize('block_height', [12, 13, 14, 15]) def test_get_state_from_must_signal_to_locked_in( - block_mocks: list[Block], - tx_storage: TransactionStorage, + storage: TransactionStorage, block_height: int, ) -> None: feature_settings = FeatureSettings.construct( @@ -368,10 +344,10 @@ def test_get_state_from_must_signal_to_locked_in( ) service = FeatureService( feature_settings=feature_settings, - tx_storage=tx_storage + tx_storage=storage ) service.bit_signaling_service = Mock() - block = block_mocks[block_height] + block = not_none(storage.get_block_by_height(block_height)) result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -381,8 +357,7 @@ def test_get_state_from_must_signal_to_locked_in( @pytest.mark.parametrize('block_height', [16, 17, 18, 19]) @pytest.mark.parametrize('minimum_activation_height', [0, 4, 8, 12, 16]) def test_get_state_from_locked_in_to_active( - block_mocks: list[Block], - tx_storage: TransactionStorage, + storage: TransactionStorage, block_height: int, minimum_activation_height: int, ) -> None: @@ -401,10 +376,10 @@ def test_get_state_from_locked_in_to_active( ) service = FeatureService( feature_settings=feature_settings, - tx_storage=tx_storage + tx_storage=storage ) service.bit_signaling_service = Mock() - block = block_mocks[block_height] + block = not_none(storage.get_block_by_height(block_height)) result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -414,8 +389,7 @@ def test_get_state_from_locked_in_to_active( @pytest.mark.parametrize('block_height', [16, 17, 18, 19]) @pytest.mark.parametrize('minimum_activation_height', [17, 20, 100]) def test_get_state_from_locked_in_to_locked_in( - block_mocks: list[Block], - tx_storage: TransactionStorage, + storage: TransactionStorage, block_height: int, minimum_activation_height: int, ) -> None: @@ -434,10 +408,10 @@ def test_get_state_from_locked_in_to_locked_in( ) service = FeatureService( feature_settings=feature_settings, - tx_storage=tx_storage + tx_storage=storage ) service.bit_signaling_service = Mock() - block = block_mocks[block_height] + block = not_none(storage.get_block_by_height(block_height)) result = 
service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -445,7 +419,7 @@ def test_get_state_from_locked_in_to_locked_in( @pytest.mark.parametrize('block_height', [20, 21, 22, 23]) -def test_get_state_from_active(block_mocks: list[Block], tx_storage: TransactionStorage, block_height: int) -> None: +def test_get_state_from_active(storage: TransactionStorage, block_height: int) -> None: feature_settings = FeatureSettings.construct( evaluation_interval=4, features={ @@ -460,10 +434,10 @@ def test_get_state_from_active(block_mocks: list[Block], tx_storage: Transaction ) service = FeatureService( feature_settings=feature_settings, - tx_storage=tx_storage + tx_storage=storage ) service.bit_signaling_service = Mock() - block = block_mocks[block_height] + block = not_none(storage.get_block_by_height(block_height)) result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -471,7 +445,7 @@ def test_get_state_from_active(block_mocks: list[Block], tx_storage: Transaction @pytest.mark.parametrize('block_height', [16, 17, 18, 19]) -def test_caching_mechanism(block_mocks: list[Block], tx_storage: TransactionStorage, block_height: int) -> None: +def test_caching_mechanism(storage: TransactionStorage, block_height: int) -> None: feature_settings = FeatureSettings.construct( evaluation_interval=4, features={ @@ -484,9 +458,9 @@ def test_caching_mechanism(block_mocks: list[Block], tx_storage: TransactionStor ) } ) - service = FeatureService(feature_settings=feature_settings, tx_storage=tx_storage) + service = FeatureService(feature_settings=feature_settings, tx_storage=storage) service.bit_signaling_service = Mock() - block = block_mocks[block_height] + block = not_none(storage.get_block_by_height(block_height)) calculate_new_state_mock = Mock(wraps=service._calculate_new_state) with patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock): @@ -503,7 +477,7 @@ def test_caching_mechanism(block_mocks: list[Block], tx_storage: TransactionStor @pytest.mark.parametrize('block_height', [16, 17, 18, 19]) -def test_is_feature_active(block_mocks: list[Block], tx_storage: TransactionStorage, block_height: int) -> None: +def test_is_feature_active(storage: TransactionStorage, block_height: int) -> None: feature_settings = FeatureSettings.construct( evaluation_interval=4, features={ @@ -518,10 +492,10 @@ def test_is_feature_active(block_mocks: list[Block], tx_storage: TransactionStor ) service = FeatureService( feature_settings=feature_settings, - tx_storage=tx_storage + tx_storage=storage ) service.bit_signaling_service = Mock() - block = block_mocks[block_height] + block = not_none(storage.get_block_by_height(block_height)) result = service.is_feature_active(block=block, feature=Feature.NOP_FEATURE_1) @@ -529,7 +503,7 @@ def test_is_feature_active(block_mocks: list[Block], tx_storage: TransactionStor @pytest.mark.parametrize('block_height', [12, 13, 14, 15]) -def test_get_state_from_failed(block_mocks: list[Block], tx_storage: TransactionStorage, block_height: int) -> None: +def test_get_state_from_failed(storage: TransactionStorage, block_height: int) -> None: feature_settings = FeatureSettings.construct( evaluation_interval=4, features={ @@ -543,25 +517,24 @@ def test_get_state_from_failed(block_mocks: list[Block], tx_storage: Transaction ) service = FeatureService( feature_settings=feature_settings, - tx_storage=tx_storage + tx_storage=storage ) service.bit_signaling_service = Mock() - block = block_mocks[block_height] + block = 
not_none(storage.get_block_by_height(block_height)) result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) assert result == FeatureState.FAILED -def test_get_state_undefined_feature(block_mocks: list[Block], service: FeatureService) -> None: - block = block_mocks[10] - +def test_get_state_undefined_feature(storage: TransactionStorage, service: FeatureService) -> None: + block = not_none(storage.get_block_by_height(10)) result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) assert result == FeatureState.DEFINED -def test_get_bits_description(tx_storage: TransactionStorage) -> None: +def test_get_bits_description(storage: TransactionStorage) -> None: criteria_mock_1 = Criteria.construct(bit=Mock(), start_height=Mock(), timeout_height=Mock(), version=Mock()) criteria_mock_2 = Criteria.construct(bit=Mock(), start_height=Mock(), timeout_height=Mock(), version=Mock()) feature_settings = FeatureSettings.construct( @@ -572,7 +545,7 @@ def test_get_bits_description(tx_storage: TransactionStorage) -> None: ) service = FeatureService( feature_settings=feature_settings, - tx_storage=tx_storage + tx_storage=storage ) service.bit_signaling_service = Mock() @@ -606,14 +579,13 @@ def get_state(self: FeatureService, *, block: Block, feature: Feature) -> Featur ) def test_get_ancestor_at_height_invalid( feature_settings: FeatureSettings, - block_mocks: list[Block], - tx_storage: TransactionStorage, + storage: TransactionStorage, block_height: int, ancestor_height: int ) -> None: - service = FeatureService(feature_settings=feature_settings, tx_storage=tx_storage) + service = FeatureService(feature_settings=feature_settings, tx_storage=storage) service.bit_signaling_service = Mock() - block = block_mocks[block_height] + block = not_none(storage.get_block_by_height(block_height)) with pytest.raises(AssertionError) as e: service._get_ancestor_at_height(block=block, ancestor_height=ancestor_height) @@ -636,21 +608,23 @@ def test_get_ancestor_at_height_invalid( ) def test_get_ancestor_at_height( feature_settings: FeatureSettings, - block_mocks: list[Block], - tx_storage: TransactionStorage, + storage: TransactionStorage, block_height: int, ancestor_height: int ) -> None: - service = FeatureService(feature_settings=feature_settings, tx_storage=tx_storage) + service = FeatureService(feature_settings=feature_settings, tx_storage=storage) service.bit_signaling_service = Mock() - block = block_mocks[block_height] - result = service._get_ancestor_at_height(block=block, ancestor_height=ancestor_height) + block = not_none(storage.get_block_by_height(block_height)) + + get_block_by_height_wrapped = Mock(wraps=storage.get_block_by_height) + with patch.object(storage, 'get_block_by_height', get_block_by_height_wrapped): + result = service._get_ancestor_at_height(block=block, ancestor_height=ancestor_height) - assert result == block_mocks[ancestor_height] - assert result.get_height() == ancestor_height - assert cast(Mock, tx_storage.get_transaction_by_height).call_count == ( - 0 if block_height - ancestor_height <= 1 else 1 - ), 'this should only be called if the ancestor is deeper than one parent away' + assert get_block_by_height_wrapped.call_count == ( + 0 if block_height - ancestor_height <= 1 else 1 + ), 'this should only be called if the ancestor is deeper than one parent away' + assert result == storage.get_block_by_height(ancestor_height) + assert result.get_height() == ancestor_height @pytest.mark.parametrize( @@ -665,21 +639,23 @@ def test_get_ancestor_at_height( ) def 
test_get_ancestor_at_height_voided( feature_settings: FeatureSettings, - block_mocks: list[Block], - tx_storage: TransactionStorage, + storage: TransactionStorage, block_height: int, ancestor_height: int ) -> None: - service = FeatureService(feature_settings=feature_settings, tx_storage=tx_storage) + service = FeatureService(feature_settings=feature_settings, tx_storage=storage) service.bit_signaling_service = Mock() - block = block_mocks[block_height] - parent_block = block_mocks[block_height - 1] + block = not_none(storage.get_block_by_height(block_height)) + parent_block = not_none(storage.get_block_by_height(block_height - 1)) parent_block.get_metadata().voided_by = {b'some'} - result = service._get_ancestor_at_height(block=block, ancestor_height=ancestor_height) - assert result == block_mocks[ancestor_height] - assert result.get_height() == ancestor_height - assert cast(Mock, tx_storage.get_transaction_by_height).call_count == 0 + get_block_by_height_wrapped = Mock(wraps=storage.get_block_by_height) + with patch.object(storage, 'get_block_by_height', get_block_by_height_wrapped): + result = service._get_ancestor_at_height(block=block, ancestor_height=ancestor_height) + + assert get_block_by_height_wrapped.call_count == 0 + assert result == storage.get_block_by_height(ancestor_height) + assert result.get_height() == ancestor_height @pytest.mark.parametrize( @@ -709,8 +685,7 @@ def test_get_ancestor_at_height_voided( ] ) def test_check_must_signal( - tx_storage: TransactionStorage, - block_mocks: list[Block], + storage: TransactionStorage, bit: int, threshold: int, block_height: int, @@ -729,9 +704,9 @@ def test_check_must_signal( ) } ) - service = FeatureService(feature_settings=feature_settings, tx_storage=tx_storage) + service = FeatureService(feature_settings=feature_settings, tx_storage=storage) service.bit_signaling_service = Mock() - block = block_mocks[block_height] + block = not_none(storage.get_block_by_height(block_height)) result = service.is_signaling_mandatory_features(block) diff --git a/tests/others/test_metrics.py b/tests/others/test_metrics.py index 2d21d0c57..6573c43a2 100644 --- a/tests/others/test_metrics.py +++ b/tests/others/test_metrics.py @@ -108,6 +108,7 @@ def _init_manager(): b'meta': 0.0, b'attr': 0.0, b'migrations': 0.0, + b'static-meta': 0.0, b'event': 0.0, b'event-metadata': 0.0, b'feature-activation-metadata': 0.0, @@ -161,6 +162,7 @@ def _init_manager(): b'meta': 0.0, b'attr': 0.0, b'migrations': 0.0, + b'static-meta': 0.0, b'event': 0.0, b'event-metadata': 0.0, b'feature-activation-metadata': 0.0, diff --git a/tests/poa/test_poa_simulation.py b/tests/poa/test_poa_simulation.py index 33c455426..9941dabf0 100644 --- a/tests/poa/test_poa_simulation.py +++ b/tests/poa/test_poa_simulation.py @@ -185,7 +185,7 @@ def test_two_producers(self) -> None: assert set(manager1_blocks_by_height[1]) == set(manager2_blocks_by_height[1]) # but only the block from signer2 becomes non-voided, as it is in turn - non_voided_block1 = manager1.tx_storage.get_transaction_by_height(1) + non_voided_block1 = manager1.tx_storage.get_block_by_height(1) assert isinstance(non_voided_block1, PoaBlock) _assert_block_in_turn(non_voided_block1, signer2) diff --git a/tests/tx/test_block.py b/tests/tx/test_block.py index 7bef0f834..9996d9f55 100644 --- a/tests/tx/test_block.py +++ b/tests/tx/test_block.py @@ -20,26 +20,29 @@ from hathor.conf.settings import HathorSettings from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import 
BlockIsMissingSignal, BlockIsSignaling, FeatureService +from hathor.indexes import MemoryIndexesManager from hathor.transaction import Block, TransactionMetadata from hathor.transaction.exceptions import BlockMustSignalError from hathor.transaction.storage import TransactionMemoryStorage, TransactionStorage +from hathor.transaction.validation_state import ValidationState +from hathor.util import not_none from hathor.verification.block_verifier import BlockVerifier def test_calculate_feature_activation_bit_counts_genesis(): settings = get_global_settings() storage = TransactionMemoryStorage(settings=settings) - genesis_block = storage.get_transaction(settings.GENESIS_BLOCK_HASH) - assert isinstance(genesis_block, Block) + genesis_block = storage.get_block(settings.GENESIS_BLOCK_HASH) result = genesis_block.get_feature_activation_bit_counts() assert result == [0, 0, 0, 0] @pytest.fixture -def block_mocks() -> list[Block]: +def tx_storage() -> TransactionStorage: settings = get_global_settings() - blocks: list[Block] = [] + indexes = MemoryIndexesManager() + storage = TransactionMemoryStorage(indexes=indexes, settings=settings) feature_activation_bits = [ 0b0000, # 0: boundary block 0b1010, @@ -55,20 +58,19 @@ def block_mocks() -> list[Block]: 0b0000, ] - for i, bits in enumerate(feature_activation_bits): - genesis_hash = settings.GENESIS_BLOCK_HASH - block_hash = genesis_hash if i == 0 else b'some_hash' + for height, bits in enumerate(feature_activation_bits): + if height == 0: + continue + parent = not_none(storage.get_block_by_height(height - 1)) + block = Block(signal_bits=bits, parents=[parent.hash], storage=storage) + block.update_hash() + meta = block.get_metadata() + meta.validation = ValidationState.FULL + meta.height = height + storage.save_transaction(block) + indexes.height.add_new(height, block.hash, block.timestamp) - storage = Mock(spec_set=TransactionStorage) - storage.get_metadata = Mock(return_value=None) - - block = Block(hash=block_hash, storage=storage, signal_bits=bits) - blocks.append(block) - - get_block_parent_mock = Mock(return_value=blocks[i - 1]) - setattr(block, 'get_block_parent', get_block_parent_mock) - - return blocks + return storage @pytest.mark.parametrize( @@ -87,14 +89,12 @@ def block_mocks() -> list[Block]: ] ) def test_calculate_feature_activation_bit_counts( - block_mocks: list[Block], + tx_storage: TransactionStorage, block_height: int, expected_counts: list[int] ) -> None: - block = block_mocks[block_height] - result = block.get_feature_activation_bit_counts() - - assert result == expected_counts + block = not_none(tx_storage.get_block_by_height(block_height)) + assert block.get_feature_activation_bit_counts() == expected_counts def test_get_height(): From 79c03e92670e33d09880c0b001ea0cf167a1103a Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Wed, 7 Aug 2024 15:59:00 +0200 Subject: [PATCH 06/61] feat(cli): early protection for invalid prometheus prefix --- hathor/builder/resources_builder.py | 33 ++++++++++++++++++++++++++++- 1 file changed, 32 insertions(+), 1 deletion(-) diff --git a/hathor/builder/resources_builder.py b/hathor/builder/resources_builder.py index 470f0c613..f2c2cc16a 100644 --- a/hathor/builder/resources_builder.py +++ b/hathor/builder/resources_builder.py @@ -13,6 +13,7 @@ # limitations under the License. 
import os
+import re
 from typing import TYPE_CHECKING, Any, Optional
 
 from autobahn.twisted.resource import WebSocketResource
@@ -32,6 +33,31 @@
 
 logger = get_logger()
 
+PROMETHEUS_METRIC_RE = re.compile(r'[a-zA-Z_:][a-zA-Z0-9_:]*')
+
+
+def is_prometheus_metric_name_valid(name: str) -> bool:
+    """Whether a metric name is valid.
+
+    See: https://prometheus.io/docs/concepts/data_model/#metric-names-and-labels
+
+    >>> is_prometheus_metric_name_valid('')
+    False
+    >>> is_prometheus_metric_name_valid('hathor_core:')
+    True
+    >>> is_prometheus_metric_name_valid("'hathor_core:'")
+    False
+    >>> is_prometheus_metric_name_valid('_hathor_core')
+    True
+    >>> is_prometheus_metric_name_valid('__hathor_core')
+    False
+    """
+    if not PROMETHEUS_METRIC_RE.match(name):
+        return False
+    if name.startswith('__'):
+        return False
+    return True
+
 
 class ResourcesBuilder:
     def __init__(
@@ -60,9 +86,14 @@ def build(self) -> Optional[server.Site]:
         return None
 
     def create_prometheus(self) -> PrometheusMetricsExporter:
+        prometheus_prefix = self._args.prometheus_prefix
+        if self._args.prometheus_prefix and not is_prometheus_metric_name_valid(prometheus_prefix):
+            raise BuilderError(f'Invalid prometheus prefix, must match {PROMETHEUS_METRIC_RE.pattern}, '
+                               f'but the value given is {repr(prometheus_prefix)}')
+
         kwargs: dict[str, Any] = {
             'metrics': self.manager.metrics,
-            'metrics_prefix': self._args.prometheus_prefix
+            'metrics_prefix': prometheus_prefix,
         }
 
         if self._args.data:

From 78dcca19808b66168f3e08d2b13116542018aa71 Mon Sep 17 00:00:00 2001
From: Gabriel Levcovitz
Date: Mon, 19 Aug 2024 16:06:36 -0300
Subject: [PATCH 07/61] chore(benchmark): improve benchmark CI (#1112)

---
 .github/workflows/pr_benchmarks.yml      | 49 ++++++++++++++++++------
 extras/benchmarking/benchmark_sync_v2.sh | 31 +--------------
 2 files changed, 39 insertions(+), 41 deletions(-)

diff --git a/.github/workflows/pr_benchmarks.yml b/.github/workflows/pr_benchmarks.yml
index 2aa2e16a4..d78ae72f7 100644
--- a/.github/workflows/pr_benchmarks.yml
+++ b/.github/workflows/pr_benchmarks.yml
@@ -13,6 +13,14 @@ jobs:
     permissions:
       pull-requests: write
     runs-on: ubuntu-22.04
+    env:
+      N_BLOCKS: 20000
+      CACHE_SIZE: 100000
+      SERVER_DATA_DIR: server-data
+      TCP_PORT: 40403
+      N_RUNS: 2
+      BENCH_FILE: bench_results.json
+      BENCH_DATA_DIR: bench-data
     steps:
       - uses: actions/checkout@v4
       - uses: bencherdev/bencher@main
       - name: Install hyperfine
         run: |
           wget https://github.com/sharkdp/hyperfine/releases/download/v1.12.0/hyperfine_1.12.0_amd64.deb
           sudo dpkg -i hyperfine_1.12.0_amd64.deb
       - uses: ./.github/actions/setup-hathor-env
         name: Setup Hathor node environment
         with:
           python: 3.11
           os: ubuntu-22.04
+      - name: Download benchmark data
+        run: |
+          mkdir $SERVER_DATA_DIR
+          poetry run hathor-cli quick_test \
+            --testnet \
+            --data $SERVER_DATA_DIR \
+            --cache \
+            --cache-size $CACHE_SIZE \
+            --quit-after-n-blocks $N_BLOCKS
+      - name: Run server node
+        run: |
+          poetry run hathor-cli run_node \
+            --testnet \
+            --data $SERVER_DATA_DIR \
+            --cache \
+            --cache-size $CACHE_SIZE \
+            --x-localhost-only \
+            --listen tcp:$TCP_PORT \
+            &
       - name: Track PR Benchmarks with Bencher
         run: |
           bencher run \
-          --project hathor-core \
-          --token '${{ secrets.BENCHER_API_TOKEN }}' \
-          --branch '${{ github.head_ref }}' \
-          --branch-start-point '${{ github.base_ref }}' \
-          --branch-start-point-hash '${{ github.event.pull_request.base.sha }}' \
-          --testbed ubuntu-22.04 \
-          --adapter shell_hyperfine \
-          --err \
-          --github-actions '${{ secrets.GITHUB_TOKEN }}' \
-          --file bench_results.json \
-          './extras/benchmarking/benchmark_sync_v2.sh'
+          bencher run \
+            --project hathor-core \
+            --token '${{ secrets.BENCHER_API_TOKEN }}' \
+            --branch '${{ github.head_ref }}' \
+            --branch-start-point '${{ github.base_ref }}' \
+            --branch-start-point-hash '${{ 
github.event.pull_request.base.sha }}' \ + --testbed ubuntu-22.04 \ + --adapter shell_hyperfine \ + --err \ + --github-actions '${{ secrets.GITHUB_TOKEN }}' \ + --file $BENCH_FILE \ + './extras/benchmarking/benchmark_sync_v2.sh' diff --git a/extras/benchmarking/benchmark_sync_v2.sh b/extras/benchmarking/benchmark_sync_v2.sh index 9df5b87c4..edbaba945 100755 --- a/extras/benchmarking/benchmark_sync_v2.sh +++ b/extras/benchmarking/benchmark_sync_v2.sh @@ -1,34 +1,5 @@ -N_BLOCKS=20000 -CACHE_SIZE=100000 -TESTNET_DATA_DIR=server-data -TCP_PORT=40403 -AWAIT_INIT_DELAY=10 -N_RUNS=2 -BENCH_FILE=bench_results.json -BENCH_DATA_DIR=bench-data - -BLUE='\033[0;34m' -NO_COLOR='\033[0m' - -echo "${BLUE}Downloading testnet data...${NO_COLOR}" -mkdir $TESTNET_DATA_DIR -poetry run hathor-cli quick_test --testnet --data $TESTNET_DATA_DIR --quit-after-n-blocks $N_BLOCKS > /dev/null 2>&1 - -echo "${BLUE}Running server node in the background...${NO_COLOR}" -poetry run hathor-cli run_node \ - --testnet \ - --data $TESTNET_DATA_DIR \ - --cache \ - --cache-size $CACHE_SIZE \ - --x-localhost-only \ - --listen tcp:$TCP_PORT \ - > /dev/null 2>&1 & - -# Await initialization -sleep $AWAIT_INIT_DELAY - -echo "${BLUE}Running benchmark...${NO_COLOR}" hyperfine \ + --warmup 1 \ --runs $N_RUNS \ --export-json $BENCH_FILE \ --command-name "sync-v2 (up to $N_BLOCKS blocks)" \ From 7f03142cdbb372794aa5d2abc6f6188c87eeedff Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Tue, 20 Aug 2024 15:56:04 -0300 Subject: [PATCH 08/61] chore: update dependencies (#998) --- hathor/p2p/manager.py | 7 +- hathor/p2p/utils.py | 2 +- .../resources/thin_wallet/send_tokens.py | 3 +- hathor/wallet/wallet.py | 5 +- hathor/websocket/protocol.py | 6 +- poetry.lock | 642 +++++++++--------- pyproject.toml | 38 +- 7 files changed, 351 insertions(+), 352 deletions(-) diff --git a/hathor/p2p/manager.py b/hathor/p2p/manager.py index b34254282..c0f6b58f6 100644 --- a/hathor/p2p/manager.py +++ b/hathor/p2p/manager.py @@ -12,13 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import TYPE_CHECKING, Any, Iterable, NamedTuple, Optional, Union +from typing import TYPE_CHECKING, Any, Iterable, NamedTuple, Optional from structlog import get_logger from twisted.internet import endpoints from twisted.internet.address import IPv4Address, IPv6Address from twisted.internet.defer import Deferred -from twisted.internet.interfaces import IListeningPort, IProtocolFactory, IStreamClientEndpoint +from twisted.internet.interfaces import IListeningPort, IProtocol, IProtocolFactory, IStreamClientEndpoint from twisted.internet.task import LoopingCall from twisted.protocols.tls import TLSMemoryBIOFactory, TLSMemoryBIOProtocol from twisted.python.failure import Failure @@ -593,7 +593,7 @@ def connect_to_if_not_connected(self, peer: PeerId, now: int) -> None: def _connect_to_callback( self, - protocol: Union[HathorProtocol, TLSMemoryBIOProtocol], + protocol: IProtocol, peer: Optional[PeerId], endpoint: IStreamClientEndpoint, entrypoint: Entrypoint, @@ -602,6 +602,7 @@ def _connect_to_callback( if isinstance(protocol, HathorProtocol): protocol.on_outbound_connect(entrypoint) else: + assert isinstance(protocol, TLSMemoryBIOProtocol) assert isinstance(protocol.wrappedProtocol, HathorProtocol) protocol.wrappedProtocol.on_outbound_connect(entrypoint) self.connecting_peers.pop(endpoint) diff --git a/hathor/p2p/utils.py b/hathor/p2p/utils.py index 0dfe9ebc1..7141030d1 100644 --- a/hathor/p2p/utils.py +++ b/hathor/p2p/utils.py @@ -19,8 +19,8 @@ import requests from cryptography import x509 from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.backends.openssl.rsa import RSAPrivateKey from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey from cryptography.hazmat.primitives.serialization import load_pem_private_key from cryptography.x509 import Certificate from cryptography.x509.oid import NameOID diff --git a/hathor/wallet/resources/thin_wallet/send_tokens.py b/hathor/wallet/resources/thin_wallet/send_tokens.py index 16f21d004..83f1f4bf5 100644 --- a/hathor/wallet/resources/thin_wallet/send_tokens.py +++ b/hathor/wallet/resources/thin_wallet/send_tokens.py @@ -154,7 +154,8 @@ def _render_POST_stratum(self, context: _Context) -> None: # When using stratum to solve pow, we already set timestamp and parents stratum_deferred: Deferred[None] = Deferred() - stratum_deferred.addCallback(self._stratum_deferred_resolve, request) + # FIXME: Skipping mypy on the line below for now, as it looks like it's wrong but we don't have tests for it. 
+ stratum_deferred.addCallback(self._stratum_deferred_resolve, request) # type: ignore fn_timeout = partial(self._stratum_timeout, request=request, tx=tx) stratum_deferred.addTimeout(TIMEOUT_STRATUM_RESOLVE_POW, self.manager.reactor, onTimeoutCancel=fn_timeout) diff --git a/hathor/wallet/wallet.py b/hathor/wallet/wallet.py index 55ce19211..acc87cd17 100644 --- a/hathor/wallet/wallet.py +++ b/hathor/wallet/wallet.py @@ -17,7 +17,6 @@ import os from typing import Any, Optional -from cryptography.hazmat.backends.openssl.ec import _EllipticCurvePrivateKey from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import ec from twisted.internet.interfaces import IDelayedCall @@ -179,7 +178,7 @@ def generate_keys(self, count: int = 20) -> None: # Publish to pubsub that new keys were generated self.publish_update(HathorEvents.WALLET_KEYS_GENERATED, keys_count=count) - def get_private_key(self, address58: str) -> _EllipticCurvePrivateKey: + def get_private_key(self, address58: str) -> ec.EllipticCurvePrivateKey: """ Get private key from the address58 :param address58: address in base58 @@ -204,7 +203,7 @@ def tokens_received(self, address58: str) -> None: def is_locked(self): return self.password is None - def get_input_aux_data(self, data_to_sign: bytes, private_key: _EllipticCurvePrivateKey) -> tuple[bytes, bytes]: + def get_input_aux_data(self, data_to_sign: bytes, private_key: ec.EllipticCurvePrivateKey) -> tuple[bytes, bytes]: """ Sign the data to be used in input and get public key compressed in bytes :param data_to_sign: Data to be signed diff --git a/hathor/websocket/protocol.py b/hathor/websocket/protocol.py index 1b3bde0bb..e506901f9 100644 --- a/hathor/websocket/protocol.py +++ b/hathor/websocket/protocol.py @@ -16,6 +16,7 @@ from autobahn.twisted.websocket import WebSocketServerProtocol from structlog import get_logger +from twisted.python.failure import Failure from hathor.p2p.utils import format_address from hathor.util import json_dumpb, json_loadb, json_loads @@ -243,12 +244,13 @@ def _handle_history_manual_streamer(self, message: dict[Any, Any]) -> None: gap_limit=gap_limit, last=last) - def _streamer_callback(self, success: bool) -> None: + def _streamer_callback(self, result: bool | Failure) -> None: """Callback used to identify when the streamer has ended.""" + # TODO: Handle the case when `result` is Failure assert self._history_streamer is not None self.log.info('websocket xpub streaming has been finished', stream_id=self._history_streamer.stream_id, - success=success, + success=result, sent_addresses=self._history_streamer.stats_sent_addresses, sent_vertices=self._history_streamer.stats_sent_vertices) self._history_streamer = None diff --git a/poetry.lock b/poetry.lock index 163ae443e..5782686ae 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,91 +1,103 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
+ +[[package]] +name = "aiohappyeyeballs" +version = "2.3.5" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohappyeyeballs-2.3.5-py3-none-any.whl", hash = "sha256:4d6dea59215537dbc746e93e779caea8178c866856a721c9c660d7a5a7b8be03"}, + {file = "aiohappyeyeballs-2.3.5.tar.gz", hash = "sha256:6fa48b9f1317254f122a07a131a86b71ca6946ca989ce6326fff54a99a920105"}, +] [[package]] name = "aiohttp" -version = "3.9.1" +version = "3.10.3" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1f80197f8b0b846a8d5cf7b7ec6084493950d0882cc5537fb7b96a69e3c8590"}, - {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72444d17777865734aa1a4d167794c34b63e5883abb90356a0364a28904e6c0"}, - {file = "aiohttp-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b05d5cbe9dafcdc733262c3a99ccf63d2f7ce02543620d2bd8db4d4f7a22f83"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c4fa235d534b3547184831c624c0b7c1e262cd1de847d95085ec94c16fddcd5"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:289ba9ae8e88d0ba16062ecf02dd730b34186ea3b1e7489046fc338bdc3361c4"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bff7e2811814fa2271be95ab6e84c9436d027a0e59665de60edf44e529a42c1f"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b9c7426923bb7bd66d409da46c41e3fb40f5caf679da624439b9eba92043fa6"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8d44e7bf06b0c0a70a20f9100af9fcfd7f6d9d3913e37754c12d424179b4e48f"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22698f01ff5653fe66d16ffb7658f582a0ac084d7da1323e39fd9eab326a1f26"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ca7ca5abfbfe8d39e653870fbe8d7710be7a857f8a8386fc9de1aae2e02ce7e4"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8d7f98fde213f74561be1d6d3fa353656197f75d4edfbb3d94c9eb9b0fc47f5d"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5216b6082c624b55cfe79af5d538e499cd5f5b976820eac31951fb4325974501"}, - {file = "aiohttp-3.9.1-cp310-cp310-win32.whl", hash = "sha256:0e7ba7ff228c0d9a2cd66194e90f2bca6e0abca810b786901a569c0de082f489"}, - {file = "aiohttp-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:c7e939f1ae428a86e4abbb9a7c4732bf4706048818dfd979e5e2839ce0159f23"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:df9cf74b9bc03d586fc53ba470828d7b77ce51b0582d1d0b5b2fb673c0baa32d"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecca113f19d5e74048c001934045a2b9368d77b0b17691d905af18bd1c21275e"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cef8710fb849d97c533f259103f09bac167a008d7131d7b2b0e3a33269185c0"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea94403a21eb94c93386d559bce297381609153e418a3ffc7d6bf772f59cc35"}, - {file = 
"aiohttp-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91c742ca59045dce7ba76cab6e223e41d2c70d79e82c284a96411f8645e2afff"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c93b7c2e52061f0925c3382d5cb8980e40f91c989563d3d32ca280069fd6a87"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee2527134f95e106cc1653e9ac78846f3a2ec1004cf20ef4e02038035a74544d"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11ff168d752cb41e8492817e10fb4f85828f6a0142b9726a30c27c35a1835f01"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b8c3a67eb87394386847d188996920f33b01b32155f0a94f36ca0e0c635bf3e3"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c7b5d5d64e2a14e35a9240b33b89389e0035e6de8dbb7ffa50d10d8b65c57449"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:69985d50a2b6f709412d944ffb2e97d0be154ea90600b7a921f95a87d6f108a2"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:c9110c06eaaac7e1f5562caf481f18ccf8f6fdf4c3323feab28a93d34cc646bd"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737e69d193dac7296365a6dcb73bbbf53bb760ab25a3727716bbd42022e8d7a"}, - {file = "aiohttp-3.9.1-cp311-cp311-win32.whl", hash = "sha256:4ee8caa925aebc1e64e98432d78ea8de67b2272252b0a931d2ac3bd876ad5544"}, - {file = "aiohttp-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a34086c5cc285be878622e0a6ab897a986a6e8bf5b67ecb377015f06ed316587"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f800164276eec54e0af5c99feb9494c295118fc10a11b997bbb1348ba1a52065"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:500f1c59906cd142d452074f3811614be04819a38ae2b3239a48b82649c08821"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0b0a6a36ed7e164c6df1e18ee47afbd1990ce47cb428739d6c99aaabfaf1b3af"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69da0f3ed3496808e8cbc5123a866c41c12c15baaaead96d256477edf168eb57"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176df045597e674fa950bf5ae536be85699e04cea68fa3a616cf75e413737eb5"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b796b44111f0cab6bbf66214186e44734b5baab949cb5fb56154142a92989aeb"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f27fdaadce22f2ef950fc10dcdf8048407c3b42b73779e48a4e76b3c35bca26c"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcb6532b9814ea7c5a6a3299747c49de30e84472fa72821b07f5a9818bce0f66"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:54631fb69a6e44b2ba522f7c22a6fb2667a02fd97d636048478db2fd8c4e98fe"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4b4c452d0190c5a820d3f5c0f3cd8a28ace48c54053e24da9d6041bf81113183"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:cae4c0c2ca800c793cae07ef3d40794625471040a87e1ba392039639ad61ab5b"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:565760d6812b8d78d416c3c7cfdf5362fbe0d0d25b82fed75d0d29e18d7fc30f"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54311eb54f3a0c45efb9ed0d0a8f43d1bc6060d773f6973efd90037a51cd0a3f"}, - {file = "aiohttp-3.9.1-cp312-cp312-win32.whl", hash = "sha256:85c3e3c9cb1d480e0b9a64c658cd66b3cfb8e721636ab8b0e746e2d79a7a9eed"}, - {file = "aiohttp-3.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:11cb254e397a82efb1805d12561e80124928e04e9c4483587ce7390b3866d213"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a22a34bc594d9d24621091d1b91511001a7eea91d6652ea495ce06e27381f70"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598db66eaf2e04aa0c8900a63b0101fdc5e6b8a7ddd805c56d86efb54eb66672"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c9376e2b09895c8ca8b95362283365eb5c03bdc8428ade80a864160605715f1"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41473de252e1797c2d2293804e389a6d6986ef37cbb4a25208de537ae32141dd"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c5857612c9813796960c00767645cb5da815af16dafb32d70c72a8390bbf690"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffcd828e37dc219a72c9012ec44ad2e7e3066bec6ff3aaa19e7d435dbf4032ca"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:219a16763dc0294842188ac8a12262b5671817042b35d45e44fd0a697d8c8361"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f694dc8a6a3112059258a725a4ebe9acac5fe62f11c77ac4dcf896edfa78ca28"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcc0ea8d5b74a41b621ad4a13d96c36079c81628ccc0b30cfb1603e3dfa3a014"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90ec72d231169b4b8d6085be13023ece8fa9b1bb495e4398d847e25218e0f431"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cf2a0ac0615842b849f40c4d7f304986a242f1e68286dbf3bd7a835e4f83acfd"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0e49b08eafa4f5707ecfb321ab9592717a319e37938e301d462f79b4e860c32a"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c59e0076ea31c08553e868cec02d22191c086f00b44610f8ab7363a11a5d9d8"}, - {file = "aiohttp-3.9.1-cp38-cp38-win32.whl", hash = "sha256:4831df72b053b1eed31eb00a2e1aff6896fb4485301d4ccb208cac264b648db4"}, - {file = "aiohttp-3.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:3135713c5562731ee18f58d3ad1bf41e1d8883eb68b363f2ffde5b2ea4b84cc7"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cfeadf42840c1e870dc2042a232a8748e75a36b52d78968cda6736de55582766"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70907533db712f7aa791effb38efa96f044ce3d4e850e2d7691abd759f4f0ae0"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cdefe289681507187e375a5064c7599f52c40343a8701761c802c1853a504558"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7481f581251bb5558ba9f635db70908819caa221fc79ee52a7f58392778c636"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49f0c1b3c2842556e5de35f122fc0f0b721334ceb6e78c3719693364d4af8499"}, - 
{file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d406b01a9f5a7e232d1b0d161b40c05275ffbcbd772dc18c1d5a570961a1ca4"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d8e4450e7fe24d86e86b23cc209e0023177b6d59502e33807b732d2deb6975f"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c0266cd6f005e99f3f51e583012de2778e65af6b73860038b968a0a8888487a"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab221850108a4a063c5b8a70f00dd7a1975e5a1713f87f4ab26a46e5feac5a0e"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c88a15f272a0ad3d7773cf3a37cc7b7d077cbfc8e331675cf1346e849d97a4e5"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:237533179d9747080bcaad4d02083ce295c0d2eab3e9e8ce103411a4312991a0"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:02ab6006ec3c3463b528374c4cdce86434e7b89ad355e7bf29e2f16b46c7dd6f"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04fa38875e53eb7e354ece1607b1d2fdee2d175ea4e4d745f6ec9f751fe20c7c"}, - {file = "aiohttp-3.9.1-cp39-cp39-win32.whl", hash = "sha256:82eefaf1a996060602f3cc1112d93ba8b201dbf5d8fd9611227de2003dddb3b7"}, - {file = "aiohttp-3.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:9b05d33ff8e6b269e30a7957bd3244ffbce2a7a35a81b81c382629b80af1a8bf"}, - {file = "aiohttp-3.9.1.tar.gz", hash = "sha256:8fc49a87ac269d4529da45871e2ffb6874e87779c3d0e2ccd813c0899221239d"}, + {file = "aiohttp-3.10.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc36cbdedf6f259371dbbbcaae5bb0e95b879bc501668ab6306af867577eb5db"}, + {file = "aiohttp-3.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85466b5a695c2a7db13eb2c200af552d13e6a9313d7fa92e4ffe04a2c0ea74c1"}, + {file = "aiohttp-3.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:71bb1d97bfe7e6726267cea169fdf5df7658831bb68ec02c9c6b9f3511e108bb"}, + {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baec1eb274f78b2de54471fc4c69ecbea4275965eab4b556ef7a7698dee18bf2"}, + {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13031e7ec1188274bad243255c328cc3019e36a5a907978501256000d57a7201"}, + {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2bbc55a964b8eecb341e492ae91c3bd0848324d313e1e71a27e3d96e6ee7e8e8"}, + {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8cc0564b286b625e673a2615ede60a1704d0cbbf1b24604e28c31ed37dc62aa"}, + {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f817a54059a4cfbc385a7f51696359c642088710e731e8df80d0607193ed2b73"}, + {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8542c9e5bcb2bd3115acdf5adc41cda394e7360916197805e7e32b93d821ef93"}, + {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:671efce3a4a0281060edf9a07a2f7e6230dca3a1cbc61d110eee7753d28405f7"}, + {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0974f3b5b0132edcec92c3306f858ad4356a63d26b18021d859c9927616ebf27"}, + {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:44bb159b55926b57812dca1b21c34528e800963ffe130d08b049b2d6b994ada7"}, + 
{file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6ae9ae382d1c9617a91647575255ad55a48bfdde34cc2185dd558ce476bf16e9"}, + {file = "aiohttp-3.10.3-cp310-cp310-win32.whl", hash = "sha256:aed12a54d4e1ee647376fa541e1b7621505001f9f939debf51397b9329fd88b9"}, + {file = "aiohttp-3.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:b51aef59370baf7444de1572f7830f59ddbabd04e5292fa4218d02f085f8d299"}, + {file = "aiohttp-3.10.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e021c4c778644e8cdc09487d65564265e6b149896a17d7c0f52e9a088cc44e1b"}, + {file = "aiohttp-3.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:24fade6dae446b183e2410a8628b80df9b7a42205c6bfc2eff783cbeedc224a2"}, + {file = "aiohttp-3.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bc8e9f15939dacb0e1f2d15f9c41b786051c10472c7a926f5771e99b49a5957f"}, + {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5a9ec959b5381271c8ec9310aae1713b2aec29efa32e232e5ef7dcca0df0279"}, + {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a5d0ea8a6467b15d53b00c4e8ea8811e47c3cc1bdbc62b1aceb3076403d551f"}, + {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9ed607dbbdd0d4d39b597e5bf6b0d40d844dfb0ac6a123ed79042ef08c1f87e"}, + {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e66d5b506832e56add66af88c288c1d5ba0c38b535a1a59e436b300b57b23e"}, + {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fda91ad797e4914cca0afa8b6cccd5d2b3569ccc88731be202f6adce39503189"}, + {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:61ccb867b2f2f53df6598eb2a93329b5eee0b00646ee79ea67d68844747a418e"}, + {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d881353264e6156f215b3cb778c9ac3184f5465c2ece5e6fce82e68946868ef"}, + {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b031ce229114825f49cec4434fa844ccb5225e266c3e146cb4bdd025a6da52f1"}, + {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5337cc742a03f9e3213b097abff8781f79de7190bbfaa987bd2b7ceb5bb0bdec"}, + {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ab3361159fd3dcd0e48bbe804006d5cfb074b382666e6c064112056eb234f1a9"}, + {file = "aiohttp-3.10.3-cp311-cp311-win32.whl", hash = "sha256:05d66203a530209cbe40f102ebaac0b2214aba2a33c075d0bf825987c36f1f0b"}, + {file = "aiohttp-3.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:70b4a4984a70a2322b70e088d654528129783ac1ebbf7dd76627b3bd22db2f17"}, + {file = "aiohttp-3.10.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:166de65e2e4e63357cfa8417cf952a519ac42f1654cb2d43ed76899e2319b1ee"}, + {file = "aiohttp-3.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7084876352ba3833d5d214e02b32d794e3fd9cf21fdba99cff5acabeb90d9806"}, + {file = "aiohttp-3.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d98c604c93403288591d7d6d7d6cc8a63459168f8846aeffd5b3a7f3b3e5e09"}, + {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d73b073a25a0bb8bf014345374fe2d0f63681ab5da4c22f9d2025ca3e3ea54fc"}, + {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8da6b48c20ce78f5721068f383e0e113dde034e868f1b2f5ee7cb1e95f91db57"}, + {file = 
"aiohttp-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a9dcdccf50284b1b0dc72bc57e5bbd3cc9bf019060dfa0668f63241ccc16aa7"}, + {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56fb94bae2be58f68d000d046172d8b8e6b1b571eb02ceee5535e9633dcd559c"}, + {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf75716377aad2c718cdf66451c5cf02042085d84522aec1f9246d3e4b8641a6"}, + {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6c51ed03e19c885c8e91f574e4bbe7381793f56f93229731597e4a499ffef2a5"}, + {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b84857b66fa6510a163bb083c1199d1ee091a40163cfcbbd0642495fed096204"}, + {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c124b9206b1befe0491f48185fd30a0dd51b0f4e0e7e43ac1236066215aff272"}, + {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3461d9294941937f07bbbaa6227ba799bc71cc3b22c40222568dc1cca5118f68"}, + {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:08bd0754d257b2db27d6bab208c74601df6f21bfe4cb2ec7b258ba691aac64b3"}, + {file = "aiohttp-3.10.3-cp312-cp312-win32.whl", hash = "sha256:7f9159ae530297f61a00116771e57516f89a3de6ba33f314402e41560872b50a"}, + {file = "aiohttp-3.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:e1128c5d3a466279cb23c4aa32a0f6cb0e7d2961e74e9e421f90e74f75ec1edf"}, + {file = "aiohttp-3.10.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d1100e68e70eb72eadba2b932b185ebf0f28fd2f0dbfe576cfa9d9894ef49752"}, + {file = "aiohttp-3.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a541414578ff47c0a9b0b8b77381ea86b0c8531ab37fc587572cb662ccd80b88"}, + {file = "aiohttp-3.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d5548444ef60bf4c7b19ace21f032fa42d822e516a6940d36579f7bfa8513f9c"}, + {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ba2e838b5e6a8755ac8297275c9460e729dc1522b6454aee1766c6de6d56e5e"}, + {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48665433bb59144aaf502c324694bec25867eb6630fcd831f7a893ca473fcde4"}, + {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bac352fceed158620ce2d701ad39d4c1c76d114255a7c530e057e2b9f55bdf9f"}, + {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b0f670502100cdc567188c49415bebba947eb3edaa2028e1a50dd81bd13363f"}, + {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43b09f38a67679e32d380fe512189ccb0b25e15afc79b23fbd5b5e48e4fc8fd9"}, + {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:cd788602e239ace64f257d1c9d39898ca65525583f0fbf0988bcba19418fe93f"}, + {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:214277dcb07ab3875f17ee1c777d446dcce75bea85846849cc9d139ab8f5081f"}, + {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:32007fdcaab789689c2ecaaf4b71f8e37bf012a15cd02c0a9db8c4d0e7989fa8"}, + {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:123e5819bfe1b87204575515cf448ab3bf1489cdeb3b61012bde716cda5853e7"}, + {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:812121a201f0c02491a5db335a737b4113151926a79ae9ed1a9f41ea225c0e3f"}, + {file = "aiohttp-3.10.3-cp38-cp38-win32.whl", hash = "sha256:b97dc9a17a59f350c0caa453a3cb35671a2ffa3a29a6ef3568b523b9113d84e5"}, + {file = "aiohttp-3.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:3731a73ddc26969d65f90471c635abd4e1546a25299b687e654ea6d2fc052394"}, + {file = "aiohttp-3.10.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38d91b98b4320ffe66efa56cb0f614a05af53b675ce1b8607cdb2ac826a8d58e"}, + {file = "aiohttp-3.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9743fa34a10a36ddd448bba8a3adc2a66a1c575c3c2940301bacd6cc896c6bf1"}, + {file = "aiohttp-3.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7c126f532caf238031c19d169cfae3c6a59129452c990a6e84d6e7b198a001dc"}, + {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:926e68438f05703e500b06fe7148ef3013dd6f276de65c68558fa9974eeb59ad"}, + {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:434b3ab75833accd0b931d11874e206e816f6e6626fd69f643d6a8269cd9166a"}, + {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d35235a44ec38109b811c3600d15d8383297a8fab8e3dec6147477ec8636712a"}, + {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59c489661edbd863edb30a8bd69ecb044bd381d1818022bc698ba1b6f80e5dd1"}, + {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50544fe498c81cb98912afabfc4e4d9d85e89f86238348e3712f7ca6a2f01dab"}, + {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:09bc79275737d4dc066e0ae2951866bb36d9c6b460cb7564f111cc0427f14844"}, + {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:af4dbec58e37f5afff4f91cdf235e8e4b0bd0127a2a4fd1040e2cad3369d2f06"}, + {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b22cae3c9dd55a6b4c48c63081d31c00fc11fa9db1a20c8a50ee38c1a29539d2"}, + {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ba562736d3fbfe9241dad46c1a8994478d4a0e50796d80e29d50cabe8fbfcc3f"}, + {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f25d6c4e82d7489be84f2b1c8212fafc021b3731abdb61a563c90e37cced3a21"}, + {file = "aiohttp-3.10.3-cp39-cp39-win32.whl", hash = "sha256:b69d832e5f5fa15b1b6b2c8eb6a9fd2c0ec1fd7729cb4322ed27771afc9fc2ac"}, + {file = "aiohttp-3.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:673bb6e3249dc8825df1105f6ef74e2eab779b7ff78e96c15cadb78b04a83752"}, + {file = "aiohttp-3.10.3.tar.gz", hash = "sha256:21650e7032cc2d31fc23d353d7123e771354f2a3d5b05a5647fc30fea214e696"}, ] [package.dependencies] +aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" @@ -94,7 +106,7 @@ multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] +speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] [[package]] name = "aiosignal" @@ -170,12 +182,13 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte [[package]] name = "autobahn" -version = "23.6.2" +version = "24.4.2" description = "WebSocket client & server library, WAMP real-time framework" optional = false python-versions = ">=3.9" files = [ - {file = "autobahn-23.6.2.tar.gz", hash = 
"sha256:ec9421c52a2103364d1ef0468036e6019ee84f71721e86b36fe19ad6966c1181"}, + {file = "autobahn-24.4.2-py2.py3-none-any.whl", hash = "sha256:c56a2abe7ac78abbfb778c02892d673a4de58fd004d088cd7ab297db25918e81"}, + {file = "autobahn-24.4.2.tar.gz", hash = "sha256:a2d71ef1b0cf780b6d11f8b205fd2c7749765e65795f2ea7d823796642ee92c9"}, ] [package.dependencies] @@ -185,14 +198,14 @@ setuptools = "*" txaio = ">=21.2.1" [package.extras] -all = ["PyGObject (>=3.40.0)", "argon2_cffi (>=20.1.0)", "attrs (>=20.3.0)", "base58 (>=2.1.0)", "bitarray (>=2.7.5)", "cbor2 (>=5.2.0)", "cffi (>=1.14.5)", "click (>=8.1.2)", "ecdsa (>=0.16.1)", "eth-abi (>=4.0.0)", "flatbuffers (>=22.12.6)", "hkdf (>=0.0.3)", "jinja2 (>=2.11.3)", "mnemonic (>=0.19)", "msgpack (>=1.0.2)", "passlib (>=1.7.4)", "py-ecc (>=5.1.0)", "py-eth-sig-utils (>=0.4.0)", "py-multihash (>=2.0.1)", "py-ubjson (>=0.16.1)", "pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "python-snappy (>=0.6.0)", "pytrie (>=0.4.0)", "qrcode (>=7.3.1)", "rlp (>=2.0.1)", "service_identity (>=18.1.0)", "spake2 (>=0.8)", "twisted (>=20.3.0)", "ujson (>=4.0.2)", "web3[ipfs] (>=6.0.0)", "xbr (>=21.2.1)", "yapf (==0.29.0)", "zlmdb (>=21.2.1)", "zope.interface (>=5.2.0)"] +all = ["PyGObject (>=3.40.0)", "argon2-cffi (>=20.1.0)", "attrs (>=20.3.0)", "base58 (>=2.1.0)", "bitarray (>=2.7.5)", "cbor2 (>=5.2.0)", "cffi (>=1.14.5)", "click (>=8.1.2)", "ecdsa (>=0.16.1)", "eth-abi (>=4.0.0)", "flatbuffers (>=22.12.6)", "hkdf (>=0.0.3)", "jinja2 (>=2.11.3)", "mnemonic (>=0.19)", "msgpack (>=1.0.2)", "passlib (>=1.7.4)", "py-ecc (>=5.1.0)", "py-eth-sig-utils (>=0.4.0)", "py-multihash (>=2.0.1)", "py-ubjson (>=0.16.1)", "pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "python-snappy (>=0.6.0)", "pytrie (>=0.4.0)", "qrcode (>=7.3.1)", "rlp (>=2.0.1)", "service-identity (>=18.1.0)", "spake2 (>=0.8)", "twisted (>=20.3.0)", "twisted (>=24.3.0)", "u-msgpack-python (>=2.1)", "ujson (>=4.0.2)", "web3[ipfs] (>=6.0.0)", "xbr (>=21.2.1)", "yapf (==0.29.0)", "zlmdb (>=21.2.1)", "zope.interface (>=5.2.0)"] compress = ["python-snappy (>=0.6.0)"] -dev = ["backports.tempfile (>=1.0)", "bumpversion (>=0.5.3)", "codecov (>=2.0.15)", "flake8 (<5)", "humanize (>=0.5.1)", "mypy (>=0.610)", "passlib", "pep8-naming (>=0.3.3)", "pip (>=9.0.1)", "pyenchant (>=1.6.6)", "pyflakes (>=1.0.0)", "pyinstaller (>=4.2)", "pylint (>=1.9.2)", "pytest (>=3.4.2)", "pytest-aiohttp", "pytest-asyncio (>=0.14.0)", "pytest-runner (>=2.11.1)", "pyyaml (>=4.2b4)", "qualname", "sphinx (>=1.7.1)", "sphinx-autoapi (>=1.7.0)", "sphinx_rtd_theme (>=0.1.9)", "sphinxcontrib-images (>=0.9.1)", "tox (>=4.2.8)", "tox-gh-actions (>=2.2.0)", "twine (>=3.3.0)", "twisted (>=22.10.0)", "txaio (>=20.4.1)", "watchdog (>=0.8.3)", "wheel (>=0.36.2)", "yapf (==0.29.0)"] -encryption = ["pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "pytrie (>=0.4.0)", "qrcode (>=7.3.1)", "service_identity (>=18.1.0)"] +dev = ["backports.tempfile (>=1.0)", "build (>=1.2.1)", "bumpversion (>=0.5.3)", "codecov (>=2.0.15)", "flake8 (<5)", "humanize (>=0.5.1)", "mypy (>=0.610)", "passlib", "pep8-naming (>=0.3.3)", "pip (>=9.0.1)", "pyenchant (>=1.6.6)", "pyflakes (>=1.0.0)", "pyinstaller (>=4.2)", "pylint (>=1.9.2)", "pytest (>=3.4.2)", "pytest-aiohttp", "pytest-asyncio (>=0.14.0)", "pytest-runner (>=2.11.1)", "pyyaml (>=4.2b4)", "qualname", "sphinx (>=1.7.1)", "sphinx-autoapi (>=1.7.0)", "sphinx-rtd-theme (>=0.1.9)", "sphinxcontrib-images (>=0.9.1)", "tox (>=4.2.8)", "tox-gh-actions (>=2.2.0)", "twine (>=3.3.0)", "twisted (>=22.10.0)", "txaio (>=20.4.1)", "watchdog (>=0.8.3)", 
"wheel (>=0.36.2)", "yapf (==0.29.0)"] +encryption = ["pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "pytrie (>=0.4.0)", "qrcode (>=7.3.1)", "service-identity (>=18.1.0)"] nvx = ["cffi (>=1.14.5)"] -scram = ["argon2_cffi (>=20.1.0)", "cffi (>=1.14.5)", "passlib (>=1.7.4)"] -serialization = ["cbor2 (>=5.2.0)", "flatbuffers (>=22.12.6)", "msgpack (>=1.0.2)", "py-ubjson (>=0.16.1)", "ujson (>=4.0.2)"] -twisted = ["attrs (>=20.3.0)", "twisted (>=20.3.0)", "zope.interface (>=5.2.0)"] +scram = ["argon2-cffi (>=20.1.0)", "cffi (>=1.14.5)", "passlib (>=1.7.4)"] +serialization = ["cbor2 (>=5.2.0)", "flatbuffers (>=22.12.6)", "msgpack (>=1.0.2)", "py-ubjson (>=0.16.1)", "u-msgpack-python (>=2.1)", "ujson (>=4.0.2)"] +twisted = ["attrs (>=20.3.0)", "twisted (>=24.3.0)", "zope.interface (>=5.2.0)"] ui = ["PyGObject (>=3.40.0)"] xbr = ["base58 (>=2.1.0)", "bitarray (>=2.7.5)", "cbor2 (>=5.2.0)", "click (>=8.1.2)", "ecdsa (>=0.16.1)", "eth-abi (>=4.0.0)", "hkdf (>=0.0.3)", "jinja2 (>=2.11.3)", "mnemonic (>=0.19)", "py-ecc (>=5.1.0)", "py-eth-sig-utils (>=0.4.0)", "py-multihash (>=2.0.1)", "rlp (>=2.0.1)", "spake2 (>=0.8)", "twisted (>=20.3.0)", "web3[ipfs] (>=6.0.0)", "xbr (>=21.2.1)", "yapf (==0.29.0)", "zlmdb (>=21.2.1)"] @@ -451,49 +464,57 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "38.0.4" +version = "42.0.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "cryptography-38.0.4-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:2fa36a7b2cc0998a3a4d5af26ccb6273f3df133d61da2ba13b3286261e7efb70"}, - {file = "cryptography-38.0.4-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:1f13ddda26a04c06eb57119caf27a524ccae20533729f4b1e4a69b54e07035eb"}, - {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2ec2a8714dd005949d4019195d72abed84198d877112abb5a27740e217e0ea8d"}, - {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50a1494ed0c3f5b4d07650a68cd6ca62efe8b596ce743a5c94403e6f11bf06c1"}, - {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a10498349d4c8eab7357a8f9aa3463791292845b79597ad1b98a543686fb1ec8"}, - {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:10652dd7282de17990b88679cb82f832752c4e8237f0c714be518044269415db"}, - {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:bfe6472507986613dc6cc00b3d492b2f7564b02b3b3682d25ca7f40fa3fd321b"}, - {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ce127dd0a6a0811c251a6cddd014d292728484e530d80e872ad9806cfb1c5b3c"}, - {file = "cryptography-38.0.4-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:53049f3379ef05182864d13bb9686657659407148f901f3f1eee57a733fb4b00"}, - {file = "cryptography-38.0.4-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:8a4b2bdb68a447fadebfd7d24855758fe2d6fecc7fed0b78d190b1af39a8e3b0"}, - {file = "cryptography-38.0.4-cp36-abi3-win32.whl", hash = "sha256:1d7e632804a248103b60b16fb145e8df0bc60eed790ece0d12efe8cd3f3e7744"}, - {file = "cryptography-38.0.4-cp36-abi3-win_amd64.whl", hash = "sha256:8e45653fb97eb2f20b8c96f9cd2b3a0654d742b47d638cf2897afbd97f80fa6d"}, - {file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ca57eb3ddaccd1112c18fc80abe41db443cc2e9dcb1917078e02dfa010a4f353"}, - {file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:c9e0d79ee4c56d841bd4ac6e7697c8ff3c8d6da67379057f29e66acffcd1e9a7"}, - {file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:0e70da4bdff7601b0ef48e6348339e490ebfb0cbe638e083c9c41fb49f00c8bd"}, - {file = "cryptography-38.0.4-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:998cd19189d8a747b226d24c0207fdaa1e6658a1d3f2494541cb9dfbf7dcb6d2"}, - {file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67461b5ebca2e4c2ab991733f8ab637a7265bb582f07c7c88914b5afb88cb95b"}, - {file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4eb85075437f0b1fd8cd66c688469a0c4119e0ba855e3fef86691971b887caf6"}, - {file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3178d46f363d4549b9a76264f41c6948752183b3f587666aff0555ac50fd7876"}, - {file = "cryptography-38.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6391e59ebe7c62d9902c24a4d8bcbc79a68e7c4ab65863536127c8a9cd94043b"}, - {file = "cryptography-38.0.4-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:78e47e28ddc4ace41dd38c42e6feecfdadf9c3be2af389abbfeef1ff06822285"}, - {file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fb481682873035600b5502f0015b664abc26466153fab5c6bc92c1ea69d478b"}, - {file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4367da5705922cf7070462e964f66e4ac24162e22ab0a2e9d31f1b270dd78083"}, - {file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b4cad0cea995af760f82820ab4ca54e5471fc782f70a007f31531957f43e9dee"}, - {file = "cryptography-38.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:80ca53981ceeb3241998443c4964a387771588c4e4a5d92735a493af868294f9"}, - {file = "cryptography-38.0.4.tar.gz", hash = "sha256:175c1a818b87c9ac80bb7377f5520b7f31b3ef2a0004e2420319beadedb67290"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, + {file = 
"cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, + {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, + {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, + {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, + {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, + {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, + {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", 
markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] -sdist = ["setuptools-rust (>=0.11.4)"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] [[package]] name = "debugpy" @@ -549,13 +570,13 @@ test = ["pytest (>=6)"] [[package]] name = "execnet" -version = "2.0.2" +version = "2.1.1" description = "execnet: rapid multi-Python deployment" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"}, - {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"}, + {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, + {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, ] [package.extras] @@ -577,29 +598,29 @@ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipyth [[package]] name = "flake8" -version = "6.1.0" +version = "7.1.1" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, - {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, + {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, + {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.11.0,<2.12.0" -pyflakes = ">=3.1.0,<3.2.0" +pycodestyle = ">=2.12.0,<2.13.0" +pyflakes = ">=3.2.0,<3.3.0" [[package]] name = "flaky" -version = "3.7.0" -description = "Plugin for nose or pytest that automatically reruns flaky tests." +version = "3.8.1" +description = "Plugin for pytest that automatically reruns flaky tests." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.5" files = [ - {file = "flaky-3.7.0-py2.py3-none-any.whl", hash = "sha256:d6eda73cab5ae7364504b7c44670f70abed9e75f77dd116352f662817592ec9c"}, - {file = "flaky-3.7.0.tar.gz", hash = "sha256:3ad100780721a1911f57a165809b7ea265a7863305acb66708220820caf8aa0d"}, + {file = "flaky-3.8.1-py2.py3-none-any.whl", hash = "sha256:194ccf4f0d3a22b2de7130f4b62e45e977ac1b5ccad74d4d48f3005dcc38815e"}, + {file = "flaky-3.8.1.tar.gz", hash = "sha256:47204a81ec905f3d5acfbd61daeabcada8f9d4031616d9bcb0618461729699f5"}, ] [[package]] @@ -706,22 +727,22 @@ test = ["coverage", "mock (>=4)", "pytest (>=7)", "pytest-cov", "pytest-mock (>= [[package]] name = "hathorlib" -version = "0.5.2" +version = "0.6.1" description = "Hathor Network base objects library" optional = false -python-versions = ">=3.9,<4" +python-versions = "<4,>=3.9" files = [ - {file = "hathorlib-0.5.2-py3-none-any.whl", hash = "sha256:bf50853efff592a90fd10d9ef3988c62029bd728418ec88600cd00e21c075240"}, - {file = "hathorlib-0.5.2.tar.gz", hash = "sha256:565958f66cfbebdb159450855b7218ab0b3a6fdcb4f6f5ca4e8294e0c018e913"}, + {file = "hathorlib-0.6.1-py3-none-any.whl", hash = "sha256:d5c004379bf46e334161c9b9566afb5b52ab73f1ec9b037567b50ca20083531d"}, + {file = "hathorlib-0.6.1.tar.gz", hash = "sha256:a0c6be59bfd759598d15d358f77b903c3feb3eecb3a6f8249dd593063aa49ac1"}, ] [package.dependencies] base58 = ">=2.1.1,<2.2.0" -cryptography = ">=38.0.3,<38.1.0" +cryptography = ">=42.0.5,<42.1.0" pycoin = ">=0.92,<0.93" [package.extras] -client = ["aiohttp (>=3.8.3,<3.9.0)", "structlog (>=22.3.0,<22.4.0)"] +client = ["aiohttp (>=3.9.3,<3.10.0)", "structlog (>=22.3.0,<22.4.0)"] [[package]] name = "hyperlink" @@ -750,18 +771,21 @@ files = [ [[package]] name = "incremental" -version = "22.10.0" -description = "\"A small library that versions your Python projects.\"" +version = "24.7.2" +description = "A small library that versions your Python projects." optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "incremental-22.10.0-py2.py3-none-any.whl", hash = "sha256:b864a1f30885ee72c5ac2835a761b8fe8aa9c28b9395cacf27286602688d3e51"}, - {file = "incremental-22.10.0.tar.gz", hash = "sha256:912feeb5e0f7e0188e6f42241d2f450002e11bbc0937c65865045854c24c0bd0"}, + {file = "incremental-24.7.2-py3-none-any.whl", hash = "sha256:8cb2c3431530bec48ad70513931a760f446ad6c25e8333ca5d95e24b0ed7b8fe"}, + {file = "incremental-24.7.2.tar.gz", hash = "sha256:fb4f1d47ee60efe87d4f6f0ebb5f70b9760db2b2574c59c8e8912be4ebd464c9"}, ] +[package.dependencies] +setuptools = ">=61.0" +tomli = {version = "*", markers = "python_version < \"3.11\""} + [package.extras] -mypy = ["click (>=6.0)", "mypy (==0.812)", "twisted (>=16.4.0)"] -scripts = ["click (>=6.0)", "twisted (>=16.4.0)"] +scripts = ["click (>=6.0)"] [[package]] name = "iniconfig" @@ -861,23 +885,20 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.20)", "pa [[package]] name = "isort" -version = "5.12.0" +version = "5.13.2" description = "A Python utility / library to sort Python imports." 
optional = false python-versions = ">=3.8.0" files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, ] [package.dependencies] -colorama = {version = ">=0.4.3", optional = true, markers = "extra == \"colors\""} +colorama = {version = ">=0.4.6", optional = true, markers = "extra == \"colors\""} [package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] +colors = ["colorama (>=0.4.6)"] [[package]] name = "jedi" @@ -1061,38 +1082,38 @@ files = [ [[package]] name = "mypy" -version = "1.9.0" +version = "1.10.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, - {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, - {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, - {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, - {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, - {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, - {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, - {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, - {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, - {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, - {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, - {file = 
"mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, - {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, - {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, - {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, - {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, - {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, - {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, - {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, - {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, + {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"}, + {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"}, + {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"}, + {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"}, + {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"}, + {file = "mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"}, + {file = "mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"}, + {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"}, + {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"}, + {file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"}, + {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"}, + {file = "mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"}, + {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"}, + {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"}, + {file = "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604282c886497645ffb87b8f35a57ec773a4a2721161e709a4422c1636ddde5c"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37fd87cab83f09842653f08de066ee68f1182b9b5282e4634cdb4b407266bade"}, + {file = "mypy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8addf6313777dbb92e9564c5d32ec122bf2c6c39d683ea64de6a1fd98b90fe37"}, + {file = "mypy-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cc3ca0a244eb9a5249c7c583ad9a7e881aa5d7b73c35652296ddcdb33b2b9c7"}, + {file = "mypy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:1b3a2ffce52cc4dbaeee4df762f20a2905aa171ef157b82192f2e2f368eec05d"}, + {file = "mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3"}, + {file = "mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf"}, + {file = "mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531"}, + {file = "mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3"}, + {file = "mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f"}, + {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"}, + {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"}, ] [package.dependencies] @@ -1119,17 +1140,16 @@ files = [ [[package]] name = "mypy-zope" -version = "1.0.4" +version = "1.0.5" description = "Plugin for mypy to support zope interfaces" optional = false python-versions = "*" files = [ - {file = "mypy-zope-1.0.4.tar.gz", hash = "sha256:a9569e73ae85a65247787d98590fa6d4290e76f26aabe035d1c3e94a0b9ab6ee"}, - {file = "mypy_zope-1.0.4-py3-none-any.whl", hash = "sha256:c7298f93963a84f2b145c2b5cc98709fc2a5be4adf54bfe23fa7fdd8fd19c975"}, + {file = "mypy_zope-1.0.5.tar.gz", hash = "sha256:2440406d49c0e1199c1cd819c92a2c4957de65579c6abc8a081c927f4bdc8d49"}, ] [package.dependencies] -mypy = ">=1.0.0,<1.10.0" +mypy = ">=1.0.0,<1.11.0" "zope.interface" = "*" "zope.schema" = "*" @@ -1226,13 +1246,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.3.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -1347,13 +1367,13 @@ pyasn1 = ">=0.4.6,<0.6.0" [[package]] name = "pycodestyle" -version = "2.11.1" +version = "2.12.1" description = "Python style 
guide checker" optional = false python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, - {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, + {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, + {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, ] [[package]] @@ -1379,47 +1399,54 @@ files = [ [[package]] name = "pydantic" -version = "1.10.13" +version = "1.10.17" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, - {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, - {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, - {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, - {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, - {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, - {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, - {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, - {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, - {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, + {file = "pydantic-1.10.17-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fa51175313cc30097660b10eec8ca55ed08bfa07acbfe02f7a42f6c242e9a4b"}, + {file = "pydantic-1.10.17-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7e8988bb16988890c985bd2093df9dd731bfb9d5e0860db054c23034fab8f7a"}, + {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:371dcf1831f87c9e217e2b6a0c66842879a14873114ebb9d0861ab22e3b5bb1e"}, + {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4866a1579c0c3ca2c40575398a24d805d4db6cb353ee74df75ddeee3c657f9a7"}, + {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:543da3c6914795b37785703ffc74ba4d660418620cc273490d42c53949eeeca6"}, + {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7623b59876f49e61c2e283551cc3647616d2fbdc0b4d36d3d638aae8547ea681"}, + {file = "pydantic-1.10.17-cp310-cp310-win_amd64.whl", hash = "sha256:409b2b36d7d7d19cd8310b97a4ce6b1755ef8bd45b9a2ec5ec2b124db0a0d8f3"}, + {file = "pydantic-1.10.17-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fa43f362b46741df8f201bf3e7dff3569fa92069bcc7b4a740dea3602e27ab7a"}, + {file = "pydantic-1.10.17-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2a72d2a5ff86a3075ed81ca031eac86923d44bc5d42e719d585a8eb547bf0c9b"}, + {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ad32aed3bf5eea5ca5decc3d1bbc3d0ec5d4fbcd72a03cdad849458decbc63"}, + {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb4e741782e236ee7dc1fb11ad94dc56aabaf02d21df0e79e0c21fe07c95741"}, + {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d2f89a719411cb234105735a520b7c077158a81e0fe1cb05a79c01fc5eb59d3c"}, + {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db3b48d9283d80a314f7a682f7acae8422386de659fffaba454b77a083c3937d"}, + {file = "pydantic-1.10.17-cp311-cp311-win_amd64.whl", hash = "sha256:9c803a5113cfab7bbb912f75faa4fc1e4acff43e452c82560349fff64f852e1b"}, + {file = "pydantic-1.10.17-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:820ae12a390c9cbb26bb44913c87fa2ff431a029a785642c1ff11fed0a095fcb"}, + {file = "pydantic-1.10.17-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c1e51d1af306641b7d1574d6d3307eaa10a4991542ca324f0feb134fee259815"}, + {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e53fb834aae96e7b0dadd6e92c66e7dd9cdf08965340ed04c16813102a47fab"}, + {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e2495309b1266e81d259a570dd199916ff34f7f51f1b549a0d37a6d9b17b4dc"}, + {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:098ad8de840c92ea586bf8efd9e2e90c6339d33ab5c1cfbb85be66e4ecf8213f"}, + {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:525bbef620dac93c430d5d6bdbc91bdb5521698d434adf4434a7ef6ffd5c4b7f"}, + {file = "pydantic-1.10.17-cp312-cp312-win_amd64.whl", hash = "sha256:6654028d1144df451e1da69a670083c27117d493f16cf83da81e1e50edce72ad"}, + {file = "pydantic-1.10.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c87cedb4680d1614f1d59d13fea353faf3afd41ba5c906a266f3f2e8c245d655"}, + {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11289fa895bcbc8f18704efa1d8020bb9a86314da435348f59745473eb042e6b"}, + {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94833612d6fd18b57c359a127cbfd932d9150c1b72fea7c86ab58c2a77edd7c7"}, + {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d4ecb515fa7cb0e46e163ecd9d52f9147ba57bc3633dca0e586cdb7a232db9e3"}, + {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7017971ffa7fd7808146880aa41b266e06c1e6e12261768a28b8b41ba55c8076"}, + {file = 
"pydantic-1.10.17-cp37-cp37m-win_amd64.whl", hash = "sha256:e840e6b2026920fc3f250ea8ebfdedf6ea7a25b77bf04c6576178e681942ae0f"}, + {file = "pydantic-1.10.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bfbb18b616abc4df70591b8c1ff1b3eabd234ddcddb86b7cac82657ab9017e33"}, + {file = "pydantic-1.10.17-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebb249096d873593e014535ab07145498957091aa6ae92759a32d40cb9998e2e"}, + {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c209af63ccd7b22fba94b9024e8b7fd07feffee0001efae50dd99316b27768"}, + {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b40c9e13a0b61583e5599e7950490c700297b4a375b55b2b592774332798b7"}, + {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c31d281c7485223caf6474fc2b7cf21456289dbaa31401844069b77160cab9c7"}, + {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae5184e99a060a5c80010a2d53c99aee76a3b0ad683d493e5f0620b5d86eeb75"}, + {file = "pydantic-1.10.17-cp38-cp38-win_amd64.whl", hash = "sha256:ad1e33dc6b9787a6f0f3fd132859aa75626528b49cc1f9e429cdacb2608ad5f0"}, + {file = "pydantic-1.10.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e17c0ee7192e54a10943f245dc79e36d9fe282418ea05b886e1c666063a7b54"}, + {file = "pydantic-1.10.17-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cafb9c938f61d1b182dfc7d44a7021326547b7b9cf695db5b68ec7b590214773"}, + {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ef534e3c22e5abbdbdd6f66b6ea9dac3ca3e34c5c632894f8625d13d084cbe"}, + {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d96b8799ae3d782df7ec9615cb59fc32c32e1ed6afa1b231b0595f6516e8ab"}, + {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ab2f976336808fd5d539fdc26eb51f9aafc1f4b638e212ef6b6f05e753c8011d"}, + {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8ad363330557beac73159acfbeed220d5f1bfcd6b930302a987a375e02f74fd"}, + {file = "pydantic-1.10.17-cp39-cp39-win_amd64.whl", hash = "sha256:48db882e48575ce4b39659558b2f9f37c25b8d348e37a2b4e32971dd5a7d6227"}, + {file = "pydantic-1.10.17-py3-none-any.whl", hash = "sha256:e41b5b973e5c64f674b3b4720286ded184dcc26a691dd55f34391c62c6934688"}, + {file = "pydantic-1.10.17.tar.gz", hash = "sha256:f434160fb14b353caf634149baaf847206406471ba70e64657c1e8330277a991"}, ] [package.dependencies] @@ -1431,13 +1458,13 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pyflakes" -version = "3.1.0" +version = "3.2.0" description = "passive checker of Python programs" optional = false python-versions = ">=3.8" files = [ - {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, - {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, ] [[package]] @@ -1457,31 +1484,31 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyopenssl" -version = "22.1.0" +version = "24.2.1" description = "Python wrapper module around the OpenSSL library" optional = 
false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pyOpenSSL-22.1.0-py3-none-any.whl", hash = "sha256:b28437c9773bb6c6958628cf9c3bebe585de661dba6f63df17111966363dd15e"}, - {file = "pyOpenSSL-22.1.0.tar.gz", hash = "sha256:7a83b7b272dd595222d672f5ce29aa030f1fb837630ef229f62e72e395ce8968"}, + {file = "pyOpenSSL-24.2.1-py3-none-any.whl", hash = "sha256:967d5719b12b243588573f39b0c677637145c7a1ffedcd495a487e58177fbb8d"}, + {file = "pyopenssl-24.2.1.tar.gz", hash = "sha256:4247f0dbe3748d560dcbb2ff3ea01af0f9a1a001ef5f7c4c647956ed8cbf0e95"}, ] [package.dependencies] -cryptography = ">=38.0.0,<39" +cryptography = ">=41.0.5,<44" [package.extras] -docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"] -test = ["flaky", "pretend", "pytest (>=3.0.1)"] +docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"] +test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"] [[package]] name = "pytest" -version = "7.4.3" +version = "8.3.2" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, + {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, ] [package.dependencies] @@ -1489,21 +1516,21 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" -version = "4.1.0" +version = "5.0.0" description = "Pytest plugin for measuring coverage." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, - {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, ] [package.dependencies] @@ -1511,22 +1538,22 @@ coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-xdist" -version = "3.3.1" +version = "3.6.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-xdist-3.3.1.tar.gz", hash = "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93"}, - {file = "pytest_xdist-3.3.1-py3-none-any.whl", hash = "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2"}, + {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, + {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, ] [package.dependencies] -execnet = ">=1.1" -pytest = ">=6.2.0" +execnet = ">=2.1" +pytest = ">=7.0.0" [package.extras] psutil = ["psutil (>=3.0)"] @@ -1747,20 +1774,20 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "requests" -version = "2.28.1" +version = "2.32.3" description = "Python HTTP for Humans." 
optional = false -python-versions = ">=3.7, <4" +python-versions = ">=3.8" files = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" +charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] @@ -2093,69 +2120,38 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "twisted" -version = "22.10.0" +version = "24.7.0" description = "An asynchronous networking framework written in Python" optional = false -python-versions = ">=3.7.1" +python-versions = ">=3.8.0" files = [ - {file = "Twisted-22.10.0-py3-none-any.whl", hash = "sha256:86c55f712cc5ab6f6d64e02503352464f0400f66d4f079096d744080afcccbd0"}, - {file = "Twisted-22.10.0.tar.gz", hash = "sha256:32acbd40a94f5f46e7b42c109bfae2b302250945561783a8b7a059048f2d4d31"}, + {file = "twisted-24.7.0-py3-none-any.whl", hash = "sha256:734832ef98108136e222b5230075b1079dad8a3fc5637319615619a7725b0c81"}, + {file = "twisted-24.7.0.tar.gz", hash = "sha256:5a60147f044187a127ec7da96d170d49bcce50c6fd36f594e60f4587eff4d394"}, ] [package.dependencies] -attrs = ">=19.2.0" -Automat = ">=0.8.0" +attrs = ">=21.3.0" +automat = ">=0.8.0" constantly = ">=15.1" hyperlink = ">=17.1.1" -incremental = ">=21.3.0" -twisted-iocpsupport = {version = ">=1.0.2,<2", markers = "platform_system == \"Windows\""} -typing-extensions = ">=3.6.5" -"zope.interface" = ">=4.4.2" +incremental = ">=24.7.0" +typing-extensions = ">=4.2.0" +zope-interface = ">=5" [package.extras] -all-non-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] -conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"] -conch-nacl = ["PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"] -contextvars = ["contextvars (>=2.4,<3)"] -dev = ["coverage (>=6b1,<7)", "pydoctor (>=22.9.0,<22.10.0)", "pyflakes (>=2.2,<3.0)", "python-subunit (>=1.4,<2.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)", "twistedchecker (>=0.7,<1.0)"] -dev-release = ["pydoctor (>=22.9.0,<22.10.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)"] -gtk-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pygobject", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] +all-non-platform = ["appdirs (>=1.4.0)", "appdirs 
(>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] +conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)"] +dev = ["coverage (>=7.5,<8.0)", "cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.56)", "pydoctor (>=23.9.0,<23.10.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "python-subunit (>=1.4,<2.0)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)"] +dev-release = ["pydoctor (>=23.9.0,<23.10.0)", "pydoctor (>=23.9.0,<23.10.0)", "sphinx (>=6,<7)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "towncrier (>=23.6,<24.0)"] +gtk-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pygobject", "pygobject", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] http2 = ["h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)"] -macos-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] -mypy = ["PyHamcrest (>=1.9.0)", "PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "coverage (>=6b1,<7)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "mypy (==0.930)", "mypy-zope (==0.3.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pydoctor (>=22.9.0,<22.10.0)", "pyflakes (>=2.2,<3.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "service-identity (>=18.1.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)", "twistedchecker (>=0.7,<1.0)", "types-pyOpenSSL", "types-setuptools"] -osx-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] +macos-platform 
= ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] +mypy = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "coverage (>=7.5,<8.0)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "idna (>=2.4)", "mypy (>=1.8,<2.0)", "mypy-zope (>=1.0.3,<1.1.0)", "priority (>=1.1.0,<2.0)", "pydoctor (>=23.9.0,<23.10.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)", "types-pyopenssl", "types-setuptools"] +osx-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] serial = ["pyserial (>=3.0)", "pywin32 (!=226)"] -test = ["PyHamcrest (>=1.9.0)", "cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.0,<7.0)"] +test = ["cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.56)", "pyhamcrest (>=2)"] tls = ["idna (>=2.4)", "pyopenssl (>=21.0.0)", "service-identity (>=18.1.0)"] -windows-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] - -[[package]] -name = "twisted-iocpsupport" -version = "1.0.4" -description = "An extension for use in the twisted I/O Completion Ports reactor." 
-optional = false -python-versions = "*" -files = [ - {file = "twisted-iocpsupport-1.0.4.tar.gz", hash = "sha256:858096c0d15e33f15ac157f455d8f86f2f2cdd223963e58c0f682a3af8362d89"}, - {file = "twisted_iocpsupport-1.0.4-cp310-cp310-win32.whl", hash = "sha256:afa2b630797f9ed2f27f3d9f55e3f72b4244911e45a8c82756f44babbf0b243e"}, - {file = "twisted_iocpsupport-1.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:0058c963c8957bcd3deda62122e89953c9de1e867a274facc9b15dde1a9f31e8"}, - {file = "twisted_iocpsupport-1.0.4-cp311-cp311-win32.whl", hash = "sha256:196f7c7ccad4ba4d1783b1c4e1d1b22d93c04275cd780bf7498d16c77319ad6e"}, - {file = "twisted_iocpsupport-1.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:4e5f97bcbabdd79cbaa969b63439b89801ea560f11d42b0a387634275c633623"}, - {file = "twisted_iocpsupport-1.0.4-cp312-cp312-win32.whl", hash = "sha256:6081bd7c2f4fcf9b383dcdb3b3385d75a26a7c9d2be25b6950c3d8ea652d2d2d"}, - {file = "twisted_iocpsupport-1.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:76f7e67cec1f1d097d1f4ed7de41be3d74546e1a4ede0c7d56e775c4dce5dfb0"}, - {file = "twisted_iocpsupport-1.0.4-cp36-cp36m-win32.whl", hash = "sha256:3d306fc4d88a6bcf61ce9d572c738b918578121bfd72891625fab314549024b5"}, - {file = "twisted_iocpsupport-1.0.4-cp36-cp36m-win_amd64.whl", hash = "sha256:391ac4d6002a80e15f35adc4ad6056f4fe1c17ceb0d1f98ba01b0f4f917adfd7"}, - {file = "twisted_iocpsupport-1.0.4-cp37-cp37m-win32.whl", hash = "sha256:0c1b5cf37f0b2d96cc3c9bc86fff16613b9f5d0ca565c96cf1f1fb8cfca4b81c"}, - {file = "twisted_iocpsupport-1.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:3c5dc11d72519e55f727320e3cee535feedfaee09c0f0765ed1ca7badff1ab3c"}, - {file = "twisted_iocpsupport-1.0.4-cp38-cp38-win32.whl", hash = "sha256:cc86c2ef598c15d824a243c2541c29459881c67fc3c0adb6efe2242f8f0ec3af"}, - {file = "twisted_iocpsupport-1.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:c27985e949b9b1a1fb4c20c71d315c10ea0f93fdf3ccdd4a8c158b5926edd8c8"}, - {file = "twisted_iocpsupport-1.0.4-cp39-cp39-win32.whl", hash = "sha256:e311dfcb470696e3c077249615893cada598e62fa7c4e4ca090167bd2b7d331f"}, - {file = "twisted_iocpsupport-1.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:4574eef1f3bb81501fb02f911298af3c02fe8179c31a33b361dd49180c3e644d"}, - {file = "twisted_iocpsupport-1.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:872747a3b64e2909aee59c803ccd0bceb9b75bf27915520ebd32d69687040fa2"}, - {file = "twisted_iocpsupport-1.0.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:c2712b778bacf1db434e3e065adfed3db300754186a29aecac1efae9ef4bcaff"}, - {file = "twisted_iocpsupport-1.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7c66fa0aa4236b27b3c61cb488662d85dae746a6d1c7b0d91cf7aae118445adf"}, - {file = "twisted_iocpsupport-1.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:300437af17396a945a58dcfffd77863303a8b6d9e65c6e81f1d2eed55b50d444"}, -] +windows-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)", "twisted-iocpsupport (>=1.0.2)", 
"twisted-iocpsupport (>=1.0.2)"] [[package]] name = "txaio" @@ -2236,13 +2232,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] @@ -2274,13 +2270,13 @@ files = [ [[package]] name = "yamllint" -version = "1.32.0" +version = "1.35.1" description = "A linter for YAML files." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "yamllint-1.32.0-py3-none-any.whl", hash = "sha256:d97a66e48da820829d96077d76b8dfbe6c6140f106e558dae87e81ac4e6b30b7"}, - {file = "yamllint-1.32.0.tar.gz", hash = "sha256:d01dde008c65de5b235188ab3110bebc59d18e5c65fc8a58267cd211cd9df34a"}, + {file = "yamllint-1.35.1-py3-none-any.whl", hash = "sha256:2e16e504bb129ff515b37823b472750b36b6de07963bd74b307341ef5ad8bdc3"}, + {file = "yamllint-1.35.1.tar.gz", hash = "sha256:7a003809f88324fd2c877734f2d575ee7881dd9043360657cc8049c809eba6cd"}, ] [package.dependencies] @@ -2490,4 +2486,4 @@ sentry = ["sentry-sdk", "structlog-sentry"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<4" -content-hash = "1eed0fc6c02c4ddb7b4a6634d6c5ba4873ce5a82c6b3d4197ca88b4644474c53" +content-hash = "cbdab9a3fa79583a3fb818013dc26a7c4dbcc78a5d832f59239334128718c37a" diff --git a/pyproject.toml b/pyproject.toml index d255c061a..ffb6b8620 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,15 +36,15 @@ exclude = ["tests", "tests.*"] hathor-cli = 'hathor.cli.main:main' [tool.poetry.dev-dependencies] -flake8 = "~6.1.0" -isort = {version = "~5.12.0", extras = ["colors"]} -mypy = {version = "^1.9.0", markers = "implementation_name == 'cpython'"} -mypy-zope = {version = "^1.0.4", markers = "implementation_name == 'cpython'"} -pytest = "~7.4.3" -pytest-cov = "~4.1.0" -flaky = "~3.7.0" -pytest-xdist = "~3.3.1" -yamllint = "~1.32.0" +flake8 = "~7.1.1" +isort = {version = "~5.13.2", extras = ["colors"]} +mypy = {version = "^1.10.1", markers = "implementation_name == 'cpython'"} +mypy-zope = {version = "^1.0.5", markers = "implementation_name == 'cpython'"} +pytest = "~8.3.2" +pytest-cov = "~5.0.0" +flaky = "~3.8.1" +pytest-xdist = "~3.6.1" +yamllint = "~1.35.1" # stubs: types-requests = "=2.28.11.4" types-pyopenssl = "=22.1.0.2" @@ -52,34 +52,34 @@ types-pyyaml = "=6.0.12.9" [tool.poetry.dependencies] python = ">=3.10,<4" -twisted = "~22.10.0" -autobahn = "~23.6.2" +twisted = "~24.7.0" +autobahn = "~24.4.2" base58 = "~2.1.1" colorama = "~0.4.6" configargparse = "~1.5.3" -cryptography = "~38.0.3" +cryptography = "~42.0.5" graphviz = "~0.20.1" ipython = {version = "~8.7.0", extras = ["kernel"]} mnemonic = "~0.20" prometheus_client = "~0.15.0" -pyopenssl = "=22.1.0" +pyopenssl = "=24.2.1" pycoin = "~0.92.20230326" pywin32 = {version = "306", markers = "sys_platform == 'win32'"} -requests = "=2.28.1" +requests = "=2.32.3" service_identity = "~21.1.0" pexpect = "~4.8.0" intervaltree = "~3.1.0" structlog = "~22.3.0" 
rocksdb = {git = "https://github.com/hathornetwork/python-rocksdb.git", markers = "sys_platform != 'win32'"} -aiohttp = "~3.9.0" +aiohttp = "~3.10.3" idna = "~3.4" -setproctitle = "^1.2.2" +setproctitle = "^1.3.3" sentry-sdk = {version = "^1.5.11", optional = true} structlog-sentry = {version = "^1.4.0", optional = true} -hathorlib = "^0.5.2" -pydantic = "~1.10.13" +hathorlib = "^0.6.1" +pydantic = "~1.10.17" pyyaml = "^6.0.1" -typing-extensions = "~4.8.0" +typing-extensions = "~4.12.2" python-healthchecklib = "^0.1.0" [tool.poetry.extras] From 25a8abda5f4422a4664febc55c82c225d5f3173a Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Sat, 27 Apr 2024 00:11:21 +0200 Subject: [PATCH 09/61] chore(ci): update default Python version used on docker images --- Dockerfile | 2 +- extras/github/test_docker.py | 26 +++++++++++++++----------- 2 files changed, 16 insertions(+), 12 deletions(-) diff --git a/Dockerfile b/Dockerfile index f1e445072..f1fa13f38 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,6 @@ # before changing these variables, make sure the tag $PYTHON-alpine$ALPINE exists first # list of valid tags hese: https://hub.docker.com/_/python -ARG PYTHON=3.10 +ARG PYTHON=3.11 ARG DEBIAN=bullseye # stage-0: copy pyproject.toml/poetry.lock and install the production set of dependencies diff --git a/extras/github/test_docker.py b/extras/github/test_docker.py index 6cfe2e2fc..a38060acc 100644 --- a/extras/github/test_docker.py +++ b/extras/github/test_docker.py @@ -3,6 +3,10 @@ from extras.github.docker import prep_base_version, prep_tags +DEFAULT_PYTHON_VERSION = '3.11' +NON_DEFAULT_PYTHON_VERSION = '3.10' + + class DockerWorkflowTest(unittest.TestCase): def setUp(self): os.environ.update({ @@ -17,7 +21,7 @@ def test_nightly_build_no_github_secret(self): 'GITHUB_EVENT_DEFAULT_BRANCH': 'master', 'GITHUB_EVENT_NUMBER': '', 'MATRIX_PYTHON_IMPL': 'python', - 'MATRIX_PYTHON_VERSION': '3.10', + 'MATRIX_PYTHON_VERSION': DEFAULT_PYTHON_VERSION, 'SECRETS_DOCKERHUB_IMAGE': '', 'SECRETS_GHCR_IMAGE': '', }) @@ -32,7 +36,7 @@ def test_nightly_build_no_github_secret(self): output = prep_tags(os.environ, base_version, is_release_candidate) self.assertEqual(output['slack-notification-version'], base_version) - self.assertEqual(output['version'], base_version + '-python3.10') + self.assertEqual(output['version'], base_version + f'-python{DEFAULT_PYTHON_VERSION}') self.assertEqual(output['login-dockerhub'], 'false') self.assertEqual(output['login-ghcr'], 'false') self.assertEqual(output['tags'], 'dont-push--local-only') @@ -47,7 +51,7 @@ def test_nightly_build(self): 'GITHUB_EVENT_DEFAULT_BRANCH': 'master', 'GITHUB_EVENT_NUMBER': '', 'MATRIX_PYTHON_IMPL': 'python', - 'MATRIX_PYTHON_VERSION': '3.10', + 'MATRIX_PYTHON_VERSION': DEFAULT_PYTHON_VERSION, 'SECRETS_DOCKERHUB_IMAGE': 'mock_image', 'SECRETS_GHCR_IMAGE': '', }) @@ -62,12 +66,12 @@ def test_nightly_build(self): output = prep_tags(os.environ, base_version, is_release_candidate) self.assertEqual(output['slack-notification-version'], base_version) - self.assertEqual(output['version'], base_version + '-python3.10') + self.assertEqual(output['version'], base_version + f'-python{DEFAULT_PYTHON_VERSION}') self.assertEqual(output['login-dockerhub'], 'true') self.assertEqual(output['login-ghcr'], 'false') self.assertEqual(len(output['tags'].split(',')), 2) self.assertIn('mock_image:nightly-55629a7d', output['tags'].split(',')) - self.assertIn('mock_image:nightly-55629a7d-python3.10', output['tags'].split(',')) + 
self.assertIn(f'mock_image:nightly-55629a7d-python{DEFAULT_PYTHON_VERSION}', output['tags'].split(',')) self.assertEqual(output['push'], 'true') self.assertEqual(output['dockerfile'], 'Dockerfile') @@ -80,7 +84,7 @@ def test_release_candidate_non_default_python(self): 'GITHUB_EVENT_DEFAULT_BRANCH': 'master', 'GITHUB_EVENT_NUMBER': '', 'MATRIX_PYTHON_IMPL': 'python', - 'MATRIX_PYTHON_VERSION': '3.11', + 'MATRIX_PYTHON_VERSION': NON_DEFAULT_PYTHON_VERSION, 'SECRETS_DOCKERHUB_IMAGE': 'mock_image', 'SECRETS_GHCR_IMAGE': '', }) @@ -110,7 +114,7 @@ def test_release_candidate_default_python(self): 'GITHUB_EVENT_DEFAULT_BRANCH': 'master', 'GITHUB_EVENT_NUMBER': '', 'MATRIX_PYTHON_IMPL': 'python', - 'MATRIX_PYTHON_VERSION': '3.10', + 'MATRIX_PYTHON_VERSION': DEFAULT_PYTHON_VERSION, 'SECRETS_DOCKERHUB_IMAGE': 'mock_image', 'SECRETS_GHCR_IMAGE': '', }) @@ -140,7 +144,7 @@ def test_release_default_python(self): 'GITHUB_EVENT_DEFAULT_BRANCH': 'master', 'GITHUB_EVENT_NUMBER': '', 'MATRIX_PYTHON_IMPL': 'python', - 'MATRIX_PYTHON_VERSION': '3.10', + 'MATRIX_PYTHON_VERSION': DEFAULT_PYTHON_VERSION, 'SECRETS_DOCKERHUB_IMAGE': 'mock_image', 'SECRETS_GHCR_IMAGE': '', }) @@ -155,12 +159,12 @@ def test_release_default_python(self): output = prep_tags(os.environ, base_version, is_release_candidate) self.assertEqual(output['slack-notification-version'], base_version) - self.assertEqual(output['version'], base_version + '-python3.10') + self.assertEqual(output['version'], base_version + f'-python{DEFAULT_PYTHON_VERSION}') self.assertEqual(output['login-dockerhub'], 'true') self.assertEqual(output['login-ghcr'], 'false') self.assertEqual(len(output['tags'].split(',')), 4) - self.assertIn('mock_image:v0.53-python3.10', output['tags'].split(',')) - self.assertIn('mock_image:v0.53.0-python3.10', output['tags'].split(',')) + self.assertIn(f'mock_image:v0.53-python{DEFAULT_PYTHON_VERSION}', output['tags'].split(',')) + self.assertIn(f'mock_image:v0.53.0-python{DEFAULT_PYTHON_VERSION}', output['tags'].split(',')) self.assertIn('mock_image:v0.53.0', output['tags'].split(',')) self.assertIn('mock_image:latest', output['tags'].split(',')) self.assertEqual(output['push'], 'true') From 7ccc8461cda2a2bc9223b7d64d5213e24c806e76 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Tue, 20 Aug 2024 22:06:39 -0300 Subject: [PATCH 10/61] fix(benchmark): fix CI on master (#1117) --- .github/workflows/base_benchmarks.yml | 2 ++ .github/workflows/pr_benchmarks.yml | 10 ++-------- extras/benchmarking/.env | 7 +++++++ 3 files changed, 11 insertions(+), 8 deletions(-) create mode 100644 extras/benchmarking/.env diff --git a/.github/workflows/base_benchmarks.yml b/.github/workflows/base_benchmarks.yml index a2e59a68d..36a66037d 100644 --- a/.github/workflows/base_benchmarks.yml +++ b/.github/workflows/base_benchmarks.yml @@ -21,6 +21,8 @@ jobs: with: python: 3.11 os: ubuntu-22.04 + - name: Set env vars + run: cat ./extras/benchmarking/.env >> $GITHUB_ENV - name: Track base branch benchmarks with Bencher run: | bencher run \ diff --git a/.github/workflows/pr_benchmarks.yml b/.github/workflows/pr_benchmarks.yml index d78ae72f7..7c8415f99 100644 --- a/.github/workflows/pr_benchmarks.yml +++ b/.github/workflows/pr_benchmarks.yml @@ -13,14 +13,6 @@ jobs: permissions: pull-requests: write runs-on: ubuntu-22.04 - env: - N_BLOCKS: 20000 - CACHE_SIZE: 100000 - SERVER_DATA_DIR: server-data - TCP_PORT: 40403 - N_RUNS: 2 - BENCH_FILE: bench_results.json - BENCH_DATA_DIR: bench-data steps: - uses: actions/checkout@v4 - uses: bencherdev/bencher@main 
@@ -33,6 +25,8 @@ jobs: with: python: 3.11 os: ubuntu-22.04 + - name: Set env vars + run: cat ./extras/benchmarking/.env >> $GITHUB_ENV - name: Download benchmark data run: | mkdir $SERVER_DATA_DIR diff --git a/extras/benchmarking/.env b/extras/benchmarking/.env new file mode 100644 index 000000000..4f22d614a --- /dev/null +++ b/extras/benchmarking/.env @@ -0,0 +1,7 @@ +N_BLOCKS=20000 +CACHE_SIZE=100000 +SERVER_DATA_DIR=server-data +TCP_PORT=40403 +N_RUNS=2 +BENCH_FILE=bench_results.json +BENCH_DATA_DIR=bench-data From 7d3e1ecad34b2f12274ba287b872b67a1cc1e248 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Wed, 21 Aug 2024 12:01:05 -0300 Subject: [PATCH 11/61] fix(benchmark): fix CI on master (#1121) --- .github/workflows/base_benchmarks.yml | 24 +++++++++++++++++++++++- .github/workflows/pr_benchmarks.yml | 3 +++ 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/.github/workflows/base_benchmarks.yml b/.github/workflows/base_benchmarks.yml index 36a66037d..a91d54ad6 100644 --- a/.github/workflows/base_benchmarks.yml +++ b/.github/workflows/base_benchmarks.yml @@ -4,6 +4,9 @@ on: # yamllint disable-line rule:truthy push: branches: - master +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true jobs: benchmark_base_branch: @@ -23,6 +26,25 @@ jobs: os: ubuntu-22.04 - name: Set env vars run: cat ./extras/benchmarking/.env >> $GITHUB_ENV + - name: Download benchmark data + run: | + mkdir $SERVER_DATA_DIR + poetry run hathor-cli quick_test \ + --testnet \ + --data $SERVER_DATA_DIR \ + --cache \ + --cache-size $CACHE_SIZE \ + --quit-after-n-blocks $N_BLOCKS + - name: Run server node + run: | + poetry run hathor-cli run_node \ + --testnet \ + --data $SERVER_DATA_DIR \ + --cache \ + --cache-size $CACHE_SIZE \ + --x-localhost-only \ + --listen tcp:$TCP_PORT \ + & - name: Track base branch benchmarks with Bencher run: | bencher run \ @@ -32,5 +54,5 @@ jobs: --testbed ubuntu-22.04 \ --adapter shell_hyperfine \ --err \ - --file bench_results.json \ + --file $BENCH_FILE \ './extras/benchmarking/benchmark_sync_v2.sh' diff --git a/.github/workflows/pr_benchmarks.yml b/.github/workflows/pr_benchmarks.yml index 7c8415f99..40c9b2794 100644 --- a/.github/workflows/pr_benchmarks.yml +++ b/.github/workflows/pr_benchmarks.yml @@ -4,6 +4,9 @@ on: # yamllint disable-line rule:truthy pull_request: branches: - master +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true jobs: benchmark_pr_branch: From 72ab590f8d74058bedd36cc95c3b9c211289ea67 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Abadesso?= Date: Wed, 21 Aug 2024 13:33:12 -0300 Subject: [PATCH 12/61] chore: added a flake.nix creating a devshell with all dependencies needed to run the fullnode (#1098) --- .envrc | 7 +++ .gitignore | 3 ++ flake.lock | 130 +++++++++++++++++++++++++++++++++++++++++++++++++++++ flake.nix | 40 +++++++++++++++++ 4 files changed, 180 insertions(+) create mode 100644 .envrc create mode 100644 flake.lock create mode 100644 flake.nix diff --git a/.envrc b/.envrc new file mode 100644 index 000000000..7a65628c4 --- /dev/null +++ b/.envrc @@ -0,0 +1,7 @@ +if [[ $(type -t use_flake) != function ]]; then + echo "ERROR: use_flake function missing." + echo "Please update direnv to v2.30.0 or later." 
+ exit 1 +fi + +use flake diff --git a/.gitignore b/.gitignore index 002fa8b74..1db0c5e78 100644 --- a/.gitignore +++ b/.gitignore @@ -22,3 +22,6 @@ extras/docker/envvars /dist/ /requirements.txt *.egg-info + +# Nix +.direnv/ diff --git a/flake.lock b/flake.lock new file mode 100644 index 000000000..722d363c8 --- /dev/null +++ b/flake.lock @@ -0,0 +1,130 @@ +{ + "nodes": { + "devshell": { + "inputs": { + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs" + }, + "locked": { + "lastModified": 1717408969, + "narHash": "sha256-Q0OEFqe35fZbbRPPRdrjTUUChKVhhWXz3T9ZSKmaoVY=", + "owner": "numtide", + "repo": "devshell", + "rev": "1ebbe68d57457c8cae98145410b164b5477761f4", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "devshell", + "type": "github" + } + }, + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1701680307, + "narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "4022d587cbbfd70fe950c1e2083a02621806a725", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "flake-utils_2": { + "inputs": { + "systems": "systems_2" + }, + "locked": { + "lastModified": 1710146030, + "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1704161960, + "narHash": "sha256-QGua89Pmq+FBAro8NriTuoO/wNaUtugt29/qqA8zeeM=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "63143ac2c9186be6d9da6035fa22620018c85932", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixpkgs-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs_2": { + "locked": { + "lastModified": 1722013011, + "narHash": "sha256-to6bktzSzfcC7KfwA6+UwGqzh1lSgYGVuGrookW+PrE=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "58e9d6e92dcc6c80c01a3fcfb51a9bd230025e9d", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "master", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "devshell": "devshell", + "flake-utils": "flake-utils_2", + "nixpkgs": "nixpkgs_2" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + }, + "systems_2": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 000000000..ba17e7ef4 --- /dev/null +++ b/flake.nix @@ -0,0 +1,40 @@ +{ + description = "virtual environments"; + + inputs.devshell.url = "github:numtide/devshell"; + inputs.flake-utils.url = "github:numtide/flake-utils"; + inputs.nixpkgs.url = "github:NixOS/nixpkgs/master"; + + outputs = { self, flake-utils, devshell, nixpkgs }: + + flake-utils.lib.eachDefaultSystem 
(system: { + devShells.default = + let + pkgs = import nixpkgs { + inherit system; + overlays = [ devshell.overlays.default ]; + }; + in + pkgs.mkShell { + buildInputs = [ + pkgs.python310 + pkgs.poetry + pkgs.rocksdb + pkgs.snappy + pkgs.openssl + pkgs.readline + pkgs.zlib + pkgs.xz + pkgs.bzip2 + pkgs.lz4 + pkgs.cmake + ]; + + shellHook = '' + export CFLAGS="-I${pkgs.rocksdb}/include" + export LDFLAGS="-L${pkgs.rocksdb}/lib" + poetry env use python3.10 + ''; + }; + }); +} From e85e4be5c825aa148f143fb8b6f4bc29cc4fd25d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Carneiro?= Date: Wed, 21 Aug 2024 22:50:00 -0300 Subject: [PATCH 13/61] chore: upgrade actions to a version that use node20 (LTS) (#1123) --- .github/actions/setup-hathor-env/action.yml | 2 +- .github/workflows/docker.yml | 16 ++++++++-------- .github/workflows/main.yml | 4 ++-- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/actions/setup-hathor-env/action.yml b/.github/actions/setup-hathor-env/action.yml index 46df73176..2cc18ece9 100644 --- a/.github/actions/setup-hathor-env/action.yml +++ b/.github/actions/setup-hathor-env/action.yml @@ -13,7 +13,7 @@ runs: run: pipx install poetry - name: Set up Python ${{ inputs.python }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ inputs.python }} cache: 'poetry' diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 714565cc0..88d8898df 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -28,7 +28,7 @@ jobs: - '3.12' steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Prepare base version id: prep run: | @@ -49,28 +49,28 @@ jobs: VERSION: ${{ steps.prep.outputs.check-version }} run: make check-custom - name: Set up QEMU # arm64 is not available natively - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 with: version: latest install: true driver-opts: network=host - name: Login to DockerHub - uses: docker/login-action@v2 + uses: docker/login-action@v3 if: steps.prep.outputs.login-dockerhub == 'true' with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 if: steps.prep.outputs.login-ghcr == 'true' with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Cache Docker layers - uses: actions/cache@v3 + uses: actions/cache@v4 if: steps.prep_base_version.outputs.is-nightly == 'false' with: path: /tmp/.buildx-cache @@ -79,7 +79,7 @@ jobs: restore-keys: | ${{ runner.os }}-buildx-${{ matrix.python-impl }}${{ matrix.python-version }}-refs/heads/master- - name: Build and export to Docker - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v6 with: context: . file: ${{ steps.prep.outputs.dockerfile }} @@ -92,7 +92,7 @@ jobs: - name: Test image run: docker run --rm ${{ env.TEST_TAG }} quick_test --data / --testnet - name: Build and push - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v6 if: ${{ !env.ACT }} # Skip this step when testing locally with https://github.com/nektos/act with: context: . 
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index befb9bc11..54293da10 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -62,14 +62,14 @@ jobs: matrix: ${{fromJson(needs.matrix.outputs.matrix)}} steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - uses: ./.github/actions/setup-hathor-env name: Setup Hathor node environment with: python: ${{ matrix.python }} os: ${{ matrix.os }} - name: Cache mypy - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: .mypy_cache # this key is setup such that every branch has its cache and new branches can reuse dev's cache, but not the other way around From f2c436f8c1a1b744f848c18c1c9e48a292ad9e5f Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Thu, 15 Aug 2024 20:52:25 +0200 Subject: [PATCH 14/61] refactor(p2p): rename peer_id.PeerId to peer.Peer --- docs/legacy/ref/p2p.rst | 2 +- hathor/builder/builder.py | 32 ++++++------ hathor/builder/cli_builder.py | 14 ++--- hathor/cli/peer_id.py | 8 +-- hathor/cli/run_node.py | 2 +- hathor/daa.py | 2 +- hathor/manager.py | 10 ++-- hathor/p2p/factory.py | 6 +-- hathor/p2p/manager.py | 20 +++---- hathor/p2p/{peer_id.py => peer.py} | 30 +++++------ hathor/p2p/peer_storage.py | 10 ++-- hathor/p2p/protocol.py | 8 +-- hathor/p2p/states/peer_id.py | 4 +- hathor/p2p/states/ready.py | 6 +-- hathor/simulator/fake_connection.py | 4 +- hathor/simulator/simulator.py | 4 +- tests/cli/test_sysctl_init.py | 10 ++-- tests/event/event_simulation_tester.py | 10 ++-- tests/others/test_metrics.py | 6 +-- tests/p2p/netfilter/test_match.py | 14 ++--- tests/p2p/test_bootstrap.py | 6 +-- tests/p2p/test_peer_id.py | 64 +++++++++++------------ tests/p2p/test_protocol.py | 30 +++++------ tests/p2p/test_sync_v2.py | 16 +++--- tests/resources/p2p/test_add_peer.py | 4 +- tests/simulation/base.py | 2 +- tests/simulation/test_simulator_itself.py | 30 +++++------ tests/unittest.py | 43 ++++++++------- 28 files changed, 199 insertions(+), 198 deletions(-) rename hathor/p2p/{peer_id.py => peer.py} (95%) diff --git a/docs/legacy/ref/p2p.rst b/docs/legacy/ref/p2p.rst index 439c88c65..fca514871 100644 --- a/docs/legacy/ref/p2p.rst +++ b/docs/legacy/ref/p2p.rst @@ -27,7 +27,7 @@ The :py:mod:`hathor.p2p.states` has all states and messages of the p2p network. to send new messages and handle the new incoming ones. -The :py:class:`hathor.p2p.peer_id.PeerId` stores the peer's identity, entrypoint, reputation and history. +The :py:class:`hathor.p2p.peer.Peer` stores the peer's identity, entrypoint, reputation and history. 
diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index 455384852..8160bae17 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -35,7 +35,7 @@ from hathor.manager import HathorManager from hathor.mining.cpu_mining_service import CpuMiningService from hathor.p2p.manager import ConnectionsManager -from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer import Peer from hathor.pubsub import PubSubManager from hathor.reactor import ReactorProtocol as Reactor from hathor.storage import RocksDBStorage @@ -96,7 +96,7 @@ class StorageType(Enum): class BuildArtifacts(NamedTuple): """Artifacts created by a builder.""" - peer_id: PeerId + peer: Peer settings: HathorSettingsType rng: Random reactor: Reactor @@ -137,7 +137,7 @@ def __init__(self) -> None: self._checkpoints: Optional[list[Checkpoint]] = None self._capabilities: Optional[list[str]] = None - self._peer_id: Optional[PeerId] = None + self._peer: Optional[Peer] = None self._network: Optional[str] = None self._cmdline: str = '' @@ -212,7 +212,7 @@ def build(self) -> BuildArtifacts: reactor = self._get_reactor() pubsub = self._get_or_create_pubsub() - peer_id = self._get_peer_id() + peer = self._get_peer() execution_manager = self._get_or_create_execution_manager() consensus_algorithm = self._get_or_create_consensus() @@ -256,7 +256,7 @@ def build(self) -> BuildArtifacts: pubsub=pubsub, consensus_algorithm=consensus_algorithm, daa=daa, - peer_id=peer_id, + peer=peer, tx_storage=tx_storage, p2p_manager=p2p_manager, event_manager=event_manager, @@ -264,7 +264,7 @@ def build(self) -> BuildArtifacts: rng=self._rng, checkpoints=self._checkpoints, capabilities=self._capabilities, - environment_info=get_environment_info(self._cmdline, peer_id.id), + environment_info=get_environment_info(self._cmdline, peer.id), bit_signaling_service=bit_signaling_service, verification_service=verification_service, cpu_mining_service=cpu_mining_service, @@ -284,7 +284,7 @@ def build(self) -> BuildArtifacts: stratum_factory = self._create_stratum_server(manager) self.artifacts = BuildArtifacts( - peer_id=peer_id, + peer=peer, settings=settings, rng=self._rng, reactor=reactor, @@ -337,9 +337,9 @@ def set_capabilities(self, capabilities: list[str]) -> 'Builder': self._capabilities = capabilities return self - def set_peer_id(self, peer_id: PeerId) -> 'Builder': + def set_peer(self, peer: Peer) -> 'Builder': self.check_if_can_modify() - self._peer_id = peer_id + self._peer = peer return self def _get_or_create_settings(self) -> HathorSettingsType: @@ -361,10 +361,10 @@ def _get_soft_voided_tx_ids(self) -> set[bytes]: return set(settings.SOFT_VOIDED_TX_IDS) - def _get_peer_id(self) -> PeerId: - if self._peer_id is not None: - return self._peer_id - raise ValueError('peer_id not set') + def _get_peer(self) -> Peer: + if self._peer is not None: + return self._peer + raise ValueError('peer not set') def _get_or_create_execution_manager(self) -> ExecutionManager: if self._execution_manager is None: @@ -416,7 +416,7 @@ def _get_or_create_p2p_manager(self) -> ConnectionsManager: enable_ssl = True reactor = self._get_reactor() - my_peer = self._get_peer_id() + my_peer = self._get_peer() assert self._network is not None @@ -510,12 +510,12 @@ def _get_or_create_event_storage(self) -> EventStorage: def _get_or_create_event_manager(self) -> EventManager: if self._event_manager is None: - peer_id = self._get_peer_id() + peer = self._get_peer() settings = self._get_or_create_settings() reactor = self._get_reactor() storage = 
self._get_or_create_event_storage() factory = EventWebsocketFactory( - peer_id=not_none(peer_id.id), + peer_id=not_none(peer.id), network=settings.NETWORK_NAME, reactor=reactor, event_storage=storage, diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index 3f3304f2a..54e7b8fa4 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -36,7 +36,7 @@ from hathor.mining.cpu_mining_service import CpuMiningService from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.manager import ConnectionsManager -from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer import Peer from hathor.p2p.utils import discover_hostname, get_genesis_short_hash from hathor.pubsub import PubSubManager from hathor.reactor import ReactorProtocol as Reactor @@ -98,7 +98,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: self.log = logger.new() self.reactor = reactor - peer_id = PeerId.create_from_json_path(self._args.peer) if self._args.peer else PeerId() + peer = Peer.create_from_json_path(self._args.peer) if self._args.peer else Peer() python = f'{platform.python_version()}-{platform.python_implementation()}' self.log.info( @@ -106,7 +106,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: hathor=hathor.__version__, pid=os.getpid(), genesis=get_genesis_short_hash(), - my_peer_id=str(peer_id.id), + my_peer_id=str(peer.id), python=python, platform=platform.platform(), settings=settings_source, @@ -225,7 +225,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: if self._args.x_enable_event_queue: self.event_ws_factory = EventWebsocketFactory( - peer_id=not_none(peer_id.id), + peer_id=not_none(peer.id), network=network, reactor=reactor, event_storage=event_storage @@ -307,7 +307,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: settings=settings, reactor=reactor, network=network, - my_peer=peer_id, + my_peer=peer, pubsub=pubsub, ssl=True, whitelist_only=False, @@ -348,13 +348,13 @@ def create_manager(self, reactor: Reactor) -> HathorManager: pubsub=pubsub, consensus_algorithm=consensus_algorithm, daa=daa, - peer_id=peer_id, + peer=peer, tx_storage=tx_storage, p2p_manager=p2p_manager, event_manager=event_manager, wallet=self.wallet, checkpoints=settings.CHECKPOINTS, - environment_info=get_environment_info(args=str(self._args), peer_id=peer_id.id), + environment_info=get_environment_info(args=str(self._args), peer_id=peer.id), full_verification=full_verification, enable_event_queue=self._args.x_enable_event_queue, bit_signaling_service=bit_signaling_service, diff --git a/hathor/cli/peer_id.py b/hathor/cli/peer_id.py index 3e0319ee6..45bf3a04d 100644 --- a/hathor/cli/peer_id.py +++ b/hathor/cli/peer_id.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" Generates a random PeerId and print it to stdout. +""" Generates a random Peer and print it to stdout. It may be used to testing purposes. 
""" @@ -20,9 +20,9 @@ def main() -> None: - from hathor.p2p.peer_id import PeerId + from hathor.p2p.peer import Peer - peer_id = PeerId() - data = peer_id.to_json(include_private_key=True) + peer = Peer() + data = peer.to_json(include_private_key=True) txt = json.dumps(data, indent=4) print(txt) diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 399560307..a7d7be0d6 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -219,7 +219,7 @@ def prepare(self, *, register_resources: bool = True) -> None: from hathor.builder.builder import BuildArtifacts self.artifacts = BuildArtifacts( - peer_id=self.manager.my_peer, + peer=self.manager.my_peer, settings=settings, rng=self.manager.rng, reactor=self.manager.reactor, diff --git a/hathor/daa.py b/hathor/daa.py index afd309726..d3ae33379 100644 --- a/hathor/daa.py +++ b/hathor/daa.py @@ -47,7 +47,7 @@ class TestMode(IntFlag): class DifficultyAdjustmentAlgorithm: - # TODO: This singleton is temporary, and only used in PeerId. It should be removed from there, and then from here. + # TODO: This singleton is temporary, and only used in Peer. It should be removed from there, and then from here. singleton: ClassVar[Optional['DifficultyAdjustmentAlgorithm']] = None def __init__(self, *, settings: HathorSettings, test_mode: TestMode = TestMode.DISABLED) -> None: diff --git a/hathor/manager.py b/hathor/manager.py index 9f1210d40..0218fd663 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -46,7 +46,7 @@ from hathor.mining import BlockTemplate, BlockTemplates from hathor.mining.cpu_mining_service import CpuMiningService from hathor.p2p.manager import ConnectionsManager -from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer import Peer from hathor.profiler import get_cpu_profiler from hathor.pubsub import HathorEvents, PubSubManager from hathor.reactor import ReactorProtocol as Reactor @@ -96,7 +96,7 @@ def __init__( pubsub: PubSubManager, consensus_algorithm: ConsensusAlgorithm, daa: DifficultyAdjustmentAlgorithm, - peer_id: PeerId, + peer: Peer, tx_storage: TransactionStorage, p2p_manager: ConnectionsManager, event_manager: EventManager, @@ -119,7 +119,7 @@ def __init__( ) -> None: """ :param reactor: Twisted reactor which handles the mainloop and the events. - :param peer_id: Id of this node. + :param peer: Peer object, with peer-id of this node. :param network: Name of the network this node participates. Usually it is either testnet or mainnet. :type network: string @@ -163,7 +163,7 @@ def __init__( # Remote address, which can be different from local address. 
self.remote_address = None - self.my_peer = peer_id + self.my_peer = peer self.network = network self.is_started: bool = False @@ -976,7 +976,7 @@ def on_new_tx( def has_sync_version_capability(self) -> bool: return self._settings.CAPABILITY_SYNC_VERSION in self.capabilities - def add_peer_to_whitelist(self, peer_id): + def add_peer_to_whitelist(self, peer_id: str) -> None: if not self._settings.ENABLE_PEER_WHITELIST: return diff --git a/hathor/p2p/factory.py b/hathor/p2p/factory.py index a90cf2882..521abaa09 100644 --- a/hathor/p2p/factory.py +++ b/hathor/p2p/factory.py @@ -19,7 +19,7 @@ from hathor.conf.settings import HathorSettings from hathor.p2p.manager import ConnectionsManager -from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer import Peer from hathor.p2p.protocol import HathorLineReceiver if TYPE_CHECKING: @@ -39,7 +39,7 @@ class HathorServerFactory(protocol.ServerFactory): def __init__( self, network: str, - my_peer: PeerId, + my_peer: Peer, p2p_manager: ConnectionsManager, *, settings: HathorSettings, @@ -75,7 +75,7 @@ class HathorClientFactory(protocol.ClientFactory): def __init__( self, network: str, - my_peer: PeerId, + my_peer: Peer, p2p_manager: ConnectionsManager, *, settings: HathorSettings, diff --git a/hathor/p2p/manager.py b/hathor/p2p/manager.py index c0f6b58f6..4cf4e240e 100644 --- a/hathor/p2p/manager.py +++ b/hathor/p2p/manager.py @@ -27,8 +27,8 @@ from hathor.conf.settings import HathorSettings from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.netfilter.factory import NetfilterFactory +from hathor.p2p.peer import Peer from hathor.p2p.peer_discovery import PeerDiscovery -from hathor.p2p.peer_id import PeerId from hathor.p2p.peer_storage import PeerStorage from hathor.p2p.protocol import HathorProtocol from hathor.p2p.rate_limiter import RateLimiter @@ -93,7 +93,7 @@ def __init__( settings: HathorSettings, reactor: Reactor, network: str, - my_peer: PeerId, + my_peer: Peer, pubsub: PubSubManager, ssl: bool, rng: Random, @@ -159,7 +159,7 @@ def __init__( self.received_peer_storage = PeerStorage() # List of known peers. - self.peer_storage = PeerStorage() # dict[string (peer.id), PeerId] + self.peer_storage = PeerStorage() # dict[string (peer.id), Peer] # Maximum unseen time before removing a peer (seconds). self.max_peer_unseen_dt: float = 30 * 60 # 30-minutes @@ -366,7 +366,7 @@ def disconnect_all_peers(self, *, force: bool = False) -> None: for conn in self.iter_all_connections(): conn.disconnect(force=force) - def on_connection_failure(self, failure: Failure, peer: Optional[PeerId], endpoint: IStreamClientEndpoint) -> None: + def on_connection_failure(self, failure: Failure, peer: Optional[Peer], endpoint: IStreamClientEndpoint) -> None: connecting_peer = self.connecting_peers[endpoint] entrypoint = connecting_peer.entrypoint self.log.warn('connection failure', entrypoint=entrypoint, failure=failure.getErrorMessage()) @@ -433,7 +433,7 @@ def on_peer_ready(self, protocol: HathorProtocol) -> None: # Notify other peers about this new peer connection. 
self.relay_peer_to_ready_connections(protocol.peer) - def relay_peer_to_ready_connections(self, peer: PeerId) -> None: + def relay_peer_to_ready_connections(self, peer: Peer) -> None: """Relay peer to all ready connections.""" for conn in self.iter_ready_connections(): if conn.peer == peer: @@ -491,7 +491,7 @@ def is_peer_connected(self, peer_id: str) -> bool: """ return peer_id in self.connected_peers - def on_receive_peer(self, peer: PeerId, origin: Optional[ReadyState] = None) -> None: + def on_receive_peer(self, peer: Peer, origin: Optional[ReadyState] = None) -> None: """ Update a peer information in our storage, and instantly attempt to connect to it if it is not connected yet. """ @@ -503,7 +503,7 @@ def on_receive_peer(self, peer: PeerId, origin: Optional[ReadyState] = None) -> def peers_cleanup(self) -> None: """Clean up aged peers.""" now = self.reactor.seconds() - to_be_removed: list[PeerId] = [] + to_be_removed: list[Peer] = [] for peer in self.peer_storage.values(): assert peer.id is not None if self.is_peer_connected(peer.id): @@ -574,7 +574,7 @@ def _update_whitelist_cb(self, body: bytes) -> None: for peer_id in peers_to_remove: self.manager.remove_peer_from_whitelist_and_disconnect(peer_id) - def connect_to_if_not_connected(self, peer: PeerId, now: int) -> None: + def connect_to_if_not_connected(self, peer: Peer, now: int) -> None: """ Attempts to connect if it is not connected to the peer. """ if not peer.entrypoints: @@ -594,7 +594,7 @@ def connect_to_if_not_connected(self, peer: PeerId, now: int) -> None: def _connect_to_callback( self, protocol: IProtocol, - peer: Optional[PeerId], + peer: Optional[Peer], endpoint: IStreamClientEndpoint, entrypoint: Entrypoint, ) -> None: @@ -610,7 +610,7 @@ def _connect_to_callback( def connect_to( self, entrypoint: Entrypoint, - peer: Optional[PeerId] = None, + peer: Optional[Peer] = None, use_ssl: Optional[bool] = None, ) -> None: """ Attempt to connect to a peer, even if a connection already exists. diff --git a/hathor/p2p/peer_id.py b/hathor/p2p/peer.py similarity index 95% rename from hathor/p2p/peer_id.py rename to hathor/p2p/peer.py index d2aef2634..9d40ec7b6 100644 --- a/hathor/p2p/peer_id.py +++ b/hathor/p2p/peer.py @@ -49,7 +49,7 @@ class PeerFlags(str, Enum): RETRIES_EXCEEDED = 'retries_exceeded' -class PeerId: +class Peer: """ Identify a peer, even when it is disconnected. The public_key and private_key are used to ensure that a new connection @@ -92,7 +92,7 @@ def __init__(self, auto_generate_keys: bool = True) -> None: def __str__(self): return ( - f'PeerId(id={self.id}, entrypoints={self.entrypoints_as_str()}, retry_timestamp={self.retry_timestamp}, ' + f'Peer(id={self.id}, entrypoints={self.entrypoints_as_str()}, retry_timestamp={self.retry_timestamp}, ' f'retry_interval={self.retry_interval})' ) @@ -100,8 +100,8 @@ def entrypoints_as_str(self) -> list[str]: """Return a list of entrypoints serialized as str""" return list(map(str, self.entrypoints)) - def merge(self, other: 'PeerId') -> None: - """ Merge two PeerId objects, checking that they have the same + def merge(self, other: 'Peer') -> None: + """ Merge two Peer objects, checking that they have the same id, public_key, and private_key. The entrypoints are merged without duplicating their entries. 
""" @@ -174,18 +174,18 @@ def verify_signature(self, signature: bytes, data: bytes) -> bool: return True @classmethod - def create_from_json_path(cls, path: str) -> 'PeerId': - """Create a new PeerId from a JSON file.""" + def create_from_json_path(cls, path: str) -> 'Peer': + """Create a new Peer from a JSON file.""" data = json.load(open(path, 'r')) - peer = PeerId.create_from_json(data) + peer = Peer.create_from_json(data) peer.source_file = path return peer @classmethod - def create_from_json(cls, data: dict[str, Any]) -> 'PeerId': - """ Create a new PeerId from JSON data. + def create_from_json(cls, data: dict[str, Any]) -> 'Peer': + """ Create a new Peer from JSON data. - It is used both to load a PeerId from disk and to create a PeerId + It is used both to load a Peer from disk and to create a Peer from a peer connection. """ obj = cls(auto_generate_keys=False) @@ -436,18 +436,18 @@ def validate_certificate(self, protocol: 'HathorProtocol') -> bool: return True def reload_entrypoints_from_source_file(self) -> None: - """Update this PeerId's entrypoints from the json file.""" + """Update this Peer's entrypoints from the json file.""" if not self.source_file: raise Exception('Trying to reload entrypoints but no peer config file was provided.') - new_peer_id = PeerId.create_from_json_path(self.source_file) + new_peer = Peer.create_from_json_path(self.source_file) - if new_peer_id.id != self.id: + if new_peer.id != self.id: self._log.error( 'Ignoring peer id file update because the peer_id does not match.', current_peer_id=self.id, - new_peer_id=new_peer_id.id, + new_peer_id=new_peer.id, ) return - self.entrypoints = new_peer_id.entrypoints + self.entrypoints = new_peer.entrypoints diff --git a/hathor/p2p/peer_storage.py b/hathor/p2p/peer_storage.py index 52131df11..6efb8a549 100644 --- a/hathor/p2p/peer_storage.py +++ b/hathor/p2p/peer_storage.py @@ -12,15 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. -from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer import Peer -class PeerStorage(dict[str, PeerId]): +class PeerStorage(dict[str, Peer]): """ PeerStorage is used to store all known peers in memory. It is a dict of peer objects, and peers can be retrieved by their `peer.id`. """ - def add(self, peer: PeerId) -> None: + def add(self, peer: Peer) -> None: """ Add a new peer to the storage. Raises a `ValueError` if the peer has already been added. @@ -30,7 +30,7 @@ def add(self, peer: PeerId) -> None: raise ValueError('Peer has already been added') self[peer.id] = peer - def add_or_merge(self, peer: PeerId) -> PeerId: + def add_or_merge(self, peer: Peer) -> Peer: """ Add a peer to the storage if it has not been added yet. Otherwise, merge the current peer with the given one. 
""" @@ -43,7 +43,7 @@ def add_or_merge(self, peer: PeerId) -> PeerId: current.merge(peer) return current - def remove(self, peer: PeerId) -> None: + def remove(self, peer: Peer) -> None: """ Remove a peer from the storage """ assert peer.id is not None diff --git a/hathor/p2p/protocol.py b/hathor/p2p/protocol.py index 99c63b29e..eeec521f2 100644 --- a/hathor/p2p/protocol.py +++ b/hathor/p2p/protocol.py @@ -26,7 +26,7 @@ from hathor.conf.settings import HathorSettings from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.messages import ProtocolMessages -from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer import Peer from hathor.p2p.rate_limiter import RateLimiter from hathor.p2p.states import BaseState, HelloState, PeerIdState, ReadyState from hathor.p2p.sync_version import SyncVersion @@ -73,12 +73,12 @@ class WarningFlags(str, Enum): NO_ENTRYPOINTS = 'no_entrypoints' network: str - my_peer: PeerId + my_peer: Peer connections: 'ConnectionsManager' node: 'HathorManager' app_version: str last_message: float - peer: Optional[PeerId] + peer: Optional[Peer] transport: Optional[ITransport] state: Optional[BaseState] connection_time: float @@ -94,7 +94,7 @@ class WarningFlags(str, Enum): def __init__( self, network: str, - my_peer: PeerId, + my_peer: Peer, p2p_manager: 'ConnectionsManager', *, settings: HathorSettings, diff --git a/hathor/p2p/states/peer_id.py b/hathor/p2p/states/peer_id.py index 8d68b669b..439ccd4b0 100644 --- a/hathor/p2p/states/peer_id.py +++ b/hathor/p2p/states/peer_id.py @@ -18,7 +18,7 @@ from hathor.conf.settings import HathorSettings from hathor.p2p.messages import ProtocolMessages -from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer import Peer from hathor.p2p.states.base import BaseState from hathor.util import json_dumps, json_loads @@ -87,7 +87,7 @@ async def handle_peer_id(self, payload: str) -> None: data = json_loads(payload) - peer = PeerId.create_from_json(data) + peer = Peer.create_from_json(data) peer.validate() assert peer.id is not None diff --git a/hathor/p2p/states/ready.py b/hathor/p2p/states/ready.py index 35802b877..c04d97b4c 100644 --- a/hathor/p2p/states/ready.py +++ b/hathor/p2p/states/ready.py @@ -21,7 +21,7 @@ from hathor.conf.settings import HathorSettings from hathor.indexes.height_index import HeightInfo from hathor.p2p.messages import ProtocolMessages -from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer import Peer from hathor.p2p.states.base import BaseState from hathor.p2p.sync_agent import SyncAgent from hathor.p2p.utils import to_height_info, to_serializable_best_blockchain @@ -158,7 +158,7 @@ def handle_get_peers(self, payload: str) -> None: for peer in self.protocol.connections.peer_storage.values(): self.send_peers([peer]) - def send_peers(self, peer_list: Iterable['PeerId']) -> None: + def send_peers(self, peer_list: Iterable['Peer']) -> None: """ Send a PEERS command with a list of peers. 
""" data = [] @@ -177,7 +177,7 @@ def handle_peers(self, payload: str) -> None: """ received_peers = json_loads(payload) for data in received_peers: - peer = PeerId.create_from_json(data) + peer = Peer.create_from_json(data) peer.validate() if self.protocol.connections: self.protocol.connections.on_receive_peer(peer, origin=self) diff --git a/hathor/simulator/fake_connection.py b/hathor/simulator/fake_connection.py index a2170d233..4473813e8 100644 --- a/hathor/simulator/fake_connection.py +++ b/hathor/simulator/fake_connection.py @@ -22,13 +22,13 @@ if TYPE_CHECKING: from hathor.manager import HathorManager - from hathor.p2p.peer_id import PeerId + from hathor.p2p.peer import Peer logger = get_logger() class HathorStringTransport(StringTransport): - def __init__(self, peer: 'PeerId'): + def __init__(self, peer: 'Peer'): super().__init__() self.peer = peer diff --git a/hathor/simulator/simulator.py b/hathor/simulator/simulator.py index 5eb4e20e0..c1d0754f4 100644 --- a/hathor/simulator/simulator.py +++ b/hathor/simulator/simulator.py @@ -26,7 +26,7 @@ from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.feature_activation.feature_service import FeatureService from hathor.manager import HathorManager -from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer import Peer from hathor.simulator.clock import HeapClock, MemoryReactorHeapClock from hathor.simulator.miner.geometric_miner import GeometricMiner from hathor.simulator.patches import SimulatorCpuMiningService, SimulatorVertexVerifier @@ -81,7 +81,7 @@ def get_default_builder(self) -> Builder: """ return Builder() \ .set_network(self._network) \ - .set_peer_id(PeerId()) \ + .set_peer(Peer()) \ .set_soft_voided_tx_ids(set()) \ .enable_full_verification() \ .enable_sync_v1() \ diff --git a/tests/cli/test_sysctl_init.py b/tests/cli/test_sysctl_init.py index a696e2008..b71da9d1e 100644 --- a/tests/cli/test_sysctl_init.py +++ b/tests/cli/test_sysctl_init.py @@ -156,13 +156,15 @@ def register_signal_handlers(self) -> None: expected_sysctl_dict['p2p.sync_update_interval']) # assert always_enabled_sync when it is set with a file + peer_1 = '0e2bd0d8cd1fb6d040801c32ec27e8986ce85eb8810b6c878dcad15bce3b5b1e' + peer_2 = '2ff0d2c80c50f724de79f132a2f8cae576c64b57ea531d400577adf7db3e7c15' expected_sysctl_dict = { - 'p2p.always_enable_sync': ['peer-3', 'peer-4'], + 'p2p.always_enable_sync': [peer_1, peer_2], } file_content = [ - 'peer-3', - 'peer-4', + peer_1, + peer_2, ] # set the always_enabled_sync peers file @@ -195,6 +197,6 @@ def register_signal_handlers(self) -> None: self.assertTrue(run_node is not None) conn = run_node.manager.connections - curr_always_enabled_sync = list(conn.always_enable_sync) + curr_always_enabled_sync = list(map(str, conn.always_enable_sync)) self.assertTrue( set(curr_always_enabled_sync).issuperset(set(expected_sysctl_dict['p2p.always_enable_sync']))) diff --git a/tests/event/event_simulation_tester.py b/tests/event/event_simulation_tester.py index 3e2bc4659..5541c6397 100644 --- a/tests/event/event_simulation_tester.py +++ b/tests/event/event_simulation_tester.py @@ -23,7 +23,7 @@ from hathor.event.websocket import EventWebsocketProtocol from hathor.event.websocket.request import Request from hathor.event.websocket.response import EventResponse, InvalidRequestResponse -from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer import Peer from hathor.transaction.util import unpack, unpack_len from hathor.util import json_loadb from tests.simulation.base import SimulatorTestCase @@ -34,14 +34,14 @@ class 
BaseEventSimulationTester(SimulatorTestCase): builder: Builder def _create_artifacts(self) -> None: - peer_id = PeerId() - builder = self.builder.set_peer_id(peer_id) \ + peer = Peer() + builder = self.builder.set_peer(peer) \ .disable_full_verification() \ .enable_event_queue() artifacts = self.simulator.create_artifacts(builder) - assert peer_id.id is not None - self.peer_id: str = peer_id.id + assert peer.id is not None + self.peer_id: str = peer.id self.manager = artifacts.manager self.manager.allow_mining_without_peers() self.settings = artifacts.settings diff --git a/tests/others/test_metrics.py b/tests/others/test_metrics.py index 6573c43a2..99c410c50 100644 --- a/tests/others/test_metrics.py +++ b/tests/others/test_metrics.py @@ -5,7 +5,7 @@ from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.manager import PeerConnectionsMetrics -from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer import Peer from hathor.p2p.protocol import HathorProtocol from hathor.pubsub import HathorEvents from hathor.simulator.utils import add_new_blocks @@ -61,7 +61,7 @@ def test_connections_manager_integration(self): wallet.unlock(b'teste') manager = self.create_peer('testnet', tx_storage=tx_storage, wallet=wallet) - manager.connections.peer_storage.update({"1": PeerId(), "2": PeerId(), "3": PeerId()}) + manager.connections.peer_storage.update({"1": Peer(), "2": Peer(), "3": Peer()}) manager.connections.connected_peers.update({"1": Mock(), "2": Mock()}) manager.connections.handshaking_peers.update({Mock()}) @@ -223,7 +223,7 @@ def build_hathor_protocol(): inbound=False, settings=self._settings ) - protocol.peer = PeerId() + protocol.peer = Peer() return protocol diff --git a/tests/p2p/netfilter/test_match.py b/tests/p2p/netfilter/test_match.py index 39bb844fe..b8debf58f 100644 --- a/tests/p2p/netfilter/test_match.py +++ b/tests/p2p/netfilter/test_match.py @@ -11,7 +11,7 @@ NetfilterMatchOr, NetfilterMatchPeerId, ) -from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer import Peer from hathor.simulator import FakeConnection from tests import unittest @@ -202,15 +202,15 @@ class BaseNetfilterMatchTest(unittest.TestCase): def test_match_peer_id(self) -> None: network = 'testnet' - peer_id1 = PeerId() - peer_id2 = PeerId() - manager1 = self.create_peer(network, peer_id=peer_id1) - manager2 = self.create_peer(network, peer_id=peer_id2) + peer1 = Peer() + peer2 = Peer() + manager1 = self.create_peer(network, peer=peer1) + manager2 = self.create_peer(network, peer=peer2) conn = FakeConnection(manager1, manager2) self.assertTrue(conn.proto2.is_state(conn.proto2.PeerState.HELLO)) - matcher = NetfilterMatchPeerId(str(peer_id1.id)) + matcher = NetfilterMatchPeerId(str(peer1.id)) context = NetfilterContext(protocol=conn.proto2) self.assertFalse(matcher.match(context)) @@ -231,7 +231,7 @@ def test_match_peer_id(self) -> None: # Guarantee the to_json is working fine json = matcher.to_json() self.assertEqual(json['type'], 'NetfilterMatchPeerId') - self.assertEqual(json['match_params']['peer_id'], str(peer_id1.id)) + self.assertEqual(json['match_params']['peer_id'], str(peer1.id)) class SyncV1NetfilterMatchTest(unittest.SyncV1Params, BaseNetfilterMatchTest): diff --git a/tests/p2p/test_bootstrap.py b/tests/p2p/test_bootstrap.py index b6a851a5c..721a1a1e2 100644 --- a/tests/p2p/test_bootstrap.py +++ b/tests/p2p/test_bootstrap.py @@ -6,9 +6,9 @@ from hathor.p2p.entrypoint import Entrypoint, Protocol from hathor.p2p.manager import ConnectionsManager +from hathor.p2p.peer import Peer from 
hathor.p2p.peer_discovery import DNSPeerDiscovery, PeerDiscovery from hathor.p2p.peer_discovery.dns import LookupResult -from hathor.p2p.peer_id import PeerId from hathor.pubsub import PubSubManager from tests import unittest from tests.test_memory_reactor_clock import TestMemoryReactorClock @@ -48,7 +48,7 @@ def do_lookup_text(self, host: str) -> Deferred[LookupResult]: class BootstrapTestCase(unittest.TestCase): def test_mock_discovery(self) -> None: pubsub = PubSubManager(self.clock) - connections = ConnectionsManager(self._settings, self.clock, 'testnet', PeerId(), pubsub, True, self.rng, True) + connections = ConnectionsManager(self._settings, self.clock, 'testnet', Peer(), pubsub, True, self.rng, True) host_ports1 = [ ('foobar', 1234), ('127.0.0.99', 9999), @@ -71,7 +71,7 @@ def test_mock_discovery(self) -> None: def test_dns_discovery(self) -> None: pubsub = PubSubManager(self.clock) - connections = ConnectionsManager(self._settings, self.clock, 'testnet', PeerId(), pubsub, True, self.rng, True) + connections = ConnectionsManager(self._settings, self.clock, 'testnet', Peer(), pubsub, True, self.rng, True) bootstrap_a = [ '127.0.0.99', '127.0.0.88', diff --git a/tests/p2p/test_peer_id.py b/tests/p2p/test_peer_id.py index aec32921a..b947f8337 100644 --- a/tests/p2p/test_peer_id.py +++ b/tests/p2p/test_peer_id.py @@ -7,7 +7,7 @@ from twisted.internet.interfaces import ITransport from hathor.p2p.entrypoint import Entrypoint -from hathor.p2p.peer_id import InvalidPeerIdException, PeerId +from hathor.p2p.peer import InvalidPeerIdException, Peer from hathor.p2p.peer_storage import PeerStorage from hathor.util import not_none from tests import unittest @@ -16,41 +16,41 @@ class PeerIdTest(unittest.TestCase): def test_invalid_id(self) -> None: - p1 = PeerId() + p1 = Peer() p1.id = not_none(p1.id)[::-1] self.assertRaises(InvalidPeerIdException, p1.validate) def test_invalid_public_key(self) -> None: - p1 = PeerId() - p2 = PeerId() + p1 = Peer() + p2 = Peer() p1.public_key = p2.public_key self.assertRaises(InvalidPeerIdException, p1.validate) def test_invalid_private_key(self) -> None: - p1 = PeerId() - p2 = PeerId() + p1 = Peer() + p2 = Peer() p1.private_key = p2.private_key self.assertRaises(InvalidPeerIdException, p1.validate) def test_no_private_key(self) -> None: - p1 = PeerId() + p1 = Peer() p1.private_key = None p1.validate() def test_create_from_json(self) -> None: - p1 = PeerId() + p1 = Peer() data1 = p1.to_json(include_private_key=True) - p2 = PeerId.create_from_json(data1) + p2 = Peer.create_from_json(data1) data2 = p2.to_json(include_private_key=True) self.assertEqual(data1, data2) p2.validate() def test_create_from_json_without_private_key(self) -> None: - p1 = PeerId() + p1 = Peer() data1 = p1.to_json() # Just to test a part of the code del data1['entrypoints'] - p2 = PeerId.create_from_json(data1) + p2 = Peer.create_from_json(data1) data2 = p2.to_json() self.assertEqual(data2['entrypoints'], []) data1['entrypoints'] = [] @@ -59,13 +59,13 @@ def test_create_from_json_without_private_key(self) -> None: def test_sign_verify(self) -> None: data = b'abacate' - p1 = PeerId() + p1 = Peer() signature = p1.sign(data) self.assertTrue(p1.verify_signature(signature, data)) def test_sign_verify_fail(self) -> None: data = b'abacate' - p1 = PeerId() + p1 = Peer() signature = p1.sign(data) signature = signature[::-1] self.assertFalse(p1.verify_signature(signature, data)) @@ -74,8 +74,8 @@ def test_merge_peer(self) -> None: # Testing peer storage with merge of peers peer_storage = PeerStorage() 
- p1 = PeerId() - p2 = PeerId() + p1 = Peer() + p2 = Peer() p2.id = p1.id p2.public_key = p1.public_key p1.public_key = None @@ -95,12 +95,12 @@ def test_merge_peer(self) -> None: ep2 = Entrypoint.parse('tcp://127.0.0.1:1002') ep3 = Entrypoint.parse('tcp://127.0.0.1:1003') - p3 = PeerId() + p3 = Peer() p3.entrypoints.append(ep1) p3.entrypoints.append(ep2) p3.public_key = None - p4 = PeerId() + p4 = Peer() p4.public_key = None p4.private_key = None p4.id = p3.id @@ -124,7 +124,7 @@ def test_merge_peer(self) -> None: def test_save_peer_file(self) -> None: import json - p = PeerId() + p = Peer() tmpdir = tempfile.mkdtemp() path = os.path.join(tmpdir, 'peer.json') p.save_to_file(path) @@ -138,7 +138,7 @@ def test_save_peer_file(self) -> None: shutil.rmtree(tmpdir) def test_retry_connection(self) -> None: - p = PeerId() + p = Peer() interval = p.retry_interval p.increment_retry_attempt(0) self.assertEqual(self._settings.PEER_CONNECTION_RETRY_INTERVAL_MULTIPLIER*interval, p.retry_interval) @@ -159,7 +159,7 @@ def test_validate_certificate(self) -> None: artifacts = builder.build() protocol = artifacts.p2p_manager.server_factory.buildProtocol(Mock()) - peer = PeerId() + peer = Peer() from OpenSSL import crypto @@ -167,14 +167,14 @@ class FakeTransport: def getPeerCertificate(self) -> crypto.X509: # we use a new peer here just to save the trouble of manually creating a certificate - random_peer = PeerId() + random_peer = Peer() return crypto.X509.from_cryptography(random_peer.get_certificate()) protocol.transport = cast(ITransport, FakeTransport()) result = peer.validate_certificate(protocol) self.assertFalse(result) def test_retry_logic(self) -> None: - peer = PeerId() + peer = Peer() self.assertTrue(peer.can_retry(0)) retry_interval = peer.retry_interval @@ -220,26 +220,26 @@ class BasePeerIdTest(unittest.TestCase): async def test_validate_entrypoint(self) -> None: manager = self.create_peer('testnet', unlock_wallet=False) - peer_id = manager.my_peer - peer_id.entrypoints = [Entrypoint.parse('tcp://127.0.0.1:40403')] + peer = manager.my_peer + peer.entrypoints = [Entrypoint.parse('tcp://127.0.0.1:40403')] # we consider that we are starting the connection to the peer protocol = manager.connections.client_factory.buildProtocol('127.0.0.1') protocol.entrypoint = Entrypoint.parse('tcp://127.0.0.1:40403') - result = await peer_id.validate_entrypoint(protocol) + result = await peer.validate_entrypoint(protocol) self.assertTrue(result) # if entrypoint is an URI - peer_id.entrypoints = [Entrypoint.parse('tcp://uri_name:40403')] - result = await peer_id.validate_entrypoint(protocol) + peer.entrypoints = [Entrypoint.parse('tcp://uri_name:40403')] + result = await peer.validate_entrypoint(protocol) self.assertTrue(result) # test invalid. DNS in test mode will resolve to '127.0.0.1:40403' protocol.entrypoint = Entrypoint.parse('tcp://45.45.45.45:40403') - result = await peer_id.validate_entrypoint(protocol) + result = await peer.validate_entrypoint(protocol) self.assertFalse(result) # now test when receiving the connection - i.e. 
the peer starts it protocol.entrypoint = None - peer_id.entrypoints = [Entrypoint.parse('tcp://127.0.0.1:40403')] + peer.entrypoints = [Entrypoint.parse('tcp://127.0.0.1:40403')] from collections import namedtuple Peer = namedtuple('Peer', 'host') @@ -248,11 +248,11 @@ class FakeTransport: def getPeer(self) -> Peer: return Peer(host='127.0.0.1') protocol.transport = FakeTransport() - result = await peer_id.validate_entrypoint(protocol) + result = await peer.validate_entrypoint(protocol) self.assertTrue(result) # if entrypoint is an URI - peer_id.entrypoints = [Entrypoint.parse('tcp://uri_name:40403')] - result = await peer_id.validate_entrypoint(protocol) + peer.entrypoints = [Entrypoint.parse('tcp://uri_name:40403')] + result = await peer.validate_entrypoint(protocol) self.assertTrue(result) diff --git a/tests/p2p/test_protocol.py b/tests/p2p/test_protocol.py index 317675c81..7187c7149 100644 --- a/tests/p2p/test_protocol.py +++ b/tests/p2p/test_protocol.py @@ -6,7 +6,7 @@ from twisted.python.failure import Failure from hathor.p2p.entrypoint import Entrypoint -from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer import Peer from hathor.p2p.protocol import HathorLineReceiver, HathorProtocol from hathor.simulator import FakeConnection from hathor.util import json_dumps, json_loadb @@ -19,10 +19,10 @@ class BaseHathorProtocolTestCase(unittest.TestCase): def setUp(self) -> None: super().setUp() self.network = 'testnet' - self.peer_id1 = PeerId() - self.peer_id2 = PeerId() - self.manager1 = self.create_peer(self.network, peer_id=self.peer_id1) - self.manager2 = self.create_peer(self.network, peer_id=self.peer_id2) + self.peer1 = Peer() + self.peer2 = Peer() + self.manager1 = self.create_peer(self.network, peer=self.peer1) + self.manager2 = self.create_peer(self.network, peer=self.peer2) self.conn = FakeConnection(self.manager1, self.manager2) def assertAndStepConn(self, conn: FakeConnection, regex1: bytes, regex2: Optional[bytes] = None) -> None: @@ -70,11 +70,11 @@ def _check_cmd_and_value(self, result: bytes, expected: tuple[bytes, bytes]) -> def test_on_connect(self) -> None: self._check_result_only_cmd(self.conn.peek_tr1_value(), b'HELLO') - def test_peer_id_with_entrypoint(self) -> None: + def test_peer_with_entrypoint(self) -> None: entrypoint_str = 'tcp://192.168.1.1:54321' entrypoint = Entrypoint.parse(entrypoint_str) - self.peer_id1.entrypoints.append(entrypoint) - self.peer_id2.entrypoints.append(entrypoint) + self.peer1.entrypoints.append(entrypoint) + self.peer2.entrypoints.append(entrypoint) self.conn.run_one_step() # HELLO msg1 = self.conn.peek_tr1_value() @@ -196,7 +196,7 @@ def test_valid_hello(self) -> None: self.assertFalse(self.conn.tr2.disconnecting) def test_invalid_same_peer_id(self) -> None: - manager3 = self.create_peer(self.network, peer_id=self.peer_id1) + manager3 = self.create_peer(self.network, peer=self.peer1) conn = FakeConnection(self.manager1, manager3) conn.run_one_step() # HELLO conn.run_one_step() # PEER-ID @@ -213,7 +213,7 @@ def test_invalid_same_peer_id2(self) -> None: # runs the main loop. self.conn.disable_idle_timeout() # Create new peer and disable idle timeout. - manager3 = self.create_peer(self.network, peer_id=self.peer_id2) + manager3 = self.create_peer(self.network, peer=self.peer2) conn = FakeConnection(manager3, self.manager1) # Disable idle timeout. 
conn.disable_idle_timeout() @@ -287,26 +287,26 @@ def test_on_disconnect_after_hello(self) -> None: self.conn.disconnect(Failure(Exception('testing'))) self.assertNotIn(self.conn.proto1, self.manager1.connections.handshaking_peers) - def test_on_disconnect_after_peer_id(self) -> None: + def test_on_disconnect_after_peer(self) -> None: self.conn.run_one_step() # HELLO self.assertIn(self.conn.proto1, self.manager1.connections.handshaking_peers) # No peer id in the peer_storage (known_peers) - self.assertNotIn(self.peer_id2.id, self.manager1.connections.peer_storage) + self.assertNotIn(self.peer2.id, self.manager1.connections.peer_storage) # The peer READY now depends on a message exchange from both peers, so we need one more step self.conn.run_one_step() # PEER-ID self.conn.run_one_step() # READY self.assertIn(self.conn.proto1, self.manager1.connections.connected_peers.values()) # Peer id 2 in the peer_storage (known_peers) after connection - self.assertIn(self.peer_id2.id, self.manager1.connections.peer_storage) + self.assertIn(self.peer2.id, self.manager1.connections.peer_storage) self.assertNotIn(self.conn.proto1, self.manager1.connections.handshaking_peers) self.conn.disconnect(Failure(Exception('testing'))) # Peer id 2 in the peer_storage (known_peers) after disconnection but before looping call - self.assertIn(self.peer_id2.id, self.manager1.connections.peer_storage) + self.assertIn(self.peer2.id, self.manager1.connections.peer_storage) self.assertNotIn(self.conn.proto1, self.manager1.connections.connected_peers.values()) self.clock.advance(10) # Peer id 2 removed from peer_storage (known_peers) after disconnection and after looping call - self.assertNotIn(self.peer_id2.id, self.manager1.connections.peer_storage) + self.assertNotIn(self.peer2.id, self.manager1.connections.peer_storage) def test_idle_connection(self) -> None: self.clock.advance(self._settings.PEER_IDLE_TIMEOUT - 10) diff --git a/tests/p2p/test_sync_v2.py b/tests/p2p/test_sync_v2.py index 06e35ebdf..3e8af4f8b 100644 --- a/tests/p2p/test_sync_v2.py +++ b/tests/p2p/test_sync_v2.py @@ -7,7 +7,7 @@ from twisted.python.failure import Failure from hathor.p2p.messages import ProtocolMessages -from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer import Peer from hathor.p2p.states import ReadyState from hathor.p2p.sync_v2.agent import NodeBlockSync, _HeightInfo from hathor.simulator import FakeConnection @@ -66,9 +66,9 @@ def _run_restart_test(self, *, full_verification: bool, use_tx_storage_cache: bo # Create a new peer and run sync for a while (but stop before getting synced). path = self.mkdtemp() - peer_id = PeerId() + peer = Peer() builder2 = self.simulator.get_default_builder() \ - .set_peer_id(peer_id) \ + .set_peer(peer) \ .disable_sync_v1() \ .enable_sync_v2() \ .use_rocksdb(path) @@ -107,7 +107,7 @@ def _run_restart_test(self, *, full_verification: bool, use_tx_storage_cache: bo # Restart full node using the same db. builder3 = self.simulator.get_default_builder() \ - .set_peer_id(peer_id) \ + .set_peer(peer) \ .disable_sync_v1() \ .enable_sync_v2() \ .use_rocksdb(path) @@ -220,9 +220,9 @@ def test_exceeds_streaming_and_mempool_limits(self) -> None: print() # Create a new peer and run sync for a while (but stop before getting synced). 
- peer_id = PeerId() + peer = Peer() builder2 = self.simulator.get_default_builder() \ - .set_peer_id(peer_id) \ + .set_peer(peer) \ .disable_sync_v1() \ .enable_sync_v2() \ @@ -311,9 +311,9 @@ def custom_gen_new_tx(manager: HathorManager, _address: str, value: int) -> Tran self.assertGreater(mempool_tips_count, 30) # Create a new peer and run sync for a while (but stop before getting synced). - peer_id = PeerId() + peer = Peer() builder2 = self.simulator.get_default_builder() \ - .set_peer_id(peer_id) \ + .set_peer(peer) \ .disable_sync_v1() \ .enable_sync_v2() \ diff --git a/tests/resources/p2p/test_add_peer.py b/tests/resources/p2p/test_add_peer.py index c22598b8a..79bb9fa5d 100644 --- a/tests/resources/p2p/test_add_peer.py +++ b/tests/resources/p2p/test_add_peer.py @@ -1,7 +1,7 @@ from twisted.internet.defer import inlineCallbacks from hathor.p2p.entrypoint import Entrypoint -from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer import Peer from hathor.p2p.resources import AddPeersResource from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest @@ -21,7 +21,7 @@ def test_connecting_peers(self): self.assertTrue(data['success']) # test when we send a peer we're already connected to - peer = PeerId() + peer = Peer() peer.entrypoints = [Entrypoint.parse('tcp://localhost:8006')] self.manager.connections.peer_storage.add(peer) response = yield self.web.post('p2p/peers', ['tcp://localhost:8006', 'tcp://localhost:8007']) diff --git a/tests/simulation/base.py b/tests/simulation/base.py index 1811dd873..e3de5b68e 100644 --- a/tests/simulation/base.py +++ b/tests/simulation/base.py @@ -48,7 +48,7 @@ def create_peer( # type: ignore[override] simulator = self.simulator builder = simulator.get_default_builder() \ - .set_peer_id(self.get_random_peer_id_from_pool(rng=simulator.rng)) \ + .set_peer(self.get_random_peer_from_pool(rng=simulator.rng)) \ .set_soft_voided_tx_ids(soft_voided_tx_ids) \ .set_sync_v1_support(sync_v1_support) \ .set_sync_v2_support(sync_v2_support) diff --git a/tests/simulation/test_simulator_itself.py b/tests/simulation/test_simulator_itself.py index 22b1c311b..146f327ce 100644 --- a/tests/simulation/test_simulator_itself.py +++ b/tests/simulation/test_simulator_itself.py @@ -2,7 +2,7 @@ from hathor.builder import SyncSupportLevel from hathor.manager import HathorManager -from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer import Peer from hathor.simulator import FakeConnection, Simulator from tests import unittest @@ -42,7 +42,7 @@ def tearDown(self) -> None: def create_simulator_peer( self, simulator: Simulator, - peer_id_pool: list[PeerId], + peer_pool: list[Peer], enable_sync_v1: bool | None = None, enable_sync_v2: bool | None = None ) -> HathorManager: @@ -59,7 +59,7 @@ def create_simulator_peer( sync_v2_support = SyncSupportLevel.ENABLED if enable_sync_v2 else SyncSupportLevel.DISABLED builder = simulator.get_default_builder() \ - .set_peer_id(self.get_random_peer_id_from_pool()) \ + .set_peer(self.get_random_peer_from_pool()) \ .set_sync_v1_support(sync_v1_support) \ .set_sync_v2_support(sync_v2_support) @@ -74,9 +74,9 @@ def _simulate_run(self, run_i: int, simulator: Simulator) -> list[HathorManager] nodes = [] miners = [] tx_generators = [] - peer_id_pool = self.new_peer_id_pool() + peer_pool = self.new_peer_pool() - manager = self.create_simulator_peer(simulator, peer_id_pool) + manager = self.create_simulator_peer(simulator, peer_pool) nodes.append(manager) miner = simulator.create_miner(manager, hashpower=10e6) 
miner.start() @@ -85,7 +85,7 @@ def _simulate_run(self, run_i: int, simulator: Simulator) -> list[HathorManager] simulator.run(10) for i, hashpower in enumerate([10e6, 8e6, 5e6]): - manager = self.create_simulator_peer(simulator, peer_id_pool) + manager = self.create_simulator_peer(simulator, peer_pool) for node in nodes: conn = FakeConnection(manager, node, latency=0.085) simulator.add_connection(conn) @@ -103,7 +103,7 @@ def _simulate_run(self, run_i: int, simulator: Simulator) -> list[HathorManager] self.log.debug(f'run{run_i}: adding late node') - late_manager = self.create_simulator_peer(simulator, peer_id_pool) + late_manager = self.create_simulator_peer(simulator, peer_pool) for node in nodes: conn = FakeConnection(late_manager, node, latency=0.300) simulator.add_connection(conn) @@ -152,18 +152,18 @@ def test_determinism_interleaved(self) -> None: miners2 = [] tx_generators1 = [] tx_generators2 = [] - peer_id_pool1 = self.new_peer_id_pool() - peer_id_pool2 = self.new_peer_id_pool() + peer_pool1 = self.new_peer_pool() + peer_pool2 = self.new_peer_pool() self.log.debug('part1 simulator1') - manager1 = self.create_simulator_peer(self.simulator1, peer_id_pool1) + manager1 = self.create_simulator_peer(self.simulator1, peer_pool1) nodes1.append(manager1) miner1 = self.simulator1.create_miner(manager1, hashpower=10e6) miner1.start() miners1.append(miner1) self.log.debug('part1 simulator2') - manager2 = self.create_simulator_peer(self.simulator2, peer_id_pool2) + manager2 = self.create_simulator_peer(self.simulator2, peer_pool2) nodes2.append(manager2) miner2 = self.simulator2.create_miner(manager2, hashpower=10e6) miner2.start() @@ -179,7 +179,7 @@ def test_determinism_interleaved(self) -> None: for i, hashpower in enumerate([10e6, 8e6, 5e6]): self.log.debug(f'part2.{i} simulator1') - manager1 = self.create_simulator_peer(self.simulator1, peer_id_pool1) + manager1 = self.create_simulator_peer(self.simulator1, peer_pool1) for node in nodes1: conn = FakeConnection(manager1, node, latency=0.085) self.simulator1.add_connection(conn) @@ -189,7 +189,7 @@ def test_determinism_interleaved(self) -> None: miners1.append(miner1) self.log.debug(f'part2.{i} simulator2') - manager2 = self.create_simulator_peer(self.simulator2, peer_id_pool2) + manager2 = self.create_simulator_peer(self.simulator2, peer_pool2) for node in nodes2: conn = FakeConnection(manager2, node, latency=0.085) self.simulator2.add_connection(conn) @@ -222,14 +222,14 @@ def test_determinism_interleaved(self) -> None: self.log.debug('adding late node') self.log.debug('part4 simulator1') - late_manager1 = self.create_simulator_peer(self.simulator1, peer_id_pool1) + late_manager1 = self.create_simulator_peer(self.simulator1, peer_pool1) for node in nodes1: conn = FakeConnection(late_manager1, node, latency=0.300) self.simulator1.add_connection(conn) nodes1.append(late_manager1) self.log.debug('part4 simulator2') - late_manager2 = self.create_simulator_peer(self.simulator2, peer_id_pool2) + late_manager2 = self.create_simulator_peer(self.simulator2, peer_pool2) for node in nodes2: conn = FakeConnection(late_manager2, node, latency=0.300) self.simulator2.add_connection(conn) diff --git a/tests/unittest.py b/tests/unittest.py index bac8fca7c..fc5ec27f1 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -17,7 +17,7 @@ from hathor.event import EventManager from hathor.event.storage import EventStorage from hathor.manager import HathorManager -from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer import Peer from 
hathor.p2p.sync_v1.agent import NodeSyncTimestamp from hathor.p2p.sync_v2.agent import NodeBlockSync from hathor.p2p.sync_version import SyncVersion @@ -40,7 +40,7 @@ def short_hashes(container: Collection[bytes]) -> Iterable[str]: return map(lambda hash_bytes: hash_bytes[-2:].hex(), container) -def _load_peer_id_pool(file_path: Optional[str] = None) -> Iterator[PeerId]: +def _load_peer_pool(file_path: Optional[str] = None) -> Iterator[Peer]: import json if file_path is None: @@ -49,7 +49,7 @@ def _load_peer_id_pool(file_path: Optional[str] = None) -> Iterator[PeerId]: with open(file_path) as peer_id_pool_file: peer_id_pool_dict = json.load(peer_id_pool_file) for peer_id_dict in peer_id_pool_dict: - yield PeerId.create_from_json(peer_id_dict) + yield Peer.create_from_json(peer_id_dict) def _get_default_peer_id_pool_filepath() -> str: @@ -60,7 +60,7 @@ def _get_default_peer_id_pool_filepath() -> str: return file_path -PEER_ID_POOL = list(_load_peer_id_pool()) +PEER_ID_POOL = list(_load_peer_pool()) # XXX: Sync*Params classes should be inherited before the TestCase class when a sync version is needed @@ -97,10 +97,10 @@ def build(self) -> BuildArtifacts: artifacts.manager.connections.disable_rate_limiter() return artifacts - def _get_peer_id(self) -> PeerId: - if self._peer_id is not None: - return self._peer_id - return PeerId() + def _get_peer(self) -> Peer: + if self._peer is not None: + return self._peer + return Peer() def _get_reactor(self) -> Reactor: if self._reactor is None: @@ -120,7 +120,7 @@ def setUp(self) -> None: self.clock.advance(time.time()) self.reactor = self.clock self.log = logger.new() - self.reset_peer_id_pool() + self.reset_peer_pool() self.seed = secrets.randbits(64) if self.seed_config is None else self.seed_config self.log.info('set seed', seed=self.seed) self.rng = Random(self.seed) @@ -132,23 +132,22 @@ def tearDown(self) -> None: for fn in self._pending_cleanups: fn() - def reset_peer_id_pool(self) -> None: - self._free_peer_id_pool = self.new_peer_id_pool() + def reset_peer_pool(self) -> None: + self._free_peer_pool = self.new_peer_pool() - def new_peer_id_pool(self) -> list[PeerId]: + def new_peer_pool(self) -> list[Peer]: return PEER_ID_POOL.copy() - def get_random_peer_id_from_pool(self, pool: Optional[list[PeerId]] = None, - rng: Optional[Random] = None) -> PeerId: + def get_random_peer_from_pool(self, pool: Optional[list[Peer]] = None, rng: Optional[Random] = None) -> Peer: if pool is None: - pool = self._free_peer_id_pool + pool = self._free_peer_pool if not pool: raise RuntimeError('no more peer ids on the pool') if rng is None: rng = self.rng - peer_id = self.rng.choice(pool) - pool.remove(peer_id) - return peer_id + peer = self.rng.choice(pool) + pool.remove(peer) + return peer def mkdtemp(self) -> str: tmpdir = tempfile.mkdtemp() @@ -194,7 +193,7 @@ def create_peer_from_builder(self, builder: Builder, start_manager: bool = True) def create_peer( # type: ignore[no-untyped-def] self, network: str, - peer_id: PeerId | None = None, + peer: Peer | None = None, wallet: BaseWallet | None = None, tx_storage: TransactionStorage | None = None, unlock_wallet: bool = True, @@ -225,9 +224,9 @@ def create_peer( # type: ignore[no-untyped-def] if pubsub: builder.set_pubsub(pubsub) - if peer_id is None: - peer_id = PeerId() - builder.set_peer_id(peer_id) + if peer is None: + peer = Peer() + builder.set_peer(peer) if not wallet: wallet = self._create_test_wallet() From ada19b8d535e8af07809f5a7919cd27be05b083e Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Fri, 16 Aug 
2024 14:53:42 +0200 Subject: [PATCH 15/61] refactor(p2p): use PeerId type instead of str --- hathor/builder/builder.py | 6 +++--- hathor/builder/cli_builder.py | 6 +++--- hathor/manager.py | 9 ++++---- hathor/metrics.py | 2 +- hathor/p2p/entrypoint.py | 6 +----- hathor/p2p/manager.py | 29 +++++++++++++------------- hathor/p2p/netfilter/matches.py | 2 +- hathor/p2p/peer.py | 11 +++++----- hathor/p2p/peer_id.py | 19 +++++++++++++++++ hathor/p2p/peer_storage.py | 3 ++- hathor/p2p/protocol.py | 5 +++-- hathor/p2p/resources/status.py | 6 +++--- hathor/p2p/states/peer_id.py | 5 +++-- hathor/p2p/states/ready.py | 2 +- hathor/p2p/utils.py | 10 +++------ hathor/stratum/stratum.py | 2 +- hathor/sysctl/p2p/manager.py | 13 ++++++++---- tests/event/event_simulation_tester.py | 3 +-- tests/p2p/test_peer_id.py | 3 ++- tests/resources/p2p/test_status.py | 4 ++-- tests/sysctl/test_p2p.py | 20 +++++++++++------- 21 files changed, 96 insertions(+), 70 deletions(-) create mode 100644 hathor/p2p/peer_id.py diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index 8160bae17..cffa2c08e 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -47,7 +47,7 @@ TransactionStorage, ) from hathor.transaction.vertex_parser import VertexParser -from hathor.util import Random, get_environment_info, not_none +from hathor.util import Random, get_environment_info from hathor.verification.verification_service import VerificationService from hathor.verification.vertex_verifiers import VertexVerifiers from hathor.vertex_handler import VertexHandler @@ -264,7 +264,7 @@ def build(self) -> BuildArtifacts: rng=self._rng, checkpoints=self._checkpoints, capabilities=self._capabilities, - environment_info=get_environment_info(self._cmdline, peer.id), + environment_info=get_environment_info(self._cmdline, str(peer.id)), bit_signaling_service=bit_signaling_service, verification_service=verification_service, cpu_mining_service=cpu_mining_service, @@ -515,7 +515,7 @@ def _get_or_create_event_manager(self) -> EventManager: reactor = self._get_reactor() storage = self._get_or_create_event_storage() factory = EventWebsocketFactory( - peer_id=not_none(peer.id), + peer_id=str(peer.id), network=settings.NETWORK_NAME, reactor=reactor, event_storage=storage, diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index 54e7b8fa4..f13b88dad 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -42,7 +42,7 @@ from hathor.reactor import ReactorProtocol as Reactor from hathor.stratum import StratumFactory from hathor.transaction.vertex_parser import VertexParser -from hathor.util import Random, not_none +from hathor.util import Random from hathor.verification.verification_service import VerificationService from hathor.verification.vertex_verifiers import VertexVerifiers from hathor.vertex_handler import VertexHandler @@ -225,7 +225,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: if self._args.x_enable_event_queue: self.event_ws_factory = EventWebsocketFactory( - peer_id=not_none(peer.id), + peer_id=str(peer.id), network=network, reactor=reactor, event_storage=event_storage @@ -354,7 +354,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: event_manager=event_manager, wallet=self.wallet, checkpoints=settings.CHECKPOINTS, - environment_info=get_environment_info(args=str(self._args), peer_id=peer.id), + environment_info=get_environment_info(args=str(self._args), peer_id=str(peer.id)), full_verification=full_verification, 
enable_event_queue=self._args.x_enable_event_queue, bit_signaling_service=bit_signaling_service, diff --git a/hathor/manager.py b/hathor/manager.py index 0218fd663..28eb7930d 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -47,6 +47,7 @@ from hathor.mining.cpu_mining_service import CpuMiningService from hathor.p2p.manager import ConnectionsManager from hathor.p2p.peer import Peer +from hathor.p2p.peer_id import PeerId from hathor.profiler import get_cpu_profiler from hathor.pubsub import HathorEvents, PubSubManager from hathor.reactor import ReactorProtocol as Reactor @@ -225,7 +226,7 @@ def __init__( self._full_verification = full_verification # List of whitelisted peers - self.peers_whitelist: list[str] = [] + self.peers_whitelist: list[PeerId] = [] # List of capabilities of the peer if capabilities is not None: @@ -297,7 +298,7 @@ def start(self) -> None: sys.exit(-1) if self._enable_event_queue: - self._event_manager.start(not_none(self.my_peer.id)) + self._event_manager.start(str(not_none(self.my_peer.id))) self.state = self.NodeState.INITIALIZING self.pubsub.publish(HathorEvents.MANAGER_ON_START) @@ -976,7 +977,7 @@ def on_new_tx( def has_sync_version_capability(self) -> bool: return self._settings.CAPABILITY_SYNC_VERSION in self.capabilities - def add_peer_to_whitelist(self, peer_id: str) -> None: + def add_peer_to_whitelist(self, peer_id: PeerId) -> None: if not self._settings.ENABLE_PEER_WHITELIST: return @@ -985,7 +986,7 @@ def add_peer_to_whitelist(self, peer_id: str) -> None: else: self.peers_whitelist.append(peer_id) - def remove_peer_from_whitelist_and_disconnect(self, peer_id: str) -> None: + def remove_peer_from_whitelist_and_disconnect(self, peer_id: PeerId) -> None: if not self._settings.ENABLE_PEER_WHITELIST: return diff --git a/hathor/metrics.py b/hathor/metrics.py index 92b73d3ad..046c8c54d 100644 --- a/hathor/metrics.py +++ b/hathor/metrics.py @@ -253,7 +253,7 @@ def collect_peer_connection_metrics(self) -> None: metric = PeerConnectionMetrics( connection_string=str(connection.entrypoint) if connection.entrypoint else "", - peer_id=connection.peer.id, + peer_id=str(connection.peer.id), network=connection.network, received_messages=connection.metrics.received_messages, sent_messages=connection.metrics.sent_messages, diff --git a/hathor/p2p/entrypoint.py b/hathor/p2p/entrypoint.py index 3340f784d..23ead1199 100644 --- a/hathor/p2p/entrypoint.py +++ b/hathor/p2p/entrypoint.py @@ -21,18 +21,14 @@ from twisted.internet.interfaces import IStreamClientEndpoint from typing_extensions import Self +from hathor.p2p.peer_id import PeerId from hathor.reactor import ReactorProtocol as Reactor -from hathor.types import Hash class Protocol(Enum): TCP = 'tcp' -class PeerId(Hash): - pass - - @dataclass(frozen=True, slots=True) class Entrypoint: """Endpoint description (returned from DNS query, or received from the p2p network) may contain a peer-id.""" diff --git a/hathor/p2p/manager.py b/hathor/p2p/manager.py index 4cf4e240e..f70cd1424 100644 --- a/hathor/p2p/manager.py +++ b/hathor/p2p/manager.py @@ -29,6 +29,7 @@ from hathor.p2p.netfilter.factory import NetfilterFactory from hathor.p2p.peer import Peer from hathor.p2p.peer_discovery import PeerDiscovery +from hathor.p2p.peer_id import PeerId from hathor.p2p.peer_storage import PeerStorage from hathor.p2p.protocol import HathorProtocol from hathor.p2p.rate_limiter import RateLimiter @@ -51,11 +52,11 @@ class _SyncRotateInfo(NamedTuple): - candidates: list[str] - old: set[str] - new: set[str] - to_disable: set[str] - 
to_enable: set[str] + candidates: list[PeerId] + old: set[PeerId] + new: set[PeerId] + to_disable: set[PeerId] + to_enable: set[PeerId] class _ConnectingPeer(NamedTuple): @@ -79,7 +80,7 @@ class GlobalRateLimiter: manager: Optional['HathorManager'] connections: set[HathorProtocol] - connected_peers: dict[str, HathorProtocol] + connected_peers: dict[PeerId, HathorProtocol] connecting_peers: dict[IStreamClientEndpoint, _ConnectingPeer] handshaking_peers: set[HathorProtocol] whitelist_only: bool @@ -174,7 +175,7 @@ def __init__( self.lc_sync_update_interval: float = 5 # seconds # Peers that always have sync enabled. - self.always_enable_sync: set[str] = set() + self.always_enable_sync: set[PeerId] = set() # Timestamp of the last time sync was updated. self._last_sync_rotate: float = 0. @@ -485,7 +486,7 @@ def iter_not_ready_endpoints(self) -> Iterable[Entrypoint]: else: self.log.warn('handshaking protocol has empty connection string', protocol=protocol) - def is_peer_connected(self, peer_id: str) -> bool: + def is_peer_connected(self, peer_id: PeerId) -> bool: """ :type peer_id: string (peer.id) """ @@ -729,7 +730,7 @@ def get_connection_to_drop(self, protocol: HathorProtocol) -> HathorProtocol: assert protocol.peer.id is not None assert protocol.my_peer.id is not None other_connection = self.connected_peers[protocol.peer.id] - if protocol.my_peer.id > protocol.peer.id: + if bytes(protocol.my_peer.id) > bytes(protocol.peer.id): # connection started by me is kept if not protocol.inbound: # other connection is dropped @@ -751,7 +752,7 @@ def drop_connection(self, protocol: HathorProtocol) -> None: self.log.debug('dropping connection', peer_id=protocol.peer.id, protocol=type(protocol).__name__) protocol.send_error_and_close_connection('Connection droped') - def drop_connection_by_peer_id(self, peer_id: str) -> None: + def drop_connection_by_peer_id(self, peer_id: PeerId) -> None: """ Drop a connection by peer id """ protocol = self.connected_peers.get(peer_id) @@ -765,9 +766,9 @@ def sync_update(self) -> None: except Exception: self.log.error('_sync_rotate_if_needed failed', exc_info=True) - def set_always_enable_sync(self, values: list[str]) -> None: + def set_always_enable_sync(self, values: list[PeerId]) -> None: """Set a new list of peers to always enable sync. 
This operation completely replaces the previous list.""" - new: set[str] = set(values) + new: set[PeerId] = set(values) old = self.always_enable_sync if new == old: @@ -792,14 +793,14 @@ def set_always_enable_sync(self, values: list[str]) -> None: def _calculate_sync_rotate(self) -> _SyncRotateInfo: """Calculate new sync rotation.""" - current_enabled: set[str] = set() + current_enabled: set[PeerId] = set() for peer_id, conn in self.connected_peers.items(): if conn.is_sync_enabled(): current_enabled.add(peer_id) candidates = list(self.connected_peers.keys()) self.rng.shuffle(candidates) - selected_peers: set[str] = set(candidates[:self.MAX_ENABLED_SYNC]) + selected_peers: set[PeerId] = set(candidates[:self.MAX_ENABLED_SYNC]) to_disable = current_enabled - selected_peers to_enable = selected_peers - current_enabled diff --git a/hathor/p2p/netfilter/matches.py b/hathor/p2p/netfilter/matches.py index d686ac7aa..d7eb714bd 100644 --- a/hathor/p2p/netfilter/matches.py +++ b/hathor/p2p/netfilter/matches.py @@ -130,7 +130,7 @@ def match(self, context: 'NetfilterContext') -> bool: if context.protocol.peer is None: return False - if context.protocol.peer.id != self.peer_id: + if str(context.protocol.peer.id) != self.peer_id: return False return True diff --git a/hathor/p2p/peer.py b/hathor/p2p/peer.py index 9d40ec7b6..bb48e46fd 100644 --- a/hathor/p2p/peer.py +++ b/hathor/p2p/peer.py @@ -32,6 +32,7 @@ from hathor.conf.get_settings import get_global_settings from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.p2p.entrypoint import Entrypoint +from hathor.p2p.peer_id import PeerId from hathor.p2p.utils import discover_dns, generate_certificate from hathor.util import not_none @@ -59,7 +60,7 @@ class Peer: Usually a peer will have only one entrypoint. """ - id: Optional[str] + id: Optional[PeerId] entrypoints: list[Entrypoint] private_key: Optional[rsa.RSAPrivateKeyWithSerialization] public_key: Optional[rsa.RSAPublicKey] @@ -135,7 +136,7 @@ def generate_keys(self, key_size: int = 2048) -> None: self.public_key = self.private_key.public_key() self.id = self.calculate_id() - def calculate_id(self) -> str: + def calculate_id(self) -> PeerId: """ Calculate and return the id based on the public key. """ assert self.public_key is not None @@ -143,7 +144,7 @@ def calculate_id(self) -> str: format=serialization.PublicFormat.SubjectPublicKeyInfo) h1 = hashlib.sha256(public_der) h2 = hashlib.sha256(h1.digest()) - return h2.hexdigest() + return PeerId(h2.digest()) def get_public_key(self) -> str: """ Return the public key in DER encoding as an `str`. @@ -189,7 +190,7 @@ def create_from_json(cls, data: dict[str, Any]) -> 'Peer': from a peer connection. """ obj = cls(auto_generate_keys=False) - obj.id = data['id'] + obj.id = PeerId(data['id']) if 'pubKey' in data: public_key_der = base64.b64decode(data['pubKey']) @@ -252,7 +253,7 @@ def to_json(self, include_private_key: bool = False) -> dict[str, Any]: format=serialization.PublicFormat.SubjectPublicKeyInfo) # This format is compatible with libp2p. result = { - 'id': self.id, + 'id': str(self.id), 'pubKey': base64.b64encode(public_der).decode('utf-8'), 'entrypoints': self.entrypoints_as_str(), } diff --git a/hathor/p2p/peer_id.py b/hathor/p2p/peer_id.py new file mode 100644 index 000000000..9a3c5eaca --- /dev/null +++ b/hathor/p2p/peer_id.py @@ -0,0 +1,19 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.types import Hash + + +class PeerId(Hash): + pass diff --git a/hathor/p2p/peer_storage.py b/hathor/p2p/peer_storage.py index 6efb8a549..b6be116b7 100644 --- a/hathor/p2p/peer_storage.py +++ b/hathor/p2p/peer_storage.py @@ -13,9 +13,10 @@ # limitations under the License. from hathor.p2p.peer import Peer +from hathor.p2p.peer_id import PeerId -class PeerStorage(dict[str, Peer]): +class PeerStorage(dict[PeerId, Peer]): """ PeerStorage is used to store all known peers in memory. It is a dict of peer objects, and peers can be retrieved by their `peer.id`. """ diff --git a/hathor/p2p/protocol.py b/hathor/p2p/protocol.py index eeec521f2..5a77bdacb 100644 --- a/hathor/p2p/protocol.py +++ b/hathor/p2p/protocol.py @@ -27,6 +27,7 @@ from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.messages import ProtocolMessages from hathor.p2p.peer import Peer +from hathor.p2p.peer_id import PeerId from hathor.p2p.rate_limiter import RateLimiter from hathor.p2p.states import BaseState, HelloState, PeerIdState, ReadyState from hathor.p2p.sync_version import SyncVersion @@ -192,7 +193,7 @@ def get_short_remote(self) -> str: assert self.transport is not None return format_address(self.transport.getPeer()) - def get_peer_id(self) -> Optional[str]: + def get_peer_id(self) -> Optional[PeerId]: """Get peer id for logging.""" if self.peer and self.peer.id: return self.peer.id @@ -201,7 +202,7 @@ def get_peer_id(self) -> Optional[str]: def get_short_peer_id(self) -> Optional[str]: """Get short peer id for logging.""" if self.peer and self.peer.id: - return self.peer.id[:7] + return str(self.peer.id)[:7] return None def get_logger_context(self) -> dict[str, Optional[str]]: diff --git a/hathor/p2p/resources/status.py b/hathor/p2p/resources/status.py index 225665930..bd099d3e1 100644 --- a/hathor/p2p/resources/status.py +++ b/hathor/p2p/resources/status.py @@ -64,7 +64,7 @@ def render_GET(self, request): status = {} status[conn.state.sync_agent.name] = conn.state.sync_agent.get_status() connected_peers.append({ - 'id': conn.peer.id, + 'id': str(conn.peer.id), 'app_version': conn.app_version, 'current_time': now, 'uptime': now - conn.connection_time, @@ -82,7 +82,7 @@ def render_GET(self, request): known_peers = [] for peer in self.manager.connections.peer_storage.values(): known_peers.append({ - 'id': peer.id, + 'id': str(peer.id), 'entrypoints': peer.entrypoints_as_str(), 'last_seen': now - peer.last_seen, 'flags': [flag.value for flag in peer.flags], @@ -102,7 +102,7 @@ def render_GET(self, request): data = { 'server': { - 'id': self.manager.connections.my_peer.id, + 'id': str(self.manager.connections.my_peer.id), 'app_version': app, 'state': self.manager.state.value, 'network': self.manager.network, diff --git a/hathor/p2p/states/peer_id.py b/hathor/p2p/states/peer_id.py index 439ccd4b0..7534487b3 100644 --- a/hathor/p2p/states/peer_id.py +++ b/hathor/p2p/states/peer_id.py @@ -19,6 +19,7 @@ from hathor.conf.settings import HathorSettings from hathor.p2p.messages import ProtocolMessages from hathor.p2p.peer import Peer +from hathor.p2p.peer_id import PeerId from hathor.p2p.states.base 
import BaseState from hathor.util import json_dumps, json_loads @@ -68,7 +69,7 @@ def send_peer_id(self) -> None: protocol = self.protocol my_peer = protocol.my_peer hello = { - 'id': my_peer.id, + 'id': str(my_peer.id), 'pubKey': my_peer.get_public_key(), 'entrypoints': my_peer.entrypoints_as_str(), } @@ -139,7 +140,7 @@ async def handle_peer_id(self, payload: str) -> None: self.send_ready() - def _should_block_peer(self, peer_id: str) -> bool: + def _should_block_peer(self, peer_id: PeerId) -> bool: """ Determine if peer should not be allowed to connect. Currently this is only because the peer is not in a whitelist and whitelist blocking is active. diff --git a/hathor/p2p/states/ready.py b/hathor/p2p/states/ready.py index c04d97b4c..1d011cf00 100644 --- a/hathor/p2p/states/ready.py +++ b/hathor/p2p/states/ready.py @@ -165,7 +165,7 @@ def send_peers(self, peer_list: Iterable['Peer']) -> None: for peer in peer_list: if peer.entrypoints: data.append({ - 'id': peer.id, + 'id': str(peer.id), 'entrypoints': peer.entrypoints_as_str(), }) self.send_message(ProtocolMessages.PEERS, json_dumps(data)) diff --git a/hathor/p2p/utils.py b/hathor/p2p/utils.py index 7141030d1..c0a25f3d8 100644 --- a/hathor/p2p/utils.py +++ b/hathor/p2p/utils.py @@ -31,6 +31,7 @@ from hathor.indexes.height_index import HeightInfo from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.peer_discovery import DNSPeerDiscovery +from hathor.p2p.peer_id import PeerId from hathor.transaction.genesis import get_representation_for_all_genesis @@ -142,7 +143,7 @@ def parse_file(text: str, *, header: Optional[str] = None) -> list[str]: return list(nonblank_lines) -def parse_whitelist(text: str, *, header: Optional[str] = None) -> set[str]: +def parse_whitelist(text: str, *, header: Optional[str] = None) -> set[PeerId]: """ Parses the list of whitelist peer ids Example: @@ -161,12 +162,7 @@ def parse_whitelist(text: str, *, header: Optional[str] = None) -> set[str]: """ lines = parse_file(text, header=header) - peerids = {line.split()[0] for line in lines} - for peerid in peerids: - bpeerid = bytes.fromhex(peerid) - if len(bpeerid) != 32: - raise ValueError('invalid peerid size') - return peerids + return {PeerId(line.split()[0]) for line in lines} def format_address(addr: IAddress) -> str: diff --git a/hathor/stratum/stratum.py b/hathor/stratum/stratum.py index 92111a431..e3658d457 100644 --- a/hathor/stratum/stratum.py +++ b/hathor/stratum/stratum.py @@ -670,7 +670,7 @@ def create_job_tx(self, jobid: UUID) -> BaseTransaction: assert self.miner_id is not None # Only get first 32 bytes of peer_id because block data is limited to 100 bytes - data = '{}-{}-{}'.format(peer_id[:32], self.miner_id.hex, jobid.hex).encode() + data = '{}-{}-{}'.format(str(peer_id)[:32], self.miner_id.hex, jobid.hex).encode() data = data[:self._settings.BLOCK_DATA_MAX_SIZE] block = self.manager.generate_mining_block(data=data, address=self.miner_address, merge_mined=self.merged_mining) diff --git a/hathor/sysctl/p2p/manager.py b/hathor/sysctl/p2p/manager.py index e821039bd..9f9856a42 100644 --- a/hathor/sysctl/p2p/manager.py +++ b/hathor/sysctl/p2p/manager.py @@ -15,6 +15,7 @@ import os from hathor.p2p.manager import ConnectionsManager +from hathor.p2p.peer_id import PeerId from hathor.p2p.sync_version import SyncVersion from hathor.p2p.utils import discover_hostname from hathor.sysctl.exception import SysctlException @@ -161,11 +162,11 @@ def set_lc_sync_update_interval(self, value: float) -> None: def get_always_enable_sync(self) -> list[str]: 
"""Return the list of sync-always-enabled peers.""" - return list(self.connections.always_enable_sync) + return list(map(str, self.connections.always_enable_sync)) def set_always_enable_sync(self, values: list[str]) -> None: """Change the list of sync-always-enabled peers.""" - self.connections.set_always_enable_sync(values) + self.connections.set_always_enable_sync(list(map(PeerId, values))) def set_always_enable_sync_readtxt(self, file_path: str) -> None: """Update the list of sync-always-enabled peers from a file.""" @@ -174,7 +175,7 @@ def set_always_enable_sync_readtxt(self, file_path: str) -> None: values: list[str] with open(file_path, 'r') as fp: values = parse_text(fp.read()) - self.connections.set_always_enable_sync(values) + self.connections.set_always_enable_sync(list(map(PeerId, values))) def get_max_enabled_sync(self) -> int: """Return the maximum number of peers running sync simultaneously.""" @@ -230,7 +231,11 @@ def set_kill_connection(self, peer_id: str, force: bool = False) -> None: self.connections.disconnect_all_peers(force=force) return - conn = self.connections.connected_peers.get(peer_id, None) + try: + peer_id_obj = PeerId(peer_id) + except ValueError: + raise SysctlException('invalid peer-id') + conn = self.connections.connected_peers.get(peer_id_obj, None) if conn is None: self.log.warn('Killing connection', peer_id=peer_id) raise SysctlException('peer-id is not connected') diff --git a/tests/event/event_simulation_tester.py b/tests/event/event_simulation_tester.py index 5541c6397..1383a4fd8 100644 --- a/tests/event/event_simulation_tester.py +++ b/tests/event/event_simulation_tester.py @@ -40,8 +40,7 @@ def _create_artifacts(self) -> None: .enable_event_queue() artifacts = self.simulator.create_artifacts(builder) - assert peer.id is not None - self.peer_id: str = peer.id + self.peer_id: str = str(peer.id) self.manager = artifacts.manager self.manager.allow_mining_without_peers() self.settings = artifacts.settings diff --git a/tests/p2p/test_peer_id.py b/tests/p2p/test_peer_id.py index b947f8337..f870f824a 100644 --- a/tests/p2p/test_peer_id.py +++ b/tests/p2p/test_peer_id.py @@ -8,6 +8,7 @@ from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.peer import InvalidPeerIdException, Peer +from hathor.p2p.peer_id import PeerId from hathor.p2p.peer_storage import PeerStorage from hathor.util import not_none from tests import unittest @@ -17,7 +18,7 @@ class PeerIdTest(unittest.TestCase): def test_invalid_id(self) -> None: p1 = Peer() - p1.id = not_none(p1.id)[::-1] + p1.id = PeerId(str(not_none(p1.id))[::-1]) self.assertRaises(InvalidPeerIdException, p1.validate) def test_invalid_public_key(self) -> None: diff --git a/tests/resources/p2p/test_status.py b/tests/resources/p2p/test_status.py index 7ab42ae68..5b156326f 100644 --- a/tests/resources/p2p/test_status.py +++ b/tests/resources/p2p/test_status.py @@ -89,10 +89,10 @@ def test_get_with_one_peer(self): self.assertGreater(server_data['uptime'], 0) self.assertEqual(len(known_peers), 1) - self.assertEqual(known_peers[0]['id'], self.manager2.my_peer.id) + self.assertEqual(known_peers[0]['id'], str(self.manager2.my_peer.id)) self.assertEqual(len(connections['connected_peers']), 1) - self.assertEqual(connections['connected_peers'][0]['id'], self.manager2.my_peer.id) + self.assertEqual(connections['connected_peers'][0]['id'], str(self.manager2.my_peer.id)) @inlineCallbacks def test_connecting_peers(self): diff --git a/tests/sysctl/test_p2p.py b/tests/sysctl/test_p2p.py index 6a3980c5e..ec0366888 100644 --- 
a/tests/sysctl/test_p2p.py +++ b/tests/sysctl/test_p2p.py @@ -2,6 +2,7 @@ import tempfile from unittest.mock import MagicMock +from hathor.p2p.peer_id import PeerId from hathor.sysctl import ConnectionsManagerSysctl from hathor.sysctl.exception import SysctlException from tests import unittest @@ -100,9 +101,12 @@ def test_always_enable_sync(self): connections = manager.connections sysctl = ConnectionsManagerSysctl(connections) - sysctl.unsafe_set('always_enable_sync', ['peer-1', 'peer-2']) - self.assertEqual(connections.always_enable_sync, {'peer-1', 'peer-2'}) - self.assertEqual(set(sysctl.get('always_enable_sync')), {'peer-1', 'peer-2'}) + peer_id_1 = '0e2bd0d8cd1fb6d040801c32ec27e8986ce85eb8810b6c878dcad15bce3b5b1e' + peer_id_2 = '2ff0d2c80c50f724de79f132a2f8cae576c64b57ea531d400577adf7db3e7c15' + + sysctl.unsafe_set('always_enable_sync', [peer_id_1, peer_id_2]) + self.assertEqual(connections.always_enable_sync, {PeerId(peer_id_1), PeerId(peer_id_2)}) + self.assertEqual(set(sysctl.get('always_enable_sync')), {peer_id_1, peer_id_2}) sysctl.unsafe_set('always_enable_sync', []) self.assertEqual(connections.always_enable_sync, set()) @@ -110,8 +114,8 @@ def test_always_enable_sync(self): with tempfile.TemporaryDirectory() as dir_path: content = [ - 'peer-id-1', - 'peer-id-2', + peer_id_1, + peer_id_2, ] file_path = os.path.join(dir_path, 'a.txt') @@ -120,7 +124,7 @@ def test_always_enable_sync(self): fp.close() sysctl.unsafe_set('always_enable_sync.readtxt', file_path) - self.assertEqual(connections.always_enable_sync, set(content)) + self.assertEqual(connections.always_enable_sync, {PeerId(peer_id_1), PeerId(peer_id_2)}) self.assertEqual(set(sysctl.get('always_enable_sync')), set(content)) def test_available_sync_versions(self): @@ -166,9 +170,9 @@ def test_kill_one_connection(self): p2p_manager = manager.connections sysctl = ConnectionsManagerSysctl(p2p_manager) - peer_id = 'my-peer-id' + peer_id = '0e2bd0d8cd1fb6d040801c32ec27e8986ce85eb8810b6c878dcad15bce3b5b1e' conn = MagicMock() - p2p_manager.connected_peers[peer_id] = conn + p2p_manager.connected_peers[PeerId(peer_id)] = conn self.assertEqual(conn.disconnect.call_count, 0) sysctl.unsafe_set('kill_connection', peer_id) self.assertEqual(conn.disconnect.call_count, 1) From f6ddf3c8ba85616288beb1660d79a3edc8c64d30 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Thu, 22 Aug 2024 14:30:50 -0300 Subject: [PATCH 16/61] refactor(metadata): move metadata attributes to static metadata (#1014) --- hathor/builder/resources_builder.py | 4 +- hathor/consensus/poa/poa_block_producer.py | 1 + hathor/feature_activation/feature_service.py | 21 +- .../feature_activation/resources/feature.py | 2 +- hathor/manager.py | 27 ++- hathor/mining/block_template.py | 2 +- hathor/p2p/resources/mining_info.py | 3 +- hathor/p2p/resources/status.py | 7 +- hathor/p2p/sync_v2/agent.py | 4 +- hathor/p2p/sync_v2/streamers.py | 4 +- hathor/stratum/stratum.py | 1 + hathor/transaction/base_transaction.py | 61 +----- hathor/transaction/block.py | 74 +------ hathor/transaction/resources/decode_tx.py | 3 + hathor/transaction/resources/mining.py | 6 +- hathor/transaction/resources/transaction.py | 5 +- hathor/transaction/static_metadata.py | 197 +++++++++++++++++- hathor/transaction/storage/cache_storage.py | 10 +- hathor/transaction/storage/memory_storage.py | 18 +- .../migrations/migrate_static_metadata.py | 67 ++++++ .../migrations/remove_first_nop_features.py | 6 +- hathor/transaction/storage/rocksdb_storage.py | 68 +++++- .../storage/transaction_storage.py | 25 ++- 
hathor/transaction/transaction.py | 43 +--- hathor/transaction/transaction_metadata.py | 71 ++++--- hathor/util.py | 6 +- hathor/verification/block_verifier.py | 9 +- hathor/verification/transaction_verifier.py | 7 +- hathor/verification/verification_service.py | 6 +- hathor/vertex_handler/vertex_handler.py | 5 +- hathor/wallet/resources/send_tokens.py | 8 +- .../resources/thin_wallet/send_tokens.py | 2 +- .../test_feature_service.py | 1 + .../test_feature_simulation.py | 2 +- .../test_mining_simulation.py | 2 +- tests/p2p/test_sync.py | 4 +- tests/poa/test_poa.py | 27 ++- tests/poa/test_poa_verification.py | 2 +- tests/resources/feature/test_feature.py | 25 ++- tests/resources/transaction/test_mining.py | 10 +- tests/resources/transaction/test_pushtx.py | 3 +- tests/resources/transaction/test_tx.py | 13 +- .../resources/transaction/test_utxo_search.py | 8 +- tests/resources/wallet/test_send_tokens.py | 4 +- tests/tx/test_block.py | 32 +-- tests/tx/test_blockchain.py | 6 +- tests/tx/test_cache_storage.py | 1 + tests/tx/test_indexes.py | 12 +- tests/tx/test_reward_lock.py | 19 +- tests/tx/test_tokens.py | 2 +- tests/tx/test_tx.py | 11 +- tests/tx/test_tx_storage.py | 4 +- tests/tx/test_verification.py | 8 +- tests/wallet/test_wallet.py | 4 +- tests/wallet/test_wallet_hd.py | 2 + 55 files changed, 609 insertions(+), 366 deletions(-) create mode 100644 hathor/transaction/storage/migrations/migrate_static_metadata.py diff --git a/hathor/builder/resources_builder.py b/hathor/builder/resources_builder.py index f2c2cc16a..9d48b082a 100644 --- a/hathor/builder/resources_builder.py +++ b/hathor/builder/resources_builder.py @@ -215,7 +215,7 @@ def create_resources(self) -> server.Site: # mining (b'mining', MiningResource(self.manager), root), (b'getmininginfo', MiningInfoResource(self.manager), root), - (b'get_block_template', GetBlockTemplateResource(self.manager), root), + (b'get_block_template', GetBlockTemplateResource(self.manager, settings), root), (b'submit_block', SubmitBlockResource(self.manager), root), (b'tx_parents', TxParentsResource(self.manager), root), # /thin_wallet @@ -281,7 +281,7 @@ def create_resources(self) -> server.Site: (b'balance', BalanceResource(self.manager), wallet_resource), (b'history', HistoryResource(self.manager), wallet_resource), (b'address', AddressResource(self.manager), wallet_resource), - (b'send_tokens', SendTokensResource(self.manager), wallet_resource), + (b'send_tokens', SendTokensResource(self.manager, settings), wallet_resource), (b'sign_tx', SignTxResource(self.manager), wallet_resource), (b'unlock', UnlockWalletResource(self.manager), wallet_resource), (b'lock', LockWalletResource(self.manager), wallet_resource), diff --git a/hathor/consensus/poa/poa_block_producer.py b/hathor/consensus/poa/poa_block_producer.py index 267e7792c..a11758246 100644 --- a/hathor/consensus/poa/poa_block_producer.py +++ b/hathor/consensus/poa/poa_block_producer.py @@ -174,6 +174,7 @@ def _produce_block(self, previous_block: PoaBlock) -> None: from hathor.transaction.poa import PoaBlock block_templates = self.manager.get_block_templates(parent_block_hash=previous_block.hash) block = block_templates.generate_mining_block(self.manager.rng, cls=PoaBlock) + block.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) assert isinstance(block, PoaBlock) if block.get_height() <= self.manager.tx_storage.get_height_best_block(): diff --git a/hathor/feature_activation/feature_service.py b/hathor/feature_activation/feature_service.py index 9fa7ceb0b..bc3003825 
100644 --- a/hathor/feature_activation/feature_service.py +++ b/hathor/feature_activation/feature_service.py @@ -60,8 +60,8 @@ def is_signaling_mandatory_features(self, block: 'Block') -> BlockSignalingState Return whether a block is signaling features that are mandatory, that is, any feature currently in the MUST_SIGNAL phase. """ - bit_counts = block.get_feature_activation_bit_counts() - height = block.get_height() + bit_counts = block.static_metadata.feature_activation_bit_counts + height = block.static_metadata.height offset_to_boundary = height % self._feature_settings.evaluation_interval remaining_blocks = self._feature_settings.evaluation_interval - offset_to_boundary - 1 descriptions = self.get_bits_description(block=block) @@ -95,7 +95,7 @@ def get_state(self, *, block: 'Block', feature: Feature) -> FeatureState: # All blocks within the same evaluation interval have the same state, that is, the state is only defined for # the block in each interval boundary. Therefore, we get the state of the previous boundary block or calculate # a new state if this block is a boundary block. - height = block.get_height() + height = block.static_metadata.height offset_to_boundary = height % self._feature_settings.evaluation_interval offset_to_previous_boundary = offset_to_boundary or self._feature_settings.evaluation_interval previous_boundary_height = height - offset_to_previous_boundary @@ -139,7 +139,7 @@ def _calculate_new_state( an AssertionError. Non-boundary blocks never calculate their own state, they get it from their parent block instead. """ - height = boundary_block.get_height() + height = boundary_block.static_metadata.height criteria = self._feature_settings.features.get(feature) evaluation_interval = self._feature_settings.evaluation_interval @@ -162,7 +162,7 @@ def _calculate_new_state( # Get the count for this block's parent. Since this is a boundary block, its parent count represents the # previous evaluation interval count. parent_block = boundary_block.get_block_parent() - counts = parent_block.get_feature_activation_bit_counts() + counts = parent_block.static_metadata.feature_activation_bit_counts count = counts[criteria.bit] threshold = criteria.get_threshold(self._feature_settings) @@ -209,8 +209,9 @@ def _get_ancestor_at_height(self, *, block: 'Block', ancestor_height: int) -> 'B Given a block, return its ancestor at a specific height. Uses the height index if the block is in the best blockchain, or search iteratively otherwise. """ - assert ancestor_height < block.get_height(), ( - f"ancestor height must be lower than the block's height: {ancestor_height} >= {block.get_height()}" + assert ancestor_height < block.static_metadata.height, ( + f"ancestor height must be lower than the block's height: " + f"{ancestor_height} >= {block.static_metadata.height}" ) # It's possible that this method is called before the consensus runs for this block, therefore we do not know @@ -219,7 +220,7 @@ def _get_ancestor_at_height(self, *, block: 'Block', ancestor_height: int) -> 'B parent_metadata = parent_block.get_metadata() assert parent_metadata.validation.is_fully_connected(), 'The parent should always be fully validated.' 
- if parent_block.get_height() == ancestor_height: + if parent_block.static_metadata.height == ancestor_height: return parent_block if not parent_metadata.voided_by and (ancestor := self._tx_storage.get_block_by_height(ancestor_height)): @@ -237,11 +238,11 @@ def _get_ancestor_iteratively(self, *, block: 'Block', ancestor_height: int) -> # TODO: there are further optimizations to be done here, the latest common block height could be persisted in # metadata, so we could still use the height index if the requested height is before that height. assert ancestor_height >= 0 - assert block.get_height() - ancestor_height <= self._feature_settings.evaluation_interval, ( + assert block.static_metadata.height - ancestor_height <= self._feature_settings.evaluation_interval, ( 'requested ancestor is deeper than the maximum allowed' ) ancestor = block - while ancestor.get_height() > ancestor_height: + while ancestor.static_metadata.height > ancestor_height: ancestor = ancestor.get_block_parent() return ancestor diff --git a/hathor/feature_activation/resources/feature.py b/hathor/feature_activation/resources/feature.py index f24579ddc..f39fb1a37 100644 --- a/hathor/feature_activation/resources/feature.py +++ b/hathor/feature_activation/resources/feature.py @@ -89,7 +89,7 @@ def get_block_features(self, request: Request) -> bytes: def get_features(self) -> bytes: best_block = self.tx_storage.get_best_block() - bit_counts = best_block.get_feature_activation_bit_counts() + bit_counts = best_block.static_metadata.feature_activation_bit_counts features = [] for feature, criteria in self._feature_settings.features.items(): diff --git a/hathor/manager.py b/hathor/manager.py index 28eb7930d..4f09d126c 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -449,7 +449,7 @@ def _initialize_components_full_verification(self) -> None: dt = LogDuration(t2 - t1) dcnt = cnt - cnt2 tx_rate = '?' 
if dt == 0 else dcnt / dt - h = max(h, tx_meta.height or 0) + h = max(h, (tx.static_metadata.height if isinstance(tx, Block) else 0)) if dt > 30: ts_date = datetime.datetime.fromtimestamp(self.tx_storage.latest_timestamp) if h == 0: @@ -469,12 +469,10 @@ def _initialize_components_full_verification(self) -> None: try: # TODO: deal with invalid tx - tx.calculate_height() tx._update_parents_children_metadata() if tx.can_validate_full(): tx.update_initial_metadata() - tx.calculate_min_height() if tx.is_genesis: assert tx.validate_checkpoint(self.checkpoints) assert self.verification_service.validate_full( @@ -661,31 +659,32 @@ def _verify_checkpoints(self) -> None: for checkpoint in expected_checkpoints: # XXX: query the database from checkpoint.hash and verify what comes out try: - tx = self.tx_storage.get_transaction(checkpoint.hash) + block = self.tx_storage.get_block(checkpoint.hash) except TransactionDoesNotExist as e: raise InitializationError(f'Expected checkpoint does not exist in database: {checkpoint}') from e - tx_meta = tx.get_metadata() - if tx_meta.height != checkpoint.height: + meta = block.get_metadata() + height = block.static_metadata.height + if height != checkpoint.height: raise InitializationError( - f'Expected checkpoint of hash {tx.hash_hex} to have height {checkpoint.height}, but instead it has' - f'height {tx_meta.height}' + f'Expected checkpoint of hash {block.hash_hex} to have height {checkpoint.height},' + f'but instead it has height {height}' ) - if tx_meta.voided_by: - pretty_voided_by = list(i.hex() for i in tx_meta.voided_by) + if meta.voided_by: + pretty_voided_by = list(i.hex() for i in meta.voided_by) raise InitializationError( f'Expected checkpoint {checkpoint} to *NOT* be voided, but it is being voided by: ' f'{pretty_voided_by}' ) # XXX: query the height index from checkpoint.height and check that the hash matches - tx_hash = self.tx_storage.indexes.height.get(checkpoint.height) - if tx_hash is None: + block_hash = self.tx_storage.indexes.height.get(checkpoint.height) + if block_hash is None: raise InitializationError( f'Expected checkpoint {checkpoint} to be found in the height index, but it was not found' ) - if tx_hash != tx.hash: + if block_hash != block.hash: raise InitializationError( f'Expected checkpoint {checkpoint} to be found in the height index, but it instead the block with ' - f'hash {tx_hash.hex()} was found' + f'hash {block_hash.hex()} was found' ) def get_new_tx_parents(self, timestamp: Optional[float] = None) -> list[VertexId]: diff --git a/hathor/mining/block_template.py b/hathor/mining/block_template.py index c1ef07f60..7884ba8e4 100644 --- a/hathor/mining/block_template.py +++ b/hathor/mining/block_template.py @@ -80,7 +80,7 @@ def generate_mining_block( block = cls(outputs=tx_outputs, parents=parents, timestamp=block_timestamp, data=data or b'', storage=storage, weight=self.weight, signal_bits=self.signal_bits) if include_metadata: - block._metadata = TransactionMetadata(height=self.height, score=self.score) + block._metadata = TransactionMetadata(score=self.score) block.get_metadata(use_storage=False) return block diff --git a/hathor/p2p/resources/mining_info.py b/hathor/p2p/resources/mining_info.py index 180e3876e..f08549ba9 100644 --- a/hathor/p2p/resources/mining_info.py +++ b/hathor/p2p/resources/mining_info.py @@ -49,8 +49,9 @@ def render_GET(self, request): self._settings.P2PKH_VERSION_BYTE.hex() + 'acbfb94571417423c1ed66f706730c4aea516ac5762cccb8' ) block = self.manager.generate_mining_block(address=burn_address) + 
block.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) - height = block.calculate_height() - 1 + height = block.static_metadata.height - 1 difficulty = max(int(Weight(block.weight).to_pdiff()), 1) parent = block.get_block_parent() diff --git a/hathor/p2p/resources/status.py b/hathor/p2p/resources/status.py index bd099d3e1..6556e0d2b 100644 --- a/hathor/p2p/resources/status.py +++ b/hathor/p2p/resources/status.py @@ -92,9 +92,8 @@ def render_GET(self, request): best_block_tips = [] for tip in self.manager.tx_storage.get_best_block_tips(): - tx = self.manager.tx_storage.get_transaction(tip) - meta = tx.get_metadata() - best_block_tips.append({'hash': tx.hash_hex, 'height': meta.height}) + block = self.manager.tx_storage.get_block(tip) + best_block_tips.append({'hash': block.hash_hex, 'height': block.static_metadata.height}) best_block = self.manager.tx_storage.get_best_block() raw_best_blockchain = self.manager.tx_storage.get_n_height_tips(self._settings.DEFAULT_BEST_BLOCKCHAIN_BLOCKS) @@ -122,7 +121,7 @@ def render_GET(self, request): 'best_block_tips': best_block_tips, 'best_block': { 'hash': best_block.hash_hex, - 'height': best_block.get_metadata().height, + 'height': best_block.static_metadata.height, }, 'best_blockchain': best_blockchain, } diff --git a/hathor/p2p/sync_v2/agent.py b/hathor/p2p/sync_v2/agent.py index 2a71a4e4b..c3de1f3e2 100644 --- a/hathor/p2p/sync_v2/agent.py +++ b/hathor/p2p/sync_v2/agent.py @@ -42,7 +42,7 @@ from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.transaction.vertex_parser import VertexParser from hathor.types import VertexId -from hathor.util import collect_n, not_none +from hathor.util import collect_n if TYPE_CHECKING: from hathor.p2p.protocol import HathorProtocol @@ -846,7 +846,7 @@ def handle_get_best_block(self, _payload: str) -> None: assert meta.validation.is_fully_connected() payload = BestBlockPayload( block=best_block.hash, - height=not_none(meta.height), + height=best_block.static_metadata.height, ) self.send_message(ProtocolMessages.BEST_BLOCK, payload.json()) diff --git a/hathor/p2p/sync_v2/streamers.py b/hathor/p2p/sync_v2/streamers.py index df11131ba..5b215102f 100644 --- a/hathor/p2p/sync_v2/streamers.py +++ b/hathor/p2p/sync_v2/streamers.py @@ -296,8 +296,8 @@ def send_next(self) -> None: # Check if tx is confirmed by the `self.current_block` or any next block. 
assert cur_metadata.first_block is not None assert self.current_block is not None - first_block = self.tx_storage.get_transaction(cur_metadata.first_block) - if not_none(first_block.get_metadata().height) < not_none(self.current_block.get_metadata().height): + first_block = self.tx_storage.get_block(cur_metadata.first_block) + if not_none(first_block.static_metadata.height) < not_none(self.current_block.static_metadata.height): self.log.debug('skipping tx: out of current block') self.bfs.skip_neighbors(cur) return diff --git a/hathor/stratum/stratum.py b/hathor/stratum/stratum.py index e3658d457..a03f05271 100644 --- a/hathor/stratum/stratum.py +++ b/hathor/stratum/stratum.py @@ -674,6 +674,7 @@ def create_job_tx(self, jobid: UUID) -> BaseTransaction: data = data[:self._settings.BLOCK_DATA_MAX_SIZE] block = self.manager.generate_mining_block(data=data, address=self.miner_address, merge_mined=self.merged_mining) + block.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) self.log.debug('prepared block for mining', block=block) return block diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index d2bb14e50..db6fce85b 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -24,7 +24,7 @@ from itertools import chain from math import inf, isfinite, log from struct import error as StructError, pack -from typing import TYPE_CHECKING, Any, ClassVar, Generic, Iterator, Optional, TypeAlias, TypeVar, cast +from typing import TYPE_CHECKING, Any, ClassVar, Generic, Iterator, Optional, TypeAlias, TypeVar from structlog import get_logger @@ -281,14 +281,6 @@ def __bytes__(self) -> bytes: def __hash__(self) -> int: return hash(self.hash) - @abstractmethod - def calculate_height(self) -> int: - raise NotImplementedError - - @abstractmethod - def calculate_min_height(self) -> int: - raise NotImplementedError - @property def hash(self) -> VertexId: assert self._hash is not None, 'Vertex hash must be initialized.' @@ -633,20 +625,12 @@ def get_metadata(self, *, force_reload: bool = False, use_storage: bool = True) metadata = self.storage.get_metadata(self.hash) self._metadata = metadata if not metadata: - # FIXME: there is code that set use_storage=False but relies on correct height being calculated - # which requires the use of a storage, this is a workaround that should be fixed, places where this - # happens include generating new mining blocks and some tests - height = self.calculate_height() if self.storage else None score = self.weight if self.is_genesis else 0 - min_height = 0 if self.is_genesis else None - metadata = TransactionMetadata( settings=self._settings, hash=self._hash, accumulated_weight=self.weight, - height=height, score=score, - min_height=min_height ) self._metadata = metadata if not metadata.hash: @@ -672,8 +656,6 @@ def reset_metadata(self) -> None: self._metadata.voided_by = {self._settings.PARTIALLY_VALIDATED_ID} self._metadata._tx_ref = weakref.ref(self) - self._update_height_metadata() - self.storage.save_transaction(self, only_metadata=True) def update_accumulated_weight(self, *, stop_value: float = inf, save_file: bool = True) -> TransactionMetadata: @@ -727,28 +709,12 @@ def update_initial_metadata(self, *, save: bool = True) -> None: It is called when a new transaction/block is received by HathorManager. 
""" - self._update_height_metadata() self._update_parents_children_metadata() - self.update_reward_lock_metadata() - self._update_feature_activation_bit_counts() self._update_initial_accumulated_weight() if save: assert self.storage is not None self.storage.save_transaction(self, only_metadata=True) - def _update_height_metadata(self) -> None: - """Update the vertice height metadata.""" - meta = self.get_metadata() - meta.height = self.calculate_height() - - def update_reward_lock_metadata(self) -> None: - """Update the txs/block min_height metadata.""" - metadata = self.get_metadata() - min_height = self.calculate_min_height() - if metadata.min_height is not None: - assert metadata.min_height == min_height - metadata.min_height = min_height - def _update_parents_children_metadata(self) -> None: """Update the txs/block parent's children metadata.""" assert self._hash is not None @@ -760,15 +726,6 @@ def _update_parents_children_metadata(self) -> None: metadata.children.append(self.hash) self.storage.save_transaction(parent, only_metadata=True) - def _update_feature_activation_bit_counts(self) -> None: - """Update the block's feature_activation_bit_counts.""" - if not self.is_block: - return - from hathor.transaction import Block - assert isinstance(self, Block) - # This method lazily calculates and stores the value in metadata - cast(Block, self).get_feature_activation_bit_counts() - def _update_initial_accumulated_weight(self) -> None: """Update the vertex initial accumulated_weight.""" metadata = self.get_metadata() @@ -911,22 +868,26 @@ def static_metadata(self) -> StaticMetadataT: return self._static_metadata @abstractmethod - def init_static_metadata_from_storage(self, storage: 'TransactionStorage') -> None: + def init_static_metadata_from_storage(self, settings: HathorSettings, storage: 'TransactionStorage') -> None: """Initialize this vertex's static metadata using dependencies from a storage. This can be called multiple - times, provided the dependencies don't change.""" + times, provided the dependencies don't change. Also, this must be fast, ideally O(1).""" raise NotImplementedError def set_static_metadata(self, static_metadata: StaticMetadataT | None) -> None: """Set this vertex's static metadata. After it's set, it can only be set again to the same value.""" - assert not self._static_metadata or self._static_metadata == static_metadata, ( - 'trying to set static metadata with different values' - ) + if self._static_metadata is not None: + assert self._static_metadata == static_metadata, 'trying to set static metadata with different values' + self.log.warn( + 'redundant call on set_static_metadata', vertex_id=self.hash_hex, static_metadata=static_metadata + ) + self._static_metadata = static_metadata """ Type aliases for easily working with `GenericVertex`. A `Vertex` is a superclass that includes all specific -vertex subclasses, and a `BaseTransaction` is simply an alias to `Vertex` for backwards compatibility. +vertex subclasses, and a `BaseTransaction` is simply an alias to `Vertex` for backwards compatibility (it can be +removed in the future). 
""" Vertex: TypeAlias = GenericVertex[VertexStaticMetadata] BaseTransaction: TypeAlias = Vertex diff --git a/hathor/transaction/block.py b/hathor/transaction/block.py index 617458a8a..9bdeb6ac9 100644 --- a/hathor/transaction/block.py +++ b/hathor/transaction/block.py @@ -15,8 +15,6 @@ from __future__ import annotations import base64 -from itertools import starmap, zip_longest -from operator import add from struct import pack from typing import TYPE_CHECKING, Any, Iterator, Optional @@ -30,7 +28,6 @@ from hathor.transaction.exceptions import CheckpointError from hathor.transaction.static_metadata import BlockStaticMetadata from hathor.transaction.util import VerboseCallback, int_to_bytes, unpack, unpack_len -from hathor.util import not_none from hathor.utils.int import get_bit_list if TYPE_CHECKING: @@ -106,64 +103,6 @@ def create_from_struct(cls, struct_bytes: bytes, storage: Optional['TransactionS return blc - def calculate_height(self) -> int: - """Return the height of the block, i.e., the number of blocks since genesis""" - if self.is_genesis: - return 0 - assert self.storage is not None - parent_block = self.get_block_parent() - return parent_block.get_height() + 1 - - def calculate_min_height(self) -> int: - """The minimum height the next block needs to have, basically the maximum min-height of this block's parents. - """ - assert self.storage is not None - # maximum min-height of any parent tx - min_height = 0 - for tx_hash in self.get_tx_parents(): - tx = self.storage.get_transaction(tx_hash) - tx_min_height = tx.get_metadata().min_height - min_height = max(min_height, not_none(tx_min_height)) - - return min_height - - def get_feature_activation_bit_counts(self) -> list[int]: - """ - Lazily calculates the feature_activation_bit_counts metadata attribute, which is a list of feature activation - bit counts. After it's calculated for the first time, it's persisted in block metadata and must not be changed. - - Each list index corresponds to a bit position, and its respective value is the rolling count of active bits - from the previous boundary block up to this block, including it. LSB is on the left. - """ - metadata = self.get_metadata() - - if metadata.feature_activation_bit_counts is not None: - return metadata.feature_activation_bit_counts - - previous_counts = self._get_previous_feature_activation_bit_counts() - bit_list = self._get_feature_activation_bit_list() - - count_and_bit_pairs = zip_longest(previous_counts, bit_list, fillvalue=0) - updated_counts = starmap(add, count_and_bit_pairs) - metadata.feature_activation_bit_counts = list(updated_counts) - - return metadata.feature_activation_bit_counts - - def _get_previous_feature_activation_bit_counts(self) -> list[int]: - """ - Returns the feature_activation_bit_counts metadata attribute from the parent block, - or no previous counts if this is a boundary block. - """ - evaluation_interval = self._settings.FEATURE_ACTIVATION.evaluation_interval - is_boundary_block = self.calculate_height() % evaluation_interval == 0 - - if is_boundary_block: - return [] - - parent_block = self.get_block_parent() - - return parent_block.get_feature_activation_bit_counts() - def get_next_block_best_chain_hash(self) -> Optional[bytes]: """Return the hash of the next block in the best blockchain. The blockchain is written from left-to-righ (->), meaning the next block has a greater height. 
@@ -320,7 +259,7 @@ def to_json(self, decode_script: bool = False, include_metadata: bool = False) - def to_json_extended(self) -> dict[str, Any]: json = super().to_json_extended() - json['height'] = self.get_metadata().height + json['height'] = self.static_metadata.height return json @@ -356,10 +295,8 @@ def get_base_hash(self) -> bytes: return sha256d_hash(self.get_header_without_nonce()) def get_height(self) -> int: - """Returns the block's height.""" - meta = self.get_metadata() - assert meta.height is not None - return meta.height + """Return this block's height.""" + return self.static_metadata.height def _get_feature_activation_bit_list(self) -> list[int]: """ @@ -431,5 +368,6 @@ def iter_transactions_in_this_block(self) -> Iterator[BaseTransaction]: yield tx @override - def init_static_metadata_from_storage(self, storage: 'TransactionStorage') -> None: - raise NotImplementedError('this will be implemented') + def init_static_metadata_from_storage(self, settings: HathorSettings, storage: 'TransactionStorage') -> None: + static_metadata = BlockStaticMetadata.create_from_storage(self, settings, storage) + self.set_static_metadata(static_metadata) diff --git a/hathor/transaction/resources/decode_tx.py b/hathor/transaction/resources/decode_tx.py index 39f32eec6..bdd409a92 100644 --- a/hathor/transaction/resources/decode_tx.py +++ b/hathor/transaction/resources/decode_tx.py @@ -16,6 +16,7 @@ from hathor.api_util import Resource, get_args, get_missing_params_msg, parse_args, set_cors from hathor.cli.openapi_files.register import register_resource +from hathor.conf.get_settings import get_global_settings from hathor.transaction.resources.transaction import get_tx_extra_data from hathor.util import json_dumpb @@ -33,6 +34,7 @@ class DecodeTxResource(Resource): def __init__(self, manager): # Important to have the manager so we can know the tx_storage self.manager = manager + self._settings = get_global_settings() def render_GET(self, request): """ Get request /decode_tx/ that returns the tx decoded, if success @@ -51,6 +53,7 @@ def render_GET(self, request): try: tx_bytes = bytes.fromhex(parsed['args']['hex_tx']) tx = self.manager.vertex_parser.deserialize(tx_bytes) + tx.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) tx.storage = self.manager.tx_storage data = get_tx_extra_data(tx) except ValueError: diff --git a/hathor/transaction/resources/mining.py b/hathor/transaction/resources/mining.py index cda18f3ff..1cd20bfdc 100644 --- a/hathor/transaction/resources/mining.py +++ b/hathor/transaction/resources/mining.py @@ -18,8 +18,10 @@ from hathor.api_util import Resource, get_args, set_cors from hathor.cli.openapi_files.register import register_resource +from hathor.conf.settings import HathorSettings from hathor.crypto.util import decode_address from hathor.exception import HathorError +from hathor.manager import HathorManager from hathor.util import api_catch_exceptions, json_dumpb, json_loadb logger = get_logger() @@ -46,9 +48,10 @@ class GetBlockTemplateResource(Resource): """ isLeaf = True - def __init__(self, manager): + def __init__(self, manager: HathorManager, settings: HathorSettings) -> None: # Important to have the manager so we can know the tx_storage self.manager = manager + self._settings = settings self.log = logger.new() @api_catch_exceptions @@ -76,6 +79,7 @@ def render_GET(self, request): # get block # XXX: miner can edit block data and output_script, so it's fine if address is None block = self.manager.generate_mining_block(address=address, 
merge_mined=merged_mining) + block.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) # serialize data = block.to_json(include_metadata=True) diff --git a/hathor/transaction/resources/transaction.py b/hathor/transaction/resources/transaction.py index 2a530b74d..0adc27639 100644 --- a/hathor/transaction/resources/transaction.py +++ b/hathor/transaction/resources/transaction.py @@ -28,6 +28,7 @@ ) from hathor.cli.openapi_files.register import register_resource from hathor.conf.get_settings import get_global_settings +from hathor.transaction import Block from hathor.transaction.base_transaction import BaseTransaction, TxVersion from hathor.transaction.token_creation_tx import TokenCreationTransaction from hathor.util import json_dumpb @@ -70,9 +71,9 @@ def get_tx_extra_data( # To get the updated accumulated weight just need to call the # TransactionAccumulatedWeightResource (/transaction_acc_weight) - if tx.is_block: + if isinstance(tx, Block): # For blocks we need to add the height - serialized['height'] = meta.height + serialized['height'] = tx.static_metadata.height # In the metadata we have the spent_outputs, that are the txs that spent the outputs for each index # However we need to send also which one of them is not voided diff --git a/hathor/transaction/static_metadata.py b/hathor/transaction/static_metadata.py index 43114c0fb..22473e5af 100644 --- a/hathor/transaction/static_metadata.py +++ b/hathor/transaction/static_metadata.py @@ -12,15 +12,26 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + import dataclasses from abc import ABC from dataclasses import dataclass -from typing import TYPE_CHECKING +from itertools import chain, starmap, zip_longest +from operator import add +from typing import TYPE_CHECKING, Callable + +from typing_extensions import Self +from hathor.feature_activation.feature import Feature +from hathor.feature_activation.model.feature_state import FeatureState +from hathor.types import VertexId from hathor.util import json_dumpb, json_loadb if TYPE_CHECKING: - from hathor.transaction import BaseTransaction + from hathor.conf.settings import HathorSettings + from hathor.transaction import BaseTransaction, Block, Transaction + from hathor.transaction.storage import TransactionStorage @dataclass(slots=True, frozen=True, kw_only=True) @@ -32,6 +43,10 @@ class VertexStaticMetadata(ABC): This class is an abstract base class for all static metadata types that includes attributes common to all vertex types. """ + + # XXX: this is only used to defer the reward-lock verification from the transaction spending a reward to the first + # block that confirming this transaction, it is important to always have this set to be able to distinguish an old + # metadata (that does not have this calculated, from a tx with a new format that does have this calculated) min_height: int def to_bytes(self) -> bytes: @@ -56,9 +71,185 @@ def from_bytes(cls, data: bytes, *, target: 'BaseTransaction') -> 'VertexStaticM @dataclass(slots=True, frozen=True, kw_only=True) class BlockStaticMetadata(VertexStaticMetadata): height: int + + # A list of feature activation bit counts. + # Each list index corresponds to a bit position, and its respective value is the rolling count of active bits from + # the previous boundary block up to this block, including it. LSB is on the left. 
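    # Illustrative sketch (not part of the patch, values made up): a non-boundary block
    # inherits its parent's rolling counts and adds its own signal bits, which is what
    # _calculate_feature_activation_bit_counts does below with zip_longest + add:
    #
    #     from itertools import starmap, zip_longest
    #     from operator import add
    #
    #     previous_counts = [3, 1, 0, 2]   # parent block's rolling counts
    #     bit_list = [1, 0, 1, 0]          # this block's signal bits, LSB first
    #     updated = list(starmap(add, zip_longest(previous_counts, bit_list, fillvalue=0)))
    #     assert updated == [4, 1, 1, 2]
    #
    # For a boundary block (height % evaluation_interval == 0) previous_counts is [],
    # so the result is just this block's own bit list.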
feature_activation_bit_counts: list[int] + # A dict of features in the feature activation process and their respective state. + feature_states: dict[Feature, FeatureState] + + @classmethod + def create_from_storage(cls, block: 'Block', settings: HathorSettings, storage: 'TransactionStorage') -> Self: + """Create a `BlockStaticMetadata` using dependencies provided by a storage.""" + return cls.create(block, settings, storage.get_vertex) + + @classmethod + def create( + cls, + block: 'Block', + settings: HathorSettings, + vertex_getter: Callable[[VertexId], 'BaseTransaction'] + ) -> Self: + """Create a `BlockStaticMetadata` using dependencies provided by a `vertex_getter`. + This must be fast, ideally O(1).""" + height = cls._calculate_height(block, vertex_getter) + min_height = cls._calculate_min_height(block, vertex_getter) + feature_activation_bit_counts = cls._calculate_feature_activation_bit_counts( + block, + height, + settings, + vertex_getter, + ) + + return cls( + height=height, + min_height=min_height, + feature_activation_bit_counts=feature_activation_bit_counts, + feature_states={}, # This will be populated in the next PR + ) + + @staticmethod + def _calculate_height(block: 'Block', vertex_getter: Callable[[VertexId], 'BaseTransaction']) -> int: + """Return the height of the block, i.e., the number of blocks since genesis""" + if block.is_genesis: + return 0 + + from hathor.transaction import Block + parent_hash = block.get_block_parent_hash() + parent_block = vertex_getter(parent_hash) + assert isinstance(parent_block, Block) + return parent_block.static_metadata.height + 1 + + @staticmethod + def _calculate_min_height(block: 'Block', vertex_getter: Callable[[VertexId], 'BaseTransaction']) -> int: + """The minimum height the next block needs to have, basically the maximum min-height of this block's parents. + """ + # maximum min-height of any parent tx + min_height = 0 + for tx_hash in block.get_tx_parents(): + tx = vertex_getter(tx_hash) + min_height = max(min_height, tx.static_metadata.min_height) + + return min_height + + @classmethod + def _calculate_feature_activation_bit_counts( + cls, + block: 'Block', + height: int, + settings: HathorSettings, + vertex_getter: Callable[[VertexId], 'BaseTransaction'], + ) -> list[int]: + """ + Lazily calculates the feature_activation_bit_counts metadata attribute, which is a list of feature activation + bit counts. After it's calculated for the first time, it's persisted in block metadata and must not be changed. + + Each list index corresponds to a bit position, and its respective value is the rolling count of active bits + from the previous boundary block up to this block, including it. LSB is on the left. + """ + previous_counts = cls._get_previous_feature_activation_bit_counts(block, height, settings, vertex_getter) + bit_list = block._get_feature_activation_bit_list() + + count_and_bit_pairs = zip_longest(previous_counts, bit_list, fillvalue=0) + updated_counts = starmap(add, count_and_bit_pairs) + return list(updated_counts) + + @staticmethod + def _get_previous_feature_activation_bit_counts( + block: 'Block', + height: int, + settings: HathorSettings, + vertex_getter: Callable[[VertexId], 'BaseTransaction'], + ) -> list[int]: + """ + Returns the feature_activation_bit_counts metadata attribute from the parent block, + or no previous counts if this is a boundary block. 
+ """ + evaluation_interval = settings.FEATURE_ACTIVATION.evaluation_interval + is_boundary_block = height % evaluation_interval == 0 + + if is_boundary_block: + return [] + + from hathor.transaction import Block + parent_hash = block.get_block_parent_hash() + parent_block = vertex_getter(parent_hash) + assert isinstance(parent_block, Block) + + return parent_block.static_metadata.feature_activation_bit_counts + @dataclass(slots=True, frozen=True, kw_only=True) class TransactionStaticMetadata(VertexStaticMetadata): - pass + @classmethod + def create_from_storage(cls, tx: 'Transaction', settings: HathorSettings, storage: 'TransactionStorage') -> Self: + """Create a `TransactionStaticMetadata` using dependencies provided by a storage.""" + return cls.create(tx, settings, storage.get_vertex) + + @classmethod + def create( + cls, + tx: 'Transaction', + settings: HathorSettings, + vertex_getter: Callable[[VertexId], 'BaseTransaction'], + ) -> Self: + """Create a `TransactionStaticMetadata` using dependencies provided by a `vertex_getter`. + This must be fast, ideally O(1).""" + min_height = cls._calculate_min_height( + tx, + settings, + vertex_getter=vertex_getter, + ) + + return cls( + min_height=min_height + ) + + @classmethod + def _calculate_min_height( + cls, + tx: 'Transaction', + settings: HathorSettings, + vertex_getter: Callable[[VertexId], 'BaseTransaction'], + ) -> int: + """Calculates the min height the first block confirming this tx needs to have for reward lock verification.""" + if tx.is_genesis: + return 0 + + return max( + # 1) don't drop the min height of any parent tx or input tx + cls._calculate_inherited_min_height(tx, vertex_getter), + # 2) include the min height for any reward being spent + cls._calculate_my_min_height(tx, settings, vertex_getter), + ) + + @staticmethod + def _calculate_inherited_min_height( + tx: 'Transaction', + vertex_getter: Callable[[VertexId], 'BaseTransaction'] + ) -> int: + """ Calculates min height inherited from any input or parent""" + min_height = 0 + iter_parents = tx.get_tx_parents() + iter_inputs = (tx_input.tx_id for tx_input in tx.inputs) + for vertex_id in chain(iter_parents, iter_inputs): + vertex = vertex_getter(vertex_id) + min_height = max(min_height, vertex.static_metadata.min_height) + return min_height + + @staticmethod + def _calculate_my_min_height( + tx: 'Transaction', + settings: HathorSettings, + vertex_getter: Callable[[VertexId], 'BaseTransaction'], + ) -> int: + """ Calculates min height derived from own spent block rewards""" + from hathor.transaction import Block + min_height = 0 + for tx_input in tx.inputs: + spent_tx = vertex_getter(tx_input.tx_id) + if isinstance(spent_tx, Block): + min_height = max(min_height, spent_tx.static_metadata.height + settings.REWARD_SPEND_MIN_BLOCKS + 1) + return min_height diff --git a/hathor/transaction/storage/cache_storage.py b/hathor/transaction/storage/cache_storage.py index 8f9536358..1539309f9 100644 --- a/hathor/transaction/storage/cache_storage.py +++ b/hathor/transaction/storage/cache_storage.py @@ -15,6 +15,7 @@ from collections import OrderedDict from typing import Any, Iterator, Optional +from structlog.stdlib import BoundLogger from twisted.internet import threads from typing_extensions import override @@ -22,7 +23,6 @@ from hathor.indexes import IndexesManager from hathor.reactor import ReactorProtocol as Reactor from hathor.transaction import BaseTransaction -from hathor.transaction.static_metadata import VertexStaticMetadata from hathor.transaction.storage.migrations 
import MigrationState from hathor.transaction.storage.transaction_storage import BaseTransactionStorage from hathor.transaction.storage.tx_allow_scope import TxAllowScope @@ -170,10 +170,6 @@ def save_transaction(self, tx: 'BaseTransaction', *, only_metadata: bool = False def _save_static_metadata(self, tx: BaseTransaction) -> None: self.store._save_static_metadata(tx) - @override - def _get_static_metadata(self, vertex: BaseTransaction) -> VertexStaticMetadata | None: - return self.store._get_static_metadata(vertex) - def get_all_genesis(self) -> set[BaseTransaction]: return self.store.get_all_genesis() @@ -255,3 +251,7 @@ def get_value(self, key: str) -> Optional[str]: def flush(self): self._flush_to_storage(self.dirty_txs.copy()) + + @override + def migrate_static_metadata(self, log: BoundLogger) -> None: + return self.store.migrate_static_metadata(log) diff --git a/hathor/transaction/storage/memory_storage.py b/hathor/transaction/storage/memory_storage.py index 861e19e65..85a68b491 100644 --- a/hathor/transaction/storage/memory_storage.py +++ b/hathor/transaction/storage/memory_storage.py @@ -14,12 +14,12 @@ from typing import Any, Iterator, Optional, TypeVar +from structlog.stdlib import BoundLogger from typing_extensions import override from hathor.conf.settings import HathorSettings from hathor.indexes import IndexesManager from hathor.transaction import BaseTransaction -from hathor.transaction.static_metadata import VertexStaticMetadata from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.transaction.storage.migrations import MigrationState from hathor.transaction.storage.transaction_storage import BaseTransactionStorage @@ -43,7 +43,6 @@ def __init__( """ self.transactions: dict[bytes, BaseTransaction] = {} self.metadata: dict[bytes, TransactionMetadata] = {} - self._static_metadata: dict[bytes, VertexStaticMetadata] = {} # Store custom key/value attributes self.attributes: dict[str, Any] = {} self._clone_if_needed = _clone_if_needed @@ -75,7 +74,6 @@ def remove_transaction(self, tx: BaseTransaction) -> None: super().remove_transaction(tx) self.transactions.pop(tx.hash, None) self.metadata.pop(tx.hash, None) - self._static_metadata.pop(tx.hash, None) def save_transaction(self, tx: 'BaseTransaction', *, only_metadata: bool = False) -> None: super().save_transaction(tx, only_metadata=only_metadata) @@ -90,11 +88,8 @@ def _save_transaction(self, tx: BaseTransaction, *, only_metadata: bool = False) @override def _save_static_metadata(self, tx: BaseTransaction) -> None: - self._static_metadata[tx.hash] = tx.static_metadata - - @override - def _get_static_metadata(self, vertex: BaseTransaction) -> VertexStaticMetadata | None: - return self._static_metadata.get(vertex.hash) + # We do not need to explicitly save the static metadata as the tx object already holds it in memory + pass def transaction_exists(self, hash_bytes: bytes) -> bool: return hash_bytes in self.transactions @@ -105,6 +100,7 @@ def _get_transaction(self, hash_bytes: bytes) -> BaseTransaction: if hash_bytes in self.metadata: tx._metadata = self._clone(self.metadata[hash_bytes]) assert tx._metadata is not None + assert tx._static_metadata is not None return tx else: raise TransactionDoesNotExist(hash_bytes.hex()) @@ -130,3 +126,9 @@ def remove_value(self, key: str) -> None: def get_value(self, key: str) -> Optional[str]: return self.attributes.get(key) + + @override + def migrate_static_metadata(self, log: BoundLogger) -> None: + # This method is only ever used by the 
`migrate_static_metadata` migration, and therefore must not be + # implemented for the memory storage. + raise NotImplementedError diff --git a/hathor/transaction/storage/migrations/migrate_static_metadata.py b/hathor/transaction/storage/migrations/migrate_static_metadata.py new file mode 100644 index 000000000..2edb3adf4 --- /dev/null +++ b/hathor/transaction/storage/migrations/migrate_static_metadata.py @@ -0,0 +1,67 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import TYPE_CHECKING + +from structlog import get_logger + +from hathor.conf.get_settings import get_global_settings +from hathor.transaction import Block, Transaction +from hathor.transaction.static_metadata import BlockStaticMetadata, TransactionStaticMetadata +from hathor.transaction.storage.migrations import BaseMigration +from hathor.util import progress + +if TYPE_CHECKING: + from hathor.transaction.storage import TransactionStorage + +logger = get_logger() + + +class Migration(BaseMigration): + def skip_empty_db(self) -> bool: + return True + + def get_db_name(self) -> str: + return 'migrate_static_metadata' + + def run(self, storage: 'TransactionStorage') -> None: + """This migration takes attributes from existing vertex metadata and saves them as static metadata.""" + log = logger.new() + settings = get_global_settings() + + # First we migrate static metadata using the storage itself since it uses internal structures. + log.info('creating static metadata...') + storage.migrate_static_metadata(log) + + # Now that static metadata is set, we can use the topological iterator normally + log.info('removing old metadata and validating...') + topological_iter = storage.topological_iterator() + + for vertex in progress(topological_iter, log=log, total=None): + # We re-save the vertex's metadata so it's serialized with the new `to_bytes()` method, excluding fields + # that were migrated. + storage.save_transaction(vertex, only_metadata=True) + + # We re-create the static metadata from scratch and compare it with the value that was created by the + # migration above, as a sanity check. 
+ if isinstance(vertex, Block): + assert vertex.static_metadata == BlockStaticMetadata.create_from_storage( + vertex, settings, storage + ) + elif isinstance(vertex, Transaction): + assert vertex.static_metadata == TransactionStaticMetadata.create_from_storage( + vertex, settings, storage + ) + else: + raise NotImplementedError diff --git a/hathor/transaction/storage/migrations/remove_first_nop_features.py b/hathor/transaction/storage/migrations/remove_first_nop_features.py index 555bcf741..c5ede9997 100644 --- a/hathor/transaction/storage/migrations/remove_first_nop_features.py +++ b/hathor/transaction/storage/migrations/remove_first_nop_features.py @@ -17,6 +17,7 @@ from structlog import get_logger from hathor.conf.get_settings import get_global_settings +from hathor.transaction import Block from hathor.transaction.storage.migrations import BaseMigration from hathor.util import progress @@ -48,11 +49,10 @@ def run(self, storage: 'TransactionStorage') -> None: topological_iterator = storage.topological_iterator() for vertex in progress(topological_iterator, log=log, total=None): - if vertex.is_block: + if isinstance(vertex, Block): meta = vertex.get_metadata() - assert meta.height is not None # This is the start_height of the **second** Phased Testing, so we clear anything before it. - if meta.height < 3_386_880: + if vertex.static_metadata.height < 3_386_880: meta.feature_states = None storage.save_transaction(vertex, only_metadata=True) diff --git a/hathor/transaction/storage/rocksdb_storage.py b/hathor/transaction/storage/rocksdb_storage.py index faa97b590..3e64379b3 100644 --- a/hathor/transaction/storage/rocksdb_storage.py +++ b/hathor/transaction/storage/rocksdb_storage.py @@ -15,22 +15,23 @@ from typing import TYPE_CHECKING, Iterator, Optional from structlog import get_logger +from structlog.stdlib import BoundLogger from typing_extensions import override from hathor.conf.settings import HathorSettings from hathor.indexes import IndexesManager from hathor.storage import RocksDBStorage -from hathor.transaction.static_metadata import VertexStaticMetadata +from hathor.transaction.static_metadata import BlockStaticMetadata, TransactionStaticMetadata, VertexStaticMetadata from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.transaction.storage.migrations import MigrationState from hathor.transaction.storage.transaction_storage import BaseTransactionStorage from hathor.transaction.vertex_parser import VertexParser -from hathor.util import json_dumpb, json_loadb +from hathor.util import json_loadb, progress if TYPE_CHECKING: import rocksdb - from hathor.transaction import BaseTransaction, TransactionMetadata + from hathor.transaction import BaseTransaction logger = get_logger() @@ -71,16 +72,13 @@ def _load_from_bytes(self, tx_data: bytes, meta_data: bytes) -> 'BaseTransaction from hathor.transaction.transaction_metadata import TransactionMetadata tx = self.vertex_parser.deserialize(tx_data) - tx._metadata = TransactionMetadata.create_from_json(json_loadb(meta_data)) + tx._metadata = TransactionMetadata.from_bytes(meta_data) tx.storage = self return tx def _tx_to_bytes(self, tx: 'BaseTransaction') -> bytes: return bytes(tx) - def _meta_to_bytes(self, meta: 'TransactionMetadata') -> bytes: - return json_dumpb(meta.to_json()) - def get_migration_state(self, migration_name: str) -> MigrationState: key = migration_name.encode('ascii') value = self._db.get((self._cf_migrations, key)) @@ -110,17 +108,22 @@ def _save_transaction(self, tx: 'BaseTransaction', *, 
only_metadata: bool = Fals if not only_metadata: tx_data = self._tx_to_bytes(tx) self._db.put((self._cf_tx, key), tx_data) - meta_data = self._meta_to_bytes(tx.get_metadata(use_storage=False)) + meta_data = tx.get_metadata(use_storage=False).to_bytes() self._db.put((self._cf_meta, key), meta_data) @override def _save_static_metadata(self, tx: 'BaseTransaction') -> None: self._db.put((self._cf_static_meta, tx.hash), tx.static_metadata.to_bytes()) - @override - def _get_static_metadata(self, vertex: 'BaseTransaction') -> VertexStaticMetadata | None: + def _load_static_metadata(self, vertex: 'BaseTransaction') -> None: + """Set vertex static metadata loaded from what's saved in this storage.""" + if vertex.is_genesis: + vertex.init_static_metadata_from_storage(self._settings, self) + return data = self._db.get((self._cf_static_meta, vertex.hash)) - return VertexStaticMetadata.from_bytes(data, target=vertex) if data else None + assert data is not None, f'static metadata not found for vertex {vertex.hash_hex}' + static_metadata = VertexStaticMetadata.from_bytes(data, target=vertex) + vertex.set_static_metadata(static_metadata) def transaction_exists(self, hash_bytes: bytes) -> bool: may_exist, _ = self._db.key_may_exist((self._cf_tx, hash_bytes)) @@ -139,6 +142,7 @@ def _get_transaction(self, hash_bytes: bytes) -> 'BaseTransaction': raise TransactionDoesNotExist(hash_bytes.hex()) assert tx._metadata is not None + assert tx._static_metadata is not None assert tx.hash == hash_bytes self._save_to_weakref(tx) @@ -152,6 +156,7 @@ def _get_transaction_from_db(self, hash_bytes: bytes) -> Optional['BaseTransacti return None assert meta_data is not None, 'expected metadata to exist when tx exists' tx = self._load_from_bytes(tx_data, meta_data) + self._load_static_metadata(tx) return tx def _get_tx(self, hash_bytes: bytes, tx_data: bytes) -> 'BaseTransaction': @@ -159,6 +164,7 @@ def _get_tx(self, hash_bytes: bytes, tx_data: bytes) -> 'BaseTransaction': if tx is None: meta_data = self._db.get((self._cf_meta, hash_bytes)) tx = self._load_from_bytes(tx_data, meta_data) + self._load_static_metadata(tx) assert tx.hash == hash_bytes self._save_to_weakref(tx) return tx @@ -227,3 +233,43 @@ def get_value(self, key: str) -> Optional[str]: return None else: return data.decode() + + @override + def migrate_static_metadata(self, log: BoundLogger) -> None: + metadata_iter = self._db.iteritems(self._cf_meta) + metadata_iter.seek_to_first() + + # We have to iterate over metadata instead of vertices because the storage doesn't allow us to get a vertex if + # its static metadata is not set. We also use raw dict metadata because `metadata.create_from_json()` doesn't + # include attributes that should be static, which are exactly the ones we need for this migration. 
+ for (_, vertex_id), metadata_bytes in progress(metadata_iter, log=log, total=None): + raw_metadata = json_loadb(metadata_bytes) + height = raw_metadata['height'] + min_height = raw_metadata['min_height'] + bit_counts = raw_metadata.get('feature_activation_bit_counts') + + assert isinstance(height, int) + assert isinstance(min_height, int) + + static_metadata: VertexStaticMetadata + is_block = (vertex_id == self._settings.GENESIS_BLOCK_HASH or height != 0) + + if is_block: + assert isinstance(bit_counts, list) + for item in bit_counts: + assert isinstance(item, int) + + static_metadata = BlockStaticMetadata( + height=height, + min_height=min_height, + feature_activation_bit_counts=bit_counts, + feature_states={}, # This will be populated in the next PR + ) + else: + assert bit_counts is None or bit_counts == [] + static_metadata = TransactionStaticMetadata( + min_height=min_height + ) + + # Save it manually to the CF + self._db.put((self._cf_static_meta, vertex_id), static_metadata.to_bytes()) diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index 32326c02c..ec1492939 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -22,6 +22,7 @@ from intervaltree.interval import Interval from structlog import get_logger +from structlog.stdlib import BoundLogger from hathor.conf.settings import HathorSettings from hathor.execution_manager import ExecutionManager @@ -32,7 +33,6 @@ from hathor.transaction.base_transaction import BaseTransaction, TxOutput from hathor.transaction.block import Block from hathor.transaction.exceptions import RewardLocked -from hathor.transaction.static_metadata import VertexStaticMetadata from hathor.transaction.storage.exceptions import ( TransactionDoesNotExist, TransactionIsNotABlock, @@ -44,6 +44,7 @@ add_feature_activation_bit_counts_metadata, add_feature_activation_bit_counts_metadata2, add_min_height_metadata, + migrate_static_metadata, remove_first_nop_features, remove_second_nop_features, ) @@ -101,6 +102,7 @@ class TransactionStorage(ABC): remove_first_nop_features.Migration, add_feature_activation_bit_counts_metadata2.Migration, remove_second_nop_features.Migration, + migrate_static_metadata.Migration, ] _migrations: list[BaseMigration] @@ -332,6 +334,7 @@ def _save_or_verify_genesis(self) -> None: ] for tx in genesis_txs: + tx.init_static_metadata_from_storage(self._settings, self) try: tx2 = self.get_transaction(tx.hash) assert tx == tx2 @@ -425,6 +428,7 @@ def save_transaction(self: 'TransactionStorage', tx: BaseTransaction, *, only_me """ meta = tx.get_metadata() self.pre_save_validation(tx, meta) + self._save_static_metadata(tx) @abstractmethod def _save_static_metadata(self, vertex: BaseTransaction) -> None: @@ -444,7 +448,6 @@ def pre_save_validation(self, tx: BaseTransaction, tx_meta: TransactionMetadata) assert tx.hash == tx_meta.hash, f'{tx.hash.hex()} != {tx_meta.hash.hex()}' self._validate_partial_marker_consistency(tx_meta) self._validate_transaction_in_scope(tx) - self._validate_block_height_metadata(tx) def post_get_validation(self, tx: BaseTransaction) -> None: """ Must be run before every save, will raise AssertionError or TransactionNotInAllowedScopeError @@ -455,7 +458,6 @@ def post_get_validation(self, tx: BaseTransaction) -> None: tx_meta = tx.get_metadata() self._validate_partial_marker_consistency(tx_meta) self._validate_transaction_in_scope(tx) - self._validate_block_height_metadata(tx) def 
_validate_partial_marker_consistency(self, tx_meta: TransactionMetadata) -> None: voided_by = tx_meta.get_frozen_voided_by() @@ -470,11 +472,6 @@ def _validate_transaction_in_scope(self, tx: BaseTransaction) -> None: tx_meta = tx.get_metadata() raise TransactionNotInAllowedScopeError(tx.hash_hex, self.get_allow_scope().name, tx_meta.validation.name) - def _validate_block_height_metadata(self, tx: BaseTransaction) -> None: - if tx.is_block: - tx_meta = tx.get_metadata() - assert tx_meta.height is not None - @abstractmethod def remove_transaction(self, tx: BaseTransaction) -> None: """Remove the tx. @@ -570,11 +567,6 @@ def get_metadata(self, hash_bytes: bytes) -> Optional[TransactionMetadata]: except TransactionDoesNotExist: return None - @abstractmethod - def _get_static_metadata(self, vertex: BaseTransaction) -> VertexStaticMetadata | None: - """Get a vertex's static metadata from this storage.""" - raise NotImplementedError - def get_all_transactions(self) -> Iterator[BaseTransaction]: """Return all vertices (transactions and blocks) within the allowed scope. """ @@ -1134,6 +1126,13 @@ def get_block(self, block_id: VertexId) -> Block: assert isinstance(block, Block) return block + @abstractmethod + def migrate_static_metadata(self, log: BoundLogger) -> None: + """ + Migrate metadata attributes to static metadata. This is only used for the `migrate_static_metadata` migration. + """ + raise NotImplementedError + class BaseTransactionStorage(TransactionStorage): indexes: Optional[IndexesManager] diff --git a/hathor/transaction/transaction.py b/hathor/transaction/transaction.py index a826b6a6b..a51eaeffe 100644 --- a/hathor/transaction/transaction.py +++ b/hathor/transaction/transaction.py @@ -15,7 +15,6 @@ from __future__ import annotations import hashlib -from itertools import chain from struct import pack from typing import TYPE_CHECKING, Any, NamedTuple, Optional @@ -23,14 +22,12 @@ from hathor.checkpoint import Checkpoint from hathor.exception import InvalidNewTransaction -from hathor.reward_lock import iter_spent_rewards from hathor.transaction import TxInput, TxOutput, TxVersion from hathor.transaction.base_transaction import TX_HASH_SIZE, GenericVertex from hathor.transaction.exceptions import InvalidToken from hathor.transaction.static_metadata import TransactionStaticMetadata from hathor.transaction.util import VerboseCallback, unpack, unpack_len from hathor.types import TokenUid, VertexId -from hathor.util import not_none if TYPE_CHECKING: from hathor.conf.settings import HathorSettings @@ -122,41 +119,6 @@ def create_from_struct(cls, struct_bytes: bytes, storage: Optional['TransactionS return tx - def calculate_height(self) -> int: - # XXX: transactions don't have height, using 0 as a placeholder - return 0 - - def calculate_min_height(self) -> int: - """Calculates the min height the first block confirming this tx needs to have for reward lock verification. - - Assumes tx has been fully verified (parents and inputs exist and have complete metadata). 
- """ - if self.is_genesis: - return 0 - return max( - # 1) don't drop the min height of any parent tx or input tx - self._calculate_inherited_min_height(), - # 2) include the min height for any reward being spent - self._calculate_my_min_height(), - ) - - def _calculate_inherited_min_height(self) -> int: - """ Calculates min height inherited from any input or parent""" - assert self.storage is not None - min_height = 0 - iter_parents = map(self.storage.get_transaction, self.get_tx_parents()) - iter_inputs = map(self.get_spent_tx, self.inputs) - for tx in chain(iter_parents, iter_inputs): - min_height = max(min_height, not_none(tx.get_metadata().min_height)) - return min_height - - def _calculate_my_min_height(self) -> int: - """ Calculates min height derived from own spent block rewards""" - min_height = 0 - for blk in iter_spent_rewards(self, not_none(self.storage)): - min_height = max(min_height, blk.get_height() + self._settings.REWARD_SPEND_MIN_BLOCKS + 1) - return min_height - def get_funds_fields_from_struct(self, buf: bytes, *, verbose: VerboseCallback = None) -> bytes: """ Gets all funds fields for a transaction from a buffer. @@ -394,5 +356,6 @@ def is_spending_voided_tx(self) -> bool: return False @override - def init_static_metadata_from_storage(self, storage: 'TransactionStorage') -> None: - raise NotImplementedError('this will be implemented') + def init_static_metadata_from_storage(self, settings: HathorSettings, storage: 'TransactionStorage') -> None: + static_metadata = TransactionStaticMetadata.create_from_storage(self, settings, storage) + self.set_static_metadata(static_metadata) diff --git a/hathor/transaction/transaction_metadata.py b/hathor/transaction/transaction_metadata.py index 999691638..db4279e96 100644 --- a/hathor/transaction/transaction_metadata.py +++ b/hathor/transaction/transaction_metadata.py @@ -21,7 +21,7 @@ from hathor.feature_activation.feature import Feature from hathor.feature_activation.model.feature_state import FeatureState from hathor.transaction.validation_state import ValidationState -from hathor.util import practically_equal +from hathor.util import json_dumpb, json_loadb, practically_equal if TYPE_CHECKING: from weakref import ReferenceType # noqa: F401 @@ -43,17 +43,7 @@ class TransactionMetadata: accumulated_weight: float score: float first_block: Optional[bytes] - height: Optional[int] validation: ValidationState - # XXX: this is only used to defer the reward-lock verification from the transaction spending a reward to the first - # block that confirming this transaction, it is important to always have this set to be able to distinguish an old - # metadata (that does not have this calculated, from a tx with a new format that does have this calculated) - min_height: Optional[int] - - # A list of feature activation bit counts. Must only be used by Blocks, is None otherwise. - # Each list index corresponds to a bit position, and its respective value is the rolling count of active bits from - # the previous boundary block up to this block, including it. LSB is on the left. - feature_activation_bit_counts: Optional[list[int]] # A dict of features in the feature activation process and their respective state. Must only be used by Blocks, # is None otherwise. 
This is only used for caching, so it can be safely cleared up, as it would be recalculated @@ -72,9 +62,6 @@ def __init__( hash: Optional[bytes] = None, accumulated_weight: float = 0, score: float = 0, - height: Optional[int] = None, - min_height: Optional[int] = None, - feature_activation_bit_counts: Optional[list[int]] = None, settings: HathorSettings | None = None, ) -> None: from hathor.transaction.genesis import is_genesis @@ -122,17 +109,9 @@ def __init__( # If two blocks verify the same parent block and have the same score, both are valid. self.first_block = None - # Height - self.height = height - - # Min height - self.min_height = min_height - # Validation self.validation = ValidationState.INITIAL - self.feature_activation_bit_counts = feature_activation_bit_counts - settings = settings or get_global_settings() # Genesis specific: @@ -196,7 +175,7 @@ def __eq__(self, other: Any) -> bool: return False for field in ['hash', 'conflict_with', 'voided_by', 'received_by', 'children', 'accumulated_weight', 'twins', 'score', 'first_block', 'validation', - 'min_height', 'feature_activation_bit_counts', 'feature_states']: + 'feature_states']: if (getattr(self, field) or None) != (getattr(other, field) or None): return False @@ -229,9 +208,19 @@ def to_json(self) -> dict[str, Any]: data['twins'] = [x.hex() for x in self.twins] data['accumulated_weight'] = self.accumulated_weight data['score'] = self.score - data['height'] = self.height - data['min_height'] = self.min_height - data['feature_activation_bit_counts'] = self.feature_activation_bit_counts + + vertex = self.get_tx() + data['min_height'] = vertex.static_metadata.min_height + + from hathor.transaction import Block + if isinstance(vertex, Block): + data['height'] = vertex.static_metadata.height + data['feature_activation_bit_counts'] = vertex.static_metadata.feature_activation_bit_counts + else: + # TODO: This is kept here backwards compatibility with transactions, + # but should be removed in the future. + data['height'] = 0 + data['feature_activation_bit_counts'] = [] if self.feature_states is not None: data['feature_states'] = {feature.value: state.value for feature, state in self.feature_states.items()} @@ -247,8 +236,8 @@ def to_json_extended(self, tx_storage: 'TransactionStorage') -> dict[str, Any]: data = self.to_json() first_block_height: Optional[int] if self.first_block is not None: - first_block = tx_storage.get_transaction(self.first_block) - first_block_height = first_block.get_metadata().height + first_block = tx_storage.get_block(self.first_block) + first_block_height = first_block.static_metadata.height else: first_block_height = None data['first_block_height'] = first_block_height @@ -281,9 +270,6 @@ def create_from_json(cls, data: dict[str, Any]) -> 'TransactionMetadata': meta.accumulated_weight = data['accumulated_weight'] meta.score = data.get('score', 0) - meta.height = data.get('height', 0) # XXX: should we calculate the height if it's not defined? 
- meta.min_height = data.get('min_height') - meta.feature_activation_bit_counts = data.get('feature_activation_bit_counts', []) feature_states_raw = data.get('feature_states') if feature_states_raw: @@ -301,6 +287,29 @@ def create_from_json(cls, data: dict[str, Any]) -> 'TransactionMetadata': return meta + @classmethod + def from_bytes(cls, data: bytes) -> 'TransactionMetadata': + """Deserialize a TransactionMetadata instance from bytes.""" + return cls.create_from_json(json_loadb(data)) + + def to_bytes(self) -> bytes: + """Serialize a TransactionMetadata instance to bytes. This should be used for storage.""" + json_dict = self.to_json() + + # The `to_json()` method includes these fields for backwards compatibility with APIs, but since they're not + # part of metadata, they should not be serialized. + if 'height' in json_dict: + del json_dict['height'] + if 'min_height' in json_dict: + del json_dict['min_height'] + if 'feature_activation_bit_counts' in json_dict: + del json_dict['feature_activation_bit_counts'] + # TODO: This one has not been migrated yet, but will be in the next PR + # if 'feature_states' in json_dict: + # del json_dict['feature_states'] + + return json_dumpb(json_dict) + def clone(self) -> 'TransactionMetadata': """Return exact copy without sharing memory. diff --git a/hathor/util.py b/hathor/util.py index cd1f0b090..13a14c20f 100644 --- a/hathor/util.py +++ b/hathor/util.py @@ -478,9 +478,9 @@ def _tx_progress(iter_tx: Iterator['BaseTransaction'], *, log: 'structlog.stdlib log.warn('iterator was slow to yield', took_sec=dt_next) # XXX: this is only informative and made to work with either partially/fully validated blocks/transactions - meta = tx.get_metadata() - if meta.height: - h = max(h, meta.height) + from hathor.transaction import Block + if isinstance(tx, Block): + h = max(h, tx.static_metadata.height) ts_tx = max(ts_tx, tx.timestamp) t_log = time.time() diff --git a/hathor/verification/block_verifier.py b/hathor/verification/block_verifier.py index 2935e24b4..1f84ea24b 100644 --- a/hathor/verification/block_verifier.py +++ b/hathor/verification/block_verifier.py @@ -43,11 +43,10 @@ def __init__( def verify_height(self, block: Block) -> None: """Validate that the block height is enough to confirm all transactions being confirmed.""" - meta = block.get_metadata() - assert meta.height is not None - assert meta.min_height is not None - if meta.height < meta.min_height: - raise RewardLocked(f'Block needs {meta.min_height} height but has {meta.height}') + height = block.static_metadata.height + min_height = block.static_metadata.min_height + if height < min_height: + raise RewardLocked(f'Block needs {min_height} height but has {height}') def verify_weight(self, block: Block) -> None: """Validate minimum block difficulty.""" diff --git a/hathor/verification/transaction_verifier.py b/hathor/verification/transaction_verifier.py index 05f5f3189..6efd28fc3 100644 --- a/hathor/verification/transaction_verifier.py +++ b/hathor/verification/transaction_verifier.py @@ -178,14 +178,13 @@ def verify_reward_locked_for_height( if info is not None: raise RewardLocked(f'Reward {info.block_hash.hex()} still needs {info.blocks_needed} to be unlocked.') - meta = tx.get_metadata() - assert meta.min_height is not None + min_height = tx.static_metadata.min_height # We use +1 here because a tx is valid if it can be confirmed by the next block - if best_height + 1 < meta.min_height: + if best_height + 1 < min_height: if assert_min_height_verification: raise AssertionError('a new tx should 
never be invalid by its inherited min_height.') raise RewardLocked( - f'Tx {tx.hash_hex} has min_height={meta.min_height}, but the best_height={best_height}.' + f'Tx {tx.hash_hex} has min_height={min_height}, but the best_height={best_height}.' ) def verify_number_of_inputs(self, tx: Transaction) -> None: diff --git a/hathor/verification/verification_service.py b/hathor/verification/verification_service.py index 09b3563f9..ca89c47e1 100644 --- a/hathor/verification/verification_service.py +++ b/hathor/verification/verification_service.py @@ -62,15 +62,19 @@ def validate_full( *, skip_block_weight_verification: bool = False, sync_checkpoints: bool = False, - reject_locked_reward: bool = True + reject_locked_reward: bool = True, + init_static_metadata: bool = True, ) -> bool: """ Run full validations (these need access to all dependencies) and update the validation state. If no exception is raised, the ValidationState will end up as `FULL` or `CHECKPOINT_FULL` and return `True`. """ + assert self._tx_storage is not None from hathor.transaction.transaction_metadata import ValidationState meta = vertex.get_metadata() + if init_static_metadata: + vertex.init_static_metadata_from_storage(self._settings, self._tx_storage) # skip full validation when it is a checkpoint if meta.validation.is_checkpoint(): diff --git a/hathor/vertex_handler/vertex_handler.py b/hathor/vertex_handler/vertex_handler.py index 903af0d31..809a7e2c9 100644 --- a/hathor/vertex_handler/vertex_handler.py +++ b/hathor/vertex_handler/vertex_handler.py @@ -153,8 +153,6 @@ def _validate_vertex( if not metadata.validation.is_fully_connected(): try: - # TODO: Remove this from here after a refactor in metadata initialization - vertex.update_reward_lock_metadata() self._verification_service.validate_full(vertex, reject_locked_reward=reject_locked_reward) except HathorError as e: if not fails_silently: @@ -191,7 +189,8 @@ def _post_consensus( assert self._verification_service.validate_full( vertex, skip_block_weight_verification=True, - reject_locked_reward=reject_locked_reward + reject_locked_reward=reject_locked_reward, + init_static_metadata=False, ) self._tx_storage.indexes.update(vertex) if self._tx_storage.indexes.mempool_tips: diff --git a/hathor/wallet/resources/send_tokens.py b/hathor/wallet/resources/send_tokens.py index 936faa2e9..5c6ddacc4 100644 --- a/hathor/wallet/resources/send_tokens.py +++ b/hathor/wallet/resources/send_tokens.py @@ -19,8 +19,10 @@ from hathor.api_util import Resource, render_options, set_cors from hathor.cli.openapi_files.register import register_resource +from hathor.conf.settings import HathorSettings from hathor.crypto.util import decode_address from hathor.exception import InvalidNewTransaction +from hathor.manager import HathorManager from hathor.transaction import Transaction from hathor.transaction.exceptions import TxValidationError from hathor.util import json_dumpb, json_loadb @@ -36,9 +38,10 @@ class SendTokensResource(Resource): """ isLeaf = True - def __init__(self, manager): + def __init__(self, manager: HathorManager, settings: HathorSettings) -> None: # Important to have the manager so we can know the tx_storage self.manager = manager + self._settings = settings def render_POST(self, request): """ POST request for /wallet/send_tokens/ @@ -118,6 +121,7 @@ def render_POST(self, request): return NOT_DONE_YET def _render_POST_thread(self, values: dict[str, Any], request: Request) -> Union[bytes, Transaction]: + assert self.manager.wallet is not None tx = 
self.manager.wallet.prepare_transaction(Transaction, values['inputs'], values['outputs'], values['timestamp']) tx.storage = values['storage'] @@ -127,7 +131,7 @@ def _render_POST_thread(self, values: dict[str, Any], request: Request) -> Union weight = self.manager.daa.minimum_tx_weight(tx) tx.weight = weight self.manager.cpu_mining_service.resolve(tx) - tx.update_reward_lock_metadata() + tx.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) self.manager.verification_service.verify(tx) return tx diff --git a/hathor/wallet/resources/thin_wallet/send_tokens.py b/hathor/wallet/resources/thin_wallet/send_tokens.py index 83f1f4bf5..7dd141829 100644 --- a/hathor/wallet/resources/thin_wallet/send_tokens.py +++ b/hathor/wallet/resources/thin_wallet/send_tokens.py @@ -270,7 +270,7 @@ def _should_stop(): if context.should_stop_mining_thread: raise CancelledError() context.tx.update_hash() - context.tx.update_reward_lock_metadata() + context.tx.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) self.manager.verification_service.verify(context.tx) return context diff --git a/tests/feature_activation/test_feature_service.py b/tests/feature_activation/test_feature_service.py index b81a6c812..7c2d513ac 100644 --- a/tests/feature_activation/test_feature_service.py +++ b/tests/feature_activation/test_feature_service.py @@ -82,6 +82,7 @@ def storage() -> TransactionStorage: block = Block(signal_bits=bits, parents=[parent.hash], storage=storage) block.update_hash() block.get_metadata().validation = ValidationState.FULL + block.init_static_metadata_from_storage(get_global_settings(), storage) storage.save_transaction(block) indexes.height.add_new(height, block.hash, block.timestamp) diff --git a/tests/feature_activation/test_feature_simulation.py b/tests/feature_activation/test_feature_simulation.py index 91b077711..2c5e9094d 100644 --- a/tests/feature_activation/test_feature_simulation.py +++ b/tests/feature_activation/test_feature_simulation.py @@ -228,7 +228,7 @@ def test_feature(self) -> None: non_signaling_block = manager.generate_mining_block() manager.cpu_mining_service.resolve(non_signaling_block) non_signaling_block.signal_bits = 0b10 - non_signaling_block.update_reward_lock_metadata() + non_signaling_block.init_static_metadata_from_storage(settings, manager.tx_storage) with pytest.raises(BlockMustSignalError): manager.verification_service.verify(non_signaling_block) diff --git a/tests/feature_activation/test_mining_simulation.py b/tests/feature_activation/test_mining_simulation.py index f65056ff1..cf4c7a626 100644 --- a/tests/feature_activation/test_mining_simulation.py +++ b/tests/feature_activation/test_mining_simulation.py @@ -70,7 +70,7 @@ def test_signal_bits_in_mining(self) -> None: miner.start() # There are 3 resources available for miners, and all of them should contain the correct signal_bits - get_block_template_resource = GetBlockTemplateResource(manager) + get_block_template_resource = GetBlockTemplateResource(manager, settings) get_block_template_client = StubSite(get_block_template_resource) mining_resource = MiningResource(manager) diff --git a/tests/p2p/test_sync.py b/tests/p2p/test_sync.py index 889715f7b..fc7712495 100644 --- a/tests/p2p/test_sync.py +++ b/tests/p2p/test_sync.py @@ -711,8 +711,8 @@ def test_block_sync_checkpoints(self) -> None: conn.run_one_step(debug=False) self.clock.advance(0.1) - self.assertEqual(self.manager1.tx_storage.get_best_block().get_metadata().height, TOTAL_BLOCKS) - 
self.assertEqual(manager2.tx_storage.get_best_block().get_metadata().height, TOTAL_BLOCKS) + self.assertEqual(self.manager1.tx_storage.get_best_block().static_metadata.height, TOTAL_BLOCKS) + self.assertEqual(manager2.tx_storage.get_best_block().static_metadata.height, TOTAL_BLOCKS) node_sync1 = conn.proto1.state.sync_agent node_sync2 = conn.proto2.state.sync_agent diff --git a/tests/poa/test_poa.py b/tests/poa/test_poa.py index 3ab188975..9534a7d9c 100644 --- a/tests/poa/test_poa.py +++ b/tests/poa/test_poa.py @@ -26,6 +26,7 @@ from hathor.transaction import Block, TxOutput from hathor.transaction.exceptions import PoaValidationError from hathor.transaction.poa import PoaBlock +from hathor.transaction.static_metadata import BlockStaticMetadata from hathor.verification.poa_block_verifier import PoaBlockVerifier @@ -107,8 +108,14 @@ def get_signer() -> tuple[PoaSigner, bytes]: weight=poa.BLOCK_WEIGHT_IN_TURN, parents=[b'parent1', b'parent2'], ) - block._metadata = Mock() - block._metadata.height = 2 + block.set_static_metadata( + BlockStaticMetadata( + min_height=0, + height=2, + feature_activation_bit_counts=[], + feature_states={}, + ) + ) # Test no rewards block.outputs = [TxOutput(123, b'')] @@ -190,7 +197,21 @@ def get_signer() -> tuple[PoaSigner, bytes]: assert str(e.value) == 'block weight is 1.0, expected 2.0' # When we increment the height, the turn inverts - block._metadata.height += 1 + block = PoaBlock( + storage=storage, + timestamp=153, + signal_bits=0b1010, + weight=poa.BLOCK_WEIGHT_IN_TURN, + parents=[b'parent1', b'parent2'], + ) + block.set_static_metadata( + BlockStaticMetadata( + min_height=0, + height=3, + feature_activation_bit_counts=[], + feature_states={}, + ) + ) # Test valid signature with two signers, in turn block.weight = poa.BLOCK_WEIGHT_IN_TURN diff --git a/tests/poa/test_poa_verification.py b/tests/poa/test_poa_verification.py index 88508c7fc..062a0c599 100644 --- a/tests/poa/test_poa_verification.py +++ b/tests/poa/test_poa_verification.py @@ -64,7 +64,7 @@ def _get_valid_poa_block(self) -> PoaBlock: ], ) self.signer.sign_block(block) - block.update_reward_lock_metadata() + block.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) return block def test_poa_block_verify_basic(self) -> None: diff --git a/tests/resources/feature/test_feature.py b/tests/resources/feature/test_feature.py index bc6a9083e..b2caa9099 100644 --- a/tests/resources/feature/test_feature.py +++ b/tests/resources/feature/test_feature.py @@ -24,20 +24,25 @@ from hathor.feature_activation.resources.feature import FeatureResource from hathor.feature_activation.settings import Settings as FeatureSettings from hathor.transaction import Block +from hathor.transaction.static_metadata import BlockStaticMetadata from hathor.transaction.storage import TransactionStorage from tests.resources.base_resource import StubSite @pytest.fixture -def web(): - block_mock = Mock(wraps=Block(), spec_set=Block) - block_mock.get_feature_activation_bit_counts = Mock(return_value=[0, 1, 0, 0]) - block_mock.hash_hex = 'some_hash' - block_mock.get_height = Mock(return_value=123) +def web() -> StubSite: + block = Block(hash=b'some_hash') + static_metadata = BlockStaticMetadata( + height=123, + min_height=0, + feature_activation_bit_counts=[0, 1, 0, 0], + feature_states={}, + ) + block.set_static_metadata(static_metadata) tx_storage = Mock(spec_set=TransactionStorage) - tx_storage.get_best_block = Mock(return_value=block_mock) - tx_storage.get_transaction = Mock(return_value=block_mock) + 
tx_storage.get_best_block = Mock(return_value=block) + tx_storage.get_transaction = Mock(return_value=block) def get_state(*, block: Block, feature: Feature) -> FeatureState: return FeatureState.ACTIVE if feature is Feature.NOP_FEATURE_1 else FeatureState.STARTED @@ -81,11 +86,11 @@ def get_state(*, block: Block, feature: Feature) -> FeatureState: return StubSite(feature_resource) -def test_get_features(web): +def test_get_features(web: StubSite) -> None: response = web.get('feature') result = response.result.json_value() expected = dict( - block_hash='some_hash', + block_hash=b'some_hash'.hex(), block_height=123, features=[ dict( @@ -116,7 +121,7 @@ def test_get_features(web): assert result == expected -def test_get_block_features(web): +def test_get_block_features(web: StubSite) -> None: response = web.get('feature', args={b'block': b'1234'}) result = response.result.json_value() expected = dict( diff --git a/tests/resources/transaction/test_mining.py b/tests/resources/transaction/test_mining.py index caae54ee2..80ba0bbaa 100644 --- a/tests/resources/transaction/test_mining.py +++ b/tests/resources/transaction/test_mining.py @@ -11,7 +11,7 @@ class BaseMiningApiTest(_BaseResourceTest._ResourceTest): def setUp(self): super().setUp() - self.get_block_template = StubSite(mining.GetBlockTemplateResource(self.manager)) + self.get_block_template = StubSite(mining.GetBlockTemplateResource(self.manager, self.manager._settings)) self.submit_block = StubSite(mining.SubmitBlockResource(self.manager)) @inlineCallbacks @@ -38,9 +38,9 @@ def test_get_block_template_with_address(self): 'accumulated_weight': 1.0, 'score': 0, 'height': 1, - 'min_height': None, + 'min_height': 0, 'first_block': None, - 'feature_activation_bit_counts': None + 'feature_activation_bit_counts': [0, 0, 0, 0] }, 'tokens': [], 'data': '', @@ -71,9 +71,9 @@ def test_get_block_template_without_address(self): 'accumulated_weight': 1.0, 'score': 0, 'height': 1, - 'min_height': None, + 'min_height': 0, 'first_block': None, - 'feature_activation_bit_counts': None + 'feature_activation_bit_counts': [0, 0, 0, 0] }, 'tokens': [], 'data': '', diff --git a/tests/resources/transaction/test_pushtx.py b/tests/resources/transaction/test_pushtx.py index 7ed5b3e36..25d5e6976 100644 --- a/tests/resources/transaction/test_pushtx.py +++ b/tests/resources/transaction/test_pushtx.py @@ -25,7 +25,7 @@ class BasePushTxTest(_BaseResourceTest._ResourceTest): def setUp(self): super().setUp() self.web = StubSite(PushTxResource(self.manager)) - self.web_tokens = StubSite(SendTokensResource(self.manager)) + self.web_tokens = StubSite(SendTokensResource(self.manager, self._settings)) def get_tx(self, inputs: Optional[list[WalletInputInfo]] = None, outputs: Optional[list[WalletOutputInfo]] = None) -> Transaction: @@ -47,6 +47,7 @@ def get_tx(self, inputs: Optional[list[WalletInputInfo]] = None, tx.timestamp = max(max_ts_spent_tx + 1, int(self.manager.reactor.seconds())) tx.parents = self.manager.get_new_tx_parents(tx.timestamp) self.manager.cpu_mining_service.resolve(tx) + tx.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) return tx def push_tx(self, data=None): diff --git a/tests/resources/transaction/test_tx.py b/tests/resources/transaction/test_tx.py index 4d4344c0e..01cac3f8c 100644 --- a/tests/resources/transaction/test_tx.py +++ b/tests/resources/transaction/test_tx.py @@ -3,6 +3,7 @@ from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction from hathor.transaction.resources import 
TransactionResource +from hathor.transaction.static_metadata import TransactionStaticMetadata from hathor.transaction.token_creation_tx import TokenCreationTransaction from hathor.transaction.validation_state import ValidationState from tests import unittest @@ -31,7 +32,7 @@ def test_get_one(self): dict_test['raw'] = genesis_tx.get_struct().hex() dict_test['nonce'] = str(dict_test['nonce']) if genesis_tx.is_block: - dict_test['height'] = genesis_tx.calculate_height() + dict_test['height'] = genesis_tx.static_metadata.height self.assertEqual(data_success['tx'], dict_test) # Test sending hash that does not exist @@ -87,6 +88,7 @@ def test_get_one_known_tx(self): '0248b9e7d6a626f45dec86975b00f4dd53f84f1f0091125250b044e49023fbbd0f74f6093cdd2226fdff3e09a1000002be') tx = Transaction.create_from_struct(bytes.fromhex(tx_hex), self.manager.tx_storage) tx.get_metadata().validation = ValidationState.FULL + tx.set_static_metadata(TransactionStaticMetadata(min_height=0)) self.manager.tx_storage.save_transaction(tx) tx_parent1_hex = ('0001010102001c382847d8440d05da95420bee2ebeb32bc437f82a9ae47b0745c8a29a7b0d001c382847d844' @@ -99,6 +101,7 @@ def test_get_one_known_tx(self): '8fb080f53a0c9c57ddb000000120') tx_parent1 = Transaction.create_from_struct(bytes.fromhex(tx_parent1_hex), self.manager.tx_storage) tx_parent1.get_metadata().validation = ValidationState.FULL + tx_parent1.set_static_metadata(TransactionStaticMetadata(min_height=0)) self.manager.tx_storage.save_transaction(tx_parent1) tx_parent2_hex = ('0001000103001f16fe62e3433bcc74b262c11a1fa94fcb38484f4d8fb080f53a0c9c57ddb001006946304402' @@ -111,6 +114,7 @@ def test_get_one_known_tx(self): 'd57709926b76e64763bf19c3f13eeac30000016d') tx_parent2 = Transaction.create_from_struct(bytes.fromhex(tx_parent2_hex), self.manager.tx_storage) tx_parent2.get_metadata().validation = ValidationState.FULL + tx_parent2.set_static_metadata(TransactionStaticMetadata(min_height=0)) self.manager.tx_storage.save_transaction(tx_parent2) tx_input_hex = ('0001010203007231eee3cb6160d95172a409d634d0866eafc8775f5729fff6a61e7850aba500b3ab76c5337b55' @@ -126,6 +130,7 @@ def test_get_one_known_tx(self): 'cfaf6e7ceb2ba91c9c84009c8174d4a46ebcc789d1989e3dec5b68cffeef239fd8cf86ef62728e2eacee000001b6') tx_input = Transaction.create_from_struct(bytes.fromhex(tx_input_hex), self.manager.tx_storage) tx_input.get_metadata().validation = ValidationState.FULL + tx_input.set_static_metadata(TransactionStaticMetadata(min_height=0)) self.manager.tx_storage.save_transaction(tx_input) # XXX: this is completely dependant on MemoryTokensIndex implementation, hence use_memory_storage=True @@ -193,6 +198,7 @@ def test_get_one_known_tx_with_authority(self): '5114256caacfb8f6dd13db33000020393') tx = Transaction.create_from_struct(bytes.fromhex(tx_hex), self.manager.tx_storage) tx.get_metadata().validation = ValidationState.FULL + tx.set_static_metadata(TransactionStaticMetadata(min_height=0)) self.manager.tx_storage.save_transaction(tx) tx_parent1_hex = ('0001010203000023b318c91dcfd4b967b205dc938f9f5e2fd5114256caacfb8f6dd13db330000023b318c91dcfd' @@ -208,6 +214,7 @@ def test_get_one_known_tx_with_authority(self): 'd13db3300038c3d3b69ce90bb88c0c4d6a87b9f0c349e5b10c9b7ce6714f996e512ac16400021261') tx_parent1 = Transaction.create_from_struct(bytes.fromhex(tx_parent1_hex), self.manager.tx_storage) tx_parent1.get_metadata().validation = ValidationState.FULL + tx_parent1.set_static_metadata(TransactionStaticMetadata(min_height=0)) self.manager.tx_storage.save_transaction(tx_parent1) tx_parent2_hex = 
('000201040000476810205cb3625d62897fcdad620e01d66649869329640f5504d77e960d01006a473045022100c' @@ -222,6 +229,7 @@ def test_get_one_known_tx_with_authority(self): tx_parent2_bytes = bytes.fromhex(tx_parent2_hex) tx_parent2 = TokenCreationTransaction.create_from_struct(tx_parent2_bytes, self.manager.tx_storage) tx_parent2.get_metadata().validation = ValidationState.FULL + tx_parent2.set_static_metadata(TransactionStaticMetadata(min_height=0)) self.manager.tx_storage.save_transaction(tx_parent2) # Both inputs are the same as the last parent, so no need to manually add them @@ -271,7 +279,7 @@ def test_first_block(self): self.assertEqual(data['meta']['first_block'], block.hash_hex) # now check that the first_block_height was correctly included - self.assertEqual(data['meta']['first_block_height'], block.get_metadata().height) + self.assertEqual(data['meta']['first_block_height'], block.static_metadata.height) @inlineCallbacks def test_get_many(self): @@ -514,6 +522,7 @@ def test_partially_validated_not_found(self): '0248b9e7d6a626f45dec86975b00f4dd53f84f1f0091125250b044e49023fbbd0f74f6093cdd2226fdff3e09a1000002be') tx = Transaction.create_from_struct(bytes.fromhex(tx_hex), self.manager.tx_storage) tx.set_validation(ValidationState.BASIC) + tx.set_static_metadata(TransactionStaticMetadata(min_height=0)) with self.manager.tx_storage.allow_partially_validated_context(): self.manager.tx_storage.save_transaction(tx) diff --git a/tests/resources/transaction/test_utxo_search.py b/tests/resources/transaction/test_utxo_search.py index 5929f7314..e906c1592 100644 --- a/tests/resources/transaction/test_utxo_search.py +++ b/tests/resources/transaction/test_utxo_search.py @@ -59,7 +59,7 @@ def test_simple_gets(self): 'index': 0, 'amount': 6400, 'timelock': None, - 'heightlock': b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, + 'heightlock': b.static_metadata.height + self._settings.REWARD_SPEND_MIN_BLOCKS, } for b in blocks[:1]]) # Success non-empty address with medium amount, will require more than one output @@ -72,7 +72,7 @@ def test_simple_gets(self): 'index': 0, 'amount': 6400, 'timelock': None, - 'heightlock': b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, + 'heightlock': b.static_metadata.height + self._settings.REWARD_SPEND_MIN_BLOCKS, } for b in blocks[4:1:-1]]) # Success non-empty address with exact amount, will require all UTXOs @@ -85,7 +85,7 @@ def test_simple_gets(self): 'index': 0, 'amount': 6400, 'timelock': None, - 'heightlock': b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, + 'heightlock': b.static_metadata.height + self._settings.REWARD_SPEND_MIN_BLOCKS, } for b in blocks[::-1]]) # Success non-empty address with excessive amount, will require all UTXOs, even if it's not enough @@ -98,7 +98,7 @@ def test_simple_gets(self): 'index': 0, 'amount': 6400, 'timelock': None, - 'heightlock': b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, + 'heightlock': b.static_metadata.height + self._settings.REWARD_SPEND_MIN_BLOCKS, } for b in blocks[::-1]]) diff --git a/tests/resources/wallet/test_send_tokens.py b/tests/resources/wallet/test_send_tokens.py index 3c98bf3df..f0eb54427 100644 --- a/tests/resources/wallet/test_send_tokens.py +++ b/tests/resources/wallet/test_send_tokens.py @@ -17,7 +17,7 @@ class BaseSendTokensTest(_BaseResourceTest._ResourceTest): def setUp(self): super().setUp() - self.web = StubSite(SendTokensResource(self.manager)) + self.web = StubSite(SendTokensResource(self.manager, self._settings)) self.web_mining 
= StubSite(MiningResource(self.manager)) self.web_balance = StubSite(BalanceResource(self.manager)) self.web_history = StubSite(HistoryResource(self.manager)) @@ -194,7 +194,7 @@ def test_tx_weight(self): self.assertFalse(data['success']) def test_error_request(self): - resource = SendTokensResource(self.manager) + resource = SendTokensResource(self.manager, self._settings) request = TestDummyRequest('POST', 'wallet/send_tokens', {}) self.assertIsNotNone(request._finishedDeferreds) diff --git a/tests/tx/test_block.py b/tests/tx/test_block.py index 9996d9f55..1e103ed2f 100644 --- a/tests/tx/test_block.py +++ b/tests/tx/test_block.py @@ -21,8 +21,9 @@ from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import BlockIsMissingSignal, BlockIsSignaling, FeatureService from hathor.indexes import MemoryIndexesManager -from hathor.transaction import Block, TransactionMetadata +from hathor.transaction import Block from hathor.transaction.exceptions import BlockMustSignalError +from hathor.transaction.static_metadata import BlockStaticMetadata from hathor.transaction.storage import TransactionMemoryStorage, TransactionStorage from hathor.transaction.validation_state import ValidationState from hathor.util import not_none @@ -33,7 +34,7 @@ def test_calculate_feature_activation_bit_counts_genesis(): settings = get_global_settings() storage = TransactionMemoryStorage(settings=settings) genesis_block = storage.get_block(settings.GENESIS_BLOCK_HASH) - result = genesis_block.get_feature_activation_bit_counts() + result = genesis_block.static_metadata.feature_activation_bit_counts assert result == [0, 0, 0, 0] @@ -64,9 +65,8 @@ def tx_storage() -> TransactionStorage: parent = not_none(storage.get_block_by_height(height - 1)) block = Block(signal_bits=bits, parents=[parent.hash], storage=storage) block.update_hash() - meta = block.get_metadata() - meta.validation = ValidationState.FULL - meta.height = height + block.get_metadata().validation = ValidationState.FULL + block.init_static_metadata_from_storage(get_global_settings(), storage) storage.save_transaction(block) indexes.height.add_new(height, block.hash, block.timestamp) @@ -94,20 +94,20 @@ def test_calculate_feature_activation_bit_counts( expected_counts: list[int] ) -> None: block = not_none(tx_storage.get_block_by_height(block_height)) - assert block.get_feature_activation_bit_counts() == expected_counts + assert block.static_metadata.feature_activation_bit_counts == expected_counts -def test_get_height(): - block_hash = b'some_hash' - block_height = 10 - metadata = TransactionMetadata(hash=block_hash, height=block_height) - - storage = Mock(spec_set=TransactionStorage) - storage.get_metadata = Mock(side_effect=lambda _hash: metadata if _hash == block_hash else None) - - block = Block(hash=block_hash, storage=storage) +def test_get_height() -> None: + static_metadata = BlockStaticMetadata( + min_height=0, + height=10, + feature_activation_bit_counts=[], + feature_states={}, + ) + block = Block() + block.set_static_metadata(static_metadata) - assert block.get_height() == block_height + assert block.get_height() == 10 @pytest.mark.parametrize( diff --git a/tests/tx/test_blockchain.py b/tests/tx/test_blockchain.py index b14a16f56..923cf3f96 100644 --- a/tests/tx/test_blockchain.py +++ b/tests/tx/test_blockchain.py @@ -336,7 +336,7 @@ def test_multiple_forks(self): def test_block_height(self): genesis_block = self.genesis_blocks[0] - self.assertEqual(genesis_block.get_metadata().height, 0) + 
self.assertEqual(genesis_block.static_metadata.height, 0) manager = self.create_peer('testnet', tx_storage=self.tx_storage) @@ -345,7 +345,7 @@ def test_block_height(self): for i, block in enumerate(blocks): expected_height = i + 1 - self.assertEqual(block.get_metadata().height, expected_height) + self.assertEqual(block.static_metadata.height, expected_height) def test_tokens_issued_per_block(self): manager = self.create_peer('testnet', tx_storage=self.tx_storage) @@ -378,7 +378,7 @@ def test_block_rewards(self): outputs = block.outputs self.assertEqual(len(outputs), 1) output = outputs[0] - height = block.get_metadata().height + height = block.static_metadata.height self.assertEqual(output.value, manager.get_tokens_issued_per_block(height)) def test_daa_sanity(self): diff --git a/tests/tx/test_cache_storage.py b/tests/tx/test_cache_storage.py index d9aac999c..d2698d84e 100644 --- a/tests/tx/test_cache_storage.py +++ b/tests/tx/test_cache_storage.py @@ -36,6 +36,7 @@ def _get_new_tx(self, nonce): from hathor.transaction.validation_state import ValidationState tx = Transaction(nonce=nonce, storage=self.cache_storage) tx.update_hash() + tx.init_static_metadata_from_storage(self._settings, self.cache_storage) meta = TransactionMetadata(hash=tx.hash) meta.validation = ValidationState.FULL tx._metadata = meta diff --git a/tests/tx/test_indexes.py b/tests/tx/test_indexes.py index d119e9580..2dd4457d3 100644 --- a/tests/tx/test_indexes.py +++ b/tests/tx/test_indexes.py @@ -307,7 +307,7 @@ def test_utxo_index_simple(self): address=address, amount=6400, timelock=None, - heightlock=b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, + heightlock=b.static_metadata.height + self._settings.REWARD_SPEND_MIN_BLOCKS, ) for b in blocks[:1] ] ) @@ -322,7 +322,7 @@ def test_utxo_index_simple(self): address=address, amount=6400, timelock=None, - heightlock=b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, + heightlock=b.static_metadata.height + self._settings.REWARD_SPEND_MIN_BLOCKS, ) for b in blocks[4:1:-1] ] ) @@ -337,7 +337,7 @@ def test_utxo_index_simple(self): address=address, amount=6400, timelock=None, - heightlock=b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, + heightlock=b.static_metadata.height + self._settings.REWARD_SPEND_MIN_BLOCKS, ) for b in blocks[::-1] ] ) @@ -352,7 +352,7 @@ def test_utxo_index_simple(self): address=address, amount=6400, timelock=None, - heightlock=b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, + heightlock=b.static_metadata.height + self._settings.REWARD_SPEND_MIN_BLOCKS, ) for b in blocks[::-1] ] ) @@ -464,7 +464,7 @@ def test_utxo_index_after_push_tx(self): address=address, amount=6400, timelock=None, - heightlock=b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, + heightlock=b.static_metadata.height + self._settings.REWARD_SPEND_MIN_BLOCKS, ) for b in blocks ] ) @@ -536,7 +536,7 @@ def test_utxo_index_last(self): address=address, amount=6400, timelock=None, - heightlock=b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, + heightlock=b.static_metadata.height + self._settings.REWARD_SPEND_MIN_BLOCKS, ) for b in blocks ] ) diff --git a/tests/tx/test_reward_lock.py b/tests/tx/test_reward_lock.py index 5f8c943fa..99b9678a8 100644 --- a/tests/tx/test_reward_lock.py +++ b/tests/tx/test_reward_lock.py @@ -39,7 +39,7 @@ def _add_reward_block(self): self.manager.cpu_mining_service.resolve(reward_block) self.assertTrue(self.manager.propagate_tx(reward_block)) # XXX: calculate unlock 
height AFTER adding the block so the height is correctly calculated - unlock_height = reward_block.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS + 1 + unlock_height = reward_block.static_metadata.height + self._settings.REWARD_SPEND_MIN_BLOCKS + 1 return reward_block, unlock_height def _spend_reward_tx(self, manager, reward_block): @@ -61,6 +61,7 @@ def _spend_reward_tx(self, manager, reward_block): input_.data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx) tx.update_initial_metadata(save=False) + tx.init_static_metadata_from_storage(self._settings, self.tx_storage) return tx def test_classic_reward_lock(self): @@ -70,14 +71,14 @@ def test_classic_reward_lock(self): # reward cannot be spent while not enough blocks are added for _ in range(self._settings.REWARD_SPEND_MIN_BLOCKS): tx = self._spend_reward_tx(self.manager, reward_block) - self.assertEqual(tx.get_metadata().min_height, unlock_height) + self.assertEqual(tx.static_metadata.min_height, unlock_height) with self.assertRaises(RewardLocked): self.manager.verification_service.verify(tx) add_new_blocks(self.manager, 1, advance_clock=1) # now it should be spendable tx = self._spend_reward_tx(self.manager, reward_block) - self.assertEqual(tx.get_metadata().min_height, unlock_height) + self.assertEqual(tx.static_metadata.min_height, unlock_height) self.assertTrue(self.manager.propagate_tx(tx, fails_silently=False)) def test_block_with_not_enough_height(self): @@ -91,7 +92,7 @@ def test_block_with_not_enough_height(self): # XXX: this situation is impossible in practice, but we force it to test that when a block tries to confirm a # transaction before it can the RewardLocked exception is raised tx = self._spend_reward_tx(self.manager, reward_block) - self.assertEqual(tx.get_metadata().min_height, unlock_height) + self.assertEqual(tx.static_metadata.min_height, unlock_height) self.assertTrue(self.manager.on_new_tx(tx, fails_silently=False, reject_locked_reward=False)) # new block will try to confirm it and fail @@ -113,7 +114,7 @@ def test_block_with_enough_height(self): # add tx that spends the reward tx = self._spend_reward_tx(self.manager, reward_block) - self.assertEqual(tx.get_metadata().min_height, unlock_height) + self.assertEqual(tx.static_metadata.min_height, unlock_height) self.assertTrue(self.manager.on_new_tx(tx, fails_silently=False)) # new block will be able to confirm it @@ -130,7 +131,7 @@ def test_mempool_tx_with_not_enough_height(self): # add tx to mempool, must fail reward-lock verification tx = self._spend_reward_tx(self.manager, reward_block) - self.assertEqual(tx.get_metadata().min_height, unlock_height) + self.assertEqual(tx.static_metadata.min_height, unlock_height) with self.assertRaises(RewardLocked): self.manager.verification_service.verify(tx) with self.assertRaises(InvalidNewTransaction): @@ -145,7 +146,7 @@ def test_mempool_tx_with_enough_height(self): # add tx that spends the reward, must not fail tx = self._spend_reward_tx(self.manager, reward_block) - self.assertEqual(tx.get_metadata().min_height, unlock_height) + self.assertEqual(tx.static_metadata.min_height, unlock_height) self.assertTrue(self.manager.on_new_tx(tx, fails_silently=False)) def test_mempool_tx_invalid_after_reorg(self): @@ -157,7 +158,7 @@ def test_mempool_tx_invalid_after_reorg(self): # add tx that spends the reward, must not fail tx = self._spend_reward_tx(self.manager, reward_block) - self.assertEqual(tx.get_metadata().min_height, unlock_height) + 
self.assertEqual(tx.static_metadata.min_height, unlock_height) self.assertTrue(self.manager.on_new_tx(tx, fails_silently=False)) # re-org: replace last two blocks with one block, new height will be just one short of enough @@ -203,7 +204,7 @@ def test_classic_reward_lock_timestamp_expected_to_fail(self): tx = self._spend_reward_tx(self.manager, reward_block) tx.timestamp = blocks[-1].timestamp self.manager.cpu_mining_service.resolve(tx) - self.assertEqual(tx.get_metadata().min_height, unlock_height) + self.assertEqual(tx.static_metadata.min_height, unlock_height) with self.assertRaises(RewardLocked): self.manager.verification_service.verify(tx) diff --git a/tests/tx/test_tokens.py b/tests/tx/test_tokens.py index a54e4f765..ebcb4aafd 100644 --- a/tests/tx/test_tokens.py +++ b/tests/tx/test_tokens.py @@ -112,7 +112,7 @@ def test_token_transfer(self): public_bytes, signature = wallet.get_input_aux_data(data_to_sign, wallet.get_private_key(self.address_b58)) tx2.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx2) - tx2.update_reward_lock_metadata() + tx2.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) self.manager.verification_service.verify(tx2) # missing tokens diff --git a/tests/tx/test_tx.py b/tests/tx/test_tx.py index 349731ffd..48e1ad6e8 100644 --- a/tests/tx/test_tx.py +++ b/tests/tx/test_tx.py @@ -189,6 +189,7 @@ def test_children_update(self): children_len.append(len(metadata.children)) # update metadata + tx.init_static_metadata_from_storage(self._settings, self.tx_storage) tx.update_initial_metadata() # genesis transactions should have only this tx in their children set @@ -250,6 +251,7 @@ def test_merge_mined_no_magic(self): ) ) + b.init_static_metadata_from_storage(self._settings, self.tx_storage) with self.assertRaises(AuxPowNoMagicError): self._verifiers.merge_mined_block.verify_aux_pow(b) @@ -323,6 +325,8 @@ def test_merge_mined_multiple_magic(self): assert bytes(b1) != bytes(b2) assert b1.calculate_hash() == b2.calculate_hash() + b1.init_static_metadata_from_storage(self._settings, self.tx_storage) + b2.init_static_metadata_from_storage(self._settings, self.tx_storage) self._verifiers.merge_mined_block.verify_aux_pow(b1) # OK with self.assertRaises(AuxPowUnexpectedMagicError): self._verifiers.merge_mined_block.verify_aux_pow(b2) @@ -568,7 +572,7 @@ def test_regular_tx(self): _input.data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx) - tx.update_reward_lock_metadata() + tx.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) self.manager.verification_service.verify(tx) def test_tx_weight_too_high(self): @@ -907,11 +911,14 @@ def test_tx_version_and_signal_bits(self): self.assertEqual(str(cm.exception), 'version 0x200 must not be larger than one byte') # test serialization doesn't mess up with version + genesis_block = self.genesis_blocks[0] block = Block( signal_bits=0xF0, version=0x0F, nonce=100, - weight=1) + weight=1, + parents=[genesis_block.hash] + ) block2 = block.clone() self.assertEqual(block.signal_bits, block2.signal_bits) self.assertEqual(block.version, block2.version) diff --git a/tests/tx/test_tx_storage.py b/tests/tx/test_tx_storage.py index de377cb9b..420412fce 100644 --- a/tests/tx/test_tx_storage.py +++ b/tests/tx/test_tx_storage.py @@ -62,7 +62,7 @@ def setUp(self): self.block = Block(timestamp=previous_timestamp + 1, weight=12, outputs=[output], parents=block_parents, nonce=100781, storage=self.tx_storage) 
self.manager.cpu_mining_service.resolve(self.block) - self.block.update_reward_lock_metadata() + self.block.init_static_metadata_from_storage(self._settings, self.tx_storage) self.manager.verification_service.verify(self.block) self.block.get_metadata().validation = ValidationState.FULL @@ -81,6 +81,7 @@ def setUp(self): parents=tx_parents, storage=self.tx_storage) self.manager.cpu_mining_service.resolve(self.tx) self.tx.get_metadata().validation = ValidationState.FULL + self.tx.init_static_metadata_from_storage(self._settings, self.tx_storage) # Disable weakref to test the internal methods. Otherwise, most methods return objects from weakref. self.tx_storage._disable_weakref() @@ -374,6 +375,7 @@ def test_allow_scope_context_stacking(self): def test_save_token_creation_tx(self): tx = create_tokens(self.manager, propagate=False) tx.get_metadata().validation = ValidationState.FULL + tx.init_static_metadata_from_storage(self._settings, self.tx_storage) self.validate_save(tx) def _validate_not_in_index(self, tx, index): diff --git a/tests/tx/test_verification.py b/tests/tx/test_verification.py index 90622eae6..b33430455 100644 --- a/tests/tx/test_verification.py +++ b/tests/tx/test_verification.py @@ -54,7 +54,7 @@ def _get_valid_block(self) -> Block: self._settings.GENESIS_TX2_HASH ] ) - block.update_reward_lock_metadata() + block.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) return block def _get_valid_merge_mined_block(self) -> MergeMinedBlock: @@ -70,7 +70,7 @@ def _get_valid_merge_mined_block(self) -> MergeMinedBlock: self._settings.GENESIS_TX2_HASH ], ) - block.update_reward_lock_metadata() + block.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) return block def _get_valid_tx(self) -> Transaction: @@ -95,7 +95,7 @@ def _get_valid_tx(self) -> Transaction: self._settings.GENESIS_TX2_HASH, ] ) - tx.update_reward_lock_metadata() + tx.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) data_to_sign = tx.get_sighash_all() assert self.manager.wallet @@ -108,7 +108,7 @@ def _get_valid_token_creation_tx(self) -> TokenCreationTransaction: add_blocks_unlock_reward(self.manager) assert self.manager.wallet tx = create_tokens(self.manager, self.manager.wallet.get_unused_address()) - tx.update_reward_lock_metadata() + tx.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) return tx def test_block_verify_basic(self) -> None: diff --git a/tests/wallet/test_wallet.py b/tests/wallet/test_wallet.py index ab87d299e..72d6bf698 100644 --- a/tests/wallet/test_wallet.py +++ b/tests/wallet/test_wallet.py @@ -85,6 +85,7 @@ def test_wallet_create_transaction(self): tx1.storage = self.storage tx1.update_hash() tx1.get_metadata().validation = ValidationState.FULL + tx1.init_static_metadata_from_storage(self._settings, self.storage) self.storage.save_transaction(tx1) w.on_new_tx(tx1) self.assertEqual(len(w.spent_txs), 1) @@ -101,6 +102,7 @@ def test_wallet_create_transaction(self): tx2.storage = self.storage tx2.update_hash() tx2.get_metadata().validation = ValidationState.FULL + tx2.init_static_metadata_from_storage(self._settings, self.storage) self.storage.save_transaction(tx2) w.on_new_tx(tx2) self.assertEqual(len(w.spent_txs), 2) @@ -206,7 +208,7 @@ def test_create_token_transaction(self): tx2.timestamp = tx.timestamp + 1 tx2.parents = self.manager.get_new_tx_parents() self.manager.cpu_mining_service.resolve(tx2) - tx2.update_reward_lock_metadata() + tx2.init_static_metadata_from_storage(self._settings, 
self.manager.tx_storage) self.manager.verification_service.verify(tx2) self.assertNotEqual(len(tx2.inputs), 0) diff --git a/tests/wallet/test_wallet_hd.py b/tests/wallet/test_wallet_hd.py index d006b18ae..3366df47c 100644 --- a/tests/wallet/test_wallet_hd.py +++ b/tests/wallet/test_wallet_hd.py @@ -45,6 +45,7 @@ def test_transaction_and_balance(self): tx1.storage = self.tx_storage tx1.get_metadata().validation = ValidationState.FULL self.wallet.on_new_tx(tx1) + tx1.init_static_metadata_from_storage(self._settings, self.tx_storage) self.tx_storage.save_transaction(tx1) self.assertEqual(len(self.wallet.spent_txs), 1) utxo = self.wallet.unspent_txs[self._settings.HATHOR_TOKEN_UID].get((tx1.hash, 0)) @@ -63,6 +64,7 @@ def test_transaction_and_balance(self): tx2.storage = self.tx_storage verifier.verify_script(tx=tx2, input_tx=tx2.inputs[0], spent_tx=tx1) tx2.get_metadata().validation = ValidationState.FULL + tx2.init_static_metadata_from_storage(self._settings, self.tx_storage) self.tx_storage.save_transaction(tx2) self.wallet.on_new_tx(tx2) self.assertEqual(len(self.wallet.spent_txs), 2) From da32f8092a1ae038046864a4edc6e448b9e1b811 Mon Sep 17 00:00:00 2001 From: Luis Helder Date: Thu, 22 Aug 2024 17:51:03 -0300 Subject: [PATCH 17/61] fix: Healthcheck content-type (#1110) --- hathor/healthcheck/resources/healthcheck.py | 1 + tests/resources/healthcheck/test_healthcheck.py | 1 + 2 files changed, 2 insertions(+) diff --git a/hathor/healthcheck/resources/healthcheck.py b/hathor/healthcheck/resources/healthcheck.py index eb1de7eed..bcc872e4f 100644 --- a/hathor/healthcheck/resources/healthcheck.py +++ b/hathor/healthcheck/resources/healthcheck.py @@ -53,6 +53,7 @@ def _render_success(self, result: HealthcheckResponse, request: Request) -> None status_code = result.get_http_status_code() request.setResponseCode(status_code) + request.setHeader(b'content-type', b'application/json; charset=utf-8') request.write(json_dumpb(result.to_json())) request.finish() diff --git a/tests/resources/healthcheck/test_healthcheck.py b/tests/resources/healthcheck/test_healthcheck.py index 5beff1f24..c857e5877 100644 --- a/tests/resources/healthcheck/test_healthcheck.py +++ b/tests/resources/healthcheck/test_healthcheck.py @@ -191,6 +191,7 @@ def test_get_ready(self): response = yield self.web.get('/health') data = response.json_value() + self.assertTrue('application/json; charset=utf-8' in response.responseHeaders.getRawHeaders('content-type')) self.assertEqual(response.responseCode, 200) self.assertEqual(data, { 'status': 'pass', From 5bb5dba4cf13043a71c6f98464f8c6c5237258b1 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Tue, 27 Aug 2024 11:23:46 -0300 Subject: [PATCH 18/61] refactor(metadata): pre-refactors for migrating feature states to static metadata (#1016) --- hathor/builder/builder.py | 7 +- hathor/builder/cli_builder.py | 9 +- hathor/builder/resources_builder.py | 2 +- .../bit_signaling_service.py | 18 +- hathor/feature_activation/feature_service.py | 28 +- ...feature_description.py => feature_info.py} | 2 +- .../feature_activation/model/feature_state.py | 9 +- .../feature_activation/resources/feature.py | 24 +- hathor/transaction/__init__.py | 2 + hathor/transaction/static_metadata.py | 14 +- hathor/transaction/storage/rocksdb_storage.py | 4 +- hathor/verification/block_verifier.py | 5 +- hathor/verification/verification_service.py | 2 +- hathor/vertex_handler/vertex_handler.py | 6 +- .../test_bit_signaling_service.py | 54 +-- .../test_feature_service.py | 331 +++++++----------- 
.../test_feature_simulation.py | 8 +- tests/resources/feature/test_feature.py | 33 +- tests/tx/test_verification.py | 6 - 19 files changed, 237 insertions(+), 327 deletions(-) rename hathor/feature_activation/model/{feature_description.py => feature_info.py} (95%) diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index cffa2c08e..b4977820d 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -535,10 +535,7 @@ def _get_or_create_feature_service(self) -> FeatureService: if self._feature_service is None: settings = self._get_or_create_settings() tx_storage = self._get_or_create_tx_storage() - self._feature_service = FeatureService( - feature_settings=settings.FEATURE_ACTIVATION, - tx_storage=tx_storage - ) + self._feature_service = FeatureService(settings=settings, tx_storage=tx_storage) return self._feature_service @@ -549,7 +546,7 @@ def _get_or_create_bit_signaling_service(self) -> BitSignalingService: feature_service = self._get_or_create_feature_service() feature_storage = self._get_or_create_feature_storage() self._bit_signaling_service = BitSignalingService( - feature_settings=settings.FEATURE_ACTIVATION, + settings=settings, feature_service=feature_service, tx_storage=tx_storage, support_features=self._support_features, diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index f13b88dad..3f10d7454 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -149,7 +149,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: else: indexes = RocksDBIndexesManager(self.rocksdb_storage) - kwargs = {} + kwargs: dict[str, Any] = {} if not self._args.cache: # We should only pass indexes if cache is disabled. Otherwise, # only TransactionCacheStorage should have indexes. @@ -268,13 +268,10 @@ def create_manager(self, reactor: Reactor) -> HathorManager: self.log.info('--x-enable-event-queue flag provided. 
' 'The events detected by the full node will be stored and can be retrieved by clients') - self.feature_service = FeatureService( - feature_settings=settings.FEATURE_ACTIVATION, - tx_storage=tx_storage - ) + self.feature_service = FeatureService(settings=settings, tx_storage=tx_storage) bit_signaling_service = BitSignalingService( - feature_settings=settings.FEATURE_ACTIVATION, + settings=settings, feature_service=self.feature_service, tx_storage=tx_storage, support_features=self._args.signal_support, diff --git a/hathor/builder/resources_builder.py b/hathor/builder/resources_builder.py index 9d48b082a..ce453aef6 100644 --- a/hathor/builder/resources_builder.py +++ b/hathor/builder/resources_builder.py @@ -237,7 +237,7 @@ def create_resources(self) -> server.Site: ( b'feature', FeatureResource( - feature_settings=settings.FEATURE_ACTIVATION, + settings=settings, feature_service=self._feature_service, tx_storage=self.manager.tx_storage ), diff --git a/hathor/feature_activation/bit_signaling_service.py b/hathor/feature_activation/bit_signaling_service.py index 639eb1a5c..3d21c32e5 100644 --- a/hathor/feature_activation/bit_signaling_service.py +++ b/hathor/feature_activation/bit_signaling_service.py @@ -14,11 +14,11 @@ from structlog import get_logger +from hathor.conf.settings import HathorSettings from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import FeatureService from hathor.feature_activation.model.criteria import Criteria from hathor.feature_activation.model.feature_state import FeatureState -from hathor.feature_activation.settings import Settings as FeatureSettings from hathor.feature_activation.storage.feature_activation_storage import FeatureActivationStorage from hathor.transaction import Block from hathor.transaction.storage import TransactionStorage @@ -29,7 +29,7 @@ class BitSignalingService: __slots__ = ( '_log', - '_feature_settings', + '_settings', '_feature_service', '_tx_storage', '_support_features', @@ -40,7 +40,7 @@ class BitSignalingService: def __init__( self, *, - feature_settings: FeatureSettings, + settings: HathorSettings, feature_service: FeatureService, tx_storage: TransactionStorage, support_features: set[Feature], @@ -48,7 +48,7 @@ def __init__( feature_storage: FeatureActivationStorage | None, ) -> None: self._log = logger.new() - self._feature_settings = feature_settings + self._settings = settings self._feature_service = feature_service self._tx_storage = tx_storage self._support_features = support_features @@ -163,14 +163,14 @@ def _log_signal_bits(self, feature: Feature, enable_bit: bool, support: bool, no def _get_signaling_features(self, block: Block) -> dict[Feature, Criteria]: """Given a specific block, return all features that are in a signaling state for that block.""" - feature_descriptions = self._feature_service.get_bits_description(block=block) + feature_infos = self._feature_service.get_feature_infos(block=block) signaling_features = { - feature: description.criteria - for feature, description in feature_descriptions.items() - if description.state in FeatureState.get_signaling_states() + feature: feature_info.criteria + for feature, feature_info in feature_infos.items() + if feature_info.state in FeatureState.get_signaling_states() } - assert len(signaling_features) <= self._feature_settings.max_signal_bits, ( + assert len(signaling_features) <= self._settings.FEATURE_ACTIVATION.max_signal_bits, ( 'Invalid state. Signaling more features than the allowed maximum.' 
) diff --git a/hathor/feature_activation/feature_service.py b/hathor/feature_activation/feature_service.py index bc3003825..2b8212cef 100644 --- a/hathor/feature_activation/feature_service.py +++ b/hathor/feature_activation/feature_service.py @@ -15,10 +15,10 @@ from dataclasses import dataclass from typing import TYPE_CHECKING, Optional, TypeAlias +from hathor.conf.settings import HathorSettings from hathor.feature_activation.feature import Feature -from hathor.feature_activation.model.feature_description import FeatureDescription +from hathor.feature_activation.model.feature_info import FeatureInfo from hathor.feature_activation.model.feature_state import FeatureState -from hathor.feature_activation.settings import Settings as FeatureSettings if TYPE_CHECKING: from hathor.feature_activation.bit_signaling_service import BitSignalingService @@ -44,8 +44,8 @@ class BlockIsMissingSignal: class FeatureService: __slots__ = ('_feature_settings', '_tx_storage', 'bit_signaling_service') - def __init__(self, *, feature_settings: FeatureSettings, tx_storage: 'TransactionStorage') -> None: - self._feature_settings = feature_settings + def __init__(self, *, settings: HathorSettings, tx_storage: 'TransactionStorage') -> None: + self._feature_settings = settings.FEATURE_ACTIVATION self._tx_storage = tx_storage self.bit_signaling_service: Optional['BitSignalingService'] = None @@ -64,11 +64,11 @@ def is_signaling_mandatory_features(self, block: 'Block') -> BlockSignalingState height = block.static_metadata.height offset_to_boundary = height % self._feature_settings.evaluation_interval remaining_blocks = self._feature_settings.evaluation_interval - offset_to_boundary - 1 - descriptions = self.get_bits_description(block=block) + feature_infos = self.get_feature_infos(block=block) must_signal_features = ( - feature for feature, description in descriptions.items() - if description.state is FeatureState.MUST_SIGNAL + feature for feature, feature_info in feature_infos.items() + if feature_info.state is FeatureState.MUST_SIGNAL ) for feature in must_signal_features: @@ -192,12 +192,12 @@ def _calculate_new_state( if previous_state is FeatureState.FAILED: return FeatureState.FAILED - raise ValueError(f'Unknown previous state: {previous_state}') + raise NotImplementedError(f'Unknown previous state: {previous_state}') - def get_bits_description(self, *, block: 'Block') -> dict[Feature, FeatureDescription]: + def get_feature_infos(self, *, block: 'Block') -> dict[Feature, FeatureInfo]: """Returns the criteria definition and feature state for all features at a certain block.""" return { - feature: FeatureDescription( + feature: FeatureInfo( criteria=criteria, state=self.get_state(block=block, feature=feature) ) @@ -223,9 +223,11 @@ def _get_ancestor_at_height(self, *, block: 'Block', ancestor_height: int) -> 'B if parent_block.static_metadata.height == ancestor_height: return parent_block - if not parent_metadata.voided_by and (ancestor := self._tx_storage.get_block_by_height(ancestor_height)): - from hathor.transaction import Block - assert isinstance(ancestor, Block) + if not parent_metadata.voided_by: + ancestor = self._tx_storage.get_block_by_height(ancestor_height) + assert ancestor is not None, ( + 'it is guaranteed that the ancestor of a fully connected and non-voided block is in the height index' + ) return ancestor return self._get_ancestor_iteratively(block=parent_block, ancestor_height=ancestor_height) diff --git a/hathor/feature_activation/model/feature_description.py 
b/hathor/feature_activation/model/feature_info.py similarity index 95% rename from hathor/feature_activation/model/feature_description.py rename to hathor/feature_activation/model/feature_info.py index a7f461c21..e2b8e7dda 100644 --- a/hathor/feature_activation/model/feature_description.py +++ b/hathor/feature_activation/model/feature_info.py @@ -18,7 +18,7 @@ from hathor.feature_activation.model.feature_state import FeatureState -class FeatureDescription(NamedTuple): +class FeatureInfo(NamedTuple): """Represents all information related to one feature, that is, its criteria and state.""" criteria: Criteria state: FeatureState diff --git a/hathor/feature_activation/model/feature_state.py b/hathor/feature_activation/model/feature_state.py index bb781f5eb..6020a9aa4 100644 --- a/hathor/feature_activation/model/feature_state.py +++ b/hathor/feature_activation/model/feature_state.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from enum import Enum +from enum import Enum, unique -class FeatureState(Enum): +@unique +class FeatureState(str, Enum): """ Possible states a feature can be in, for each block. @@ -35,6 +36,10 @@ class FeatureState(Enum): ACTIVE = 'ACTIVE' FAILED = 'FAILED' + def is_active(self) -> bool: + """Return whether the state is active.""" + return self is FeatureState.ACTIVE + @staticmethod def get_signaling_states() -> set['FeatureState']: """ diff --git a/hathor/feature_activation/resources/feature.py b/hathor/feature_activation/resources/feature.py index f39fb1a37..75e7c16bf 100644 --- a/hathor/feature_activation/resources/feature.py +++ b/hathor/feature_activation/resources/feature.py @@ -18,10 +18,10 @@ from hathor.api_util import Resource, set_cors from hathor.cli.openapi_files.register import register_resource +from hathor.conf.settings import HathorSettings from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import FeatureService from hathor.feature_activation.model.feature_state import FeatureState -from hathor.feature_activation.settings import Settings as FeatureSettings from hathor.transaction import Block from hathor.transaction.storage import TransactionStorage from hathor.utils.api import ErrorResponse, QueryParams, Response @@ -36,12 +36,12 @@ class FeatureResource(Resource): def __init__( self, *, - feature_settings: FeatureSettings, + settings: HathorSettings, feature_service: FeatureService, tx_storage: TransactionStorage ) -> None: super().__init__() - self._feature_settings = feature_settings + self._feature_settings = settings.FEATURE_ACTIVATION self._feature_service = feature_service self.tx_storage = tx_storage @@ -68,17 +68,17 @@ def get_block_features(self, request: Request) -> bytes: return error.json_dumpb() signal_bits = [] - feature_descriptions = self._feature_service.get_bits_description(block=block) + feature_infos = self._feature_service.get_feature_infos(block=block) - for feature, description in feature_descriptions.items(): - if description.state not in FeatureState.get_signaling_states(): + for feature, feature_info in feature_infos.items(): + if feature_info.state not in FeatureState.get_signaling_states(): continue block_feature = GetBlockFeatureResponse( - bit=description.criteria.bit, - signal=block.get_feature_activation_bit_value(description.criteria.bit), + bit=feature_info.criteria.bit, + signal=block.get_feature_activation_bit_value(feature_info.criteria.bit), feature=feature, - 
feature_state=description.state.name + feature_state=feature_info.state.name ) signal_bits.append(block_feature) @@ -90,10 +90,12 @@ def get_block_features(self, request: Request) -> bytes: def get_features(self) -> bytes: best_block = self.tx_storage.get_best_block() bit_counts = best_block.static_metadata.feature_activation_bit_counts + feature_infos = self._feature_service.get_feature_infos(block=best_block) features = [] - for feature, criteria in self._feature_settings.features.items(): - state = self._feature_service.get_state(block=best_block, feature=feature) + for feature, feature_info in feature_infos.items(): + state = feature_info.state + criteria = feature_info.criteria threshold_count = criteria.get_threshold(self._feature_settings) threshold_percentage = threshold_count / self._feature_settings.evaluation_interval acceptance_percentage = None diff --git a/hathor/transaction/__init__.py b/hathor/transaction/__init__.py index 9b803cbd2..23e98d7ae 100644 --- a/hathor/transaction/__init__.py +++ b/hathor/transaction/__init__.py @@ -19,6 +19,7 @@ TxInput, TxOutput, TxVersion, + Vertex, sum_weights, ) from hathor.transaction.block import Block @@ -29,6 +30,7 @@ __all__ = [ 'Transaction', 'BitcoinAuxPow', + 'Vertex', 'BaseTransaction', 'Block', 'MergeMinedBlock', diff --git a/hathor/transaction/static_metadata.py b/hathor/transaction/static_metadata.py index 22473e5af..d52c435e8 100644 --- a/hathor/transaction/static_metadata.py +++ b/hathor/transaction/static_metadata.py @@ -14,9 +14,7 @@ from __future__ import annotations -import dataclasses from abc import ABC -from dataclasses import dataclass from itertools import chain, starmap, zip_longest from operator import add from typing import TYPE_CHECKING, Callable @@ -26,7 +24,8 @@ from hathor.feature_activation.feature import Feature from hathor.feature_activation.model.feature_state import FeatureState from hathor.types import VertexId -from hathor.util import json_dumpb, json_loadb +from hathor.util import json_loadb +from hathor.utils.pydantic import BaseModel if TYPE_CHECKING: from hathor.conf.settings import HathorSettings @@ -34,8 +33,7 @@ from hathor.transaction.storage import TransactionStorage -@dataclass(slots=True, frozen=True, kw_only=True) -class VertexStaticMetadata(ABC): +class VertexStaticMetadata(ABC, BaseModel): """ Static Metadata represents vertex attributes that are not intrinsic to the vertex data, but can be calculated from only the vertex itself and its dependencies, and whose values never change. 
@@ -49,10 +47,6 @@ class VertexStaticMetadata(ABC): # metadata (that does not have this calculated, from a tx with a new format that does have this calculated) min_height: int - def to_bytes(self) -> bytes: - """Convert this static metadata instance to a json bytes representation.""" - return json_dumpb(dataclasses.asdict(self)) - @classmethod def from_bytes(cls, data: bytes, *, target: 'BaseTransaction') -> 'VertexStaticMetadata': """Create a static metadata instance from a json bytes representation, with a known vertex type target.""" @@ -68,7 +62,6 @@ def from_bytes(cls, data: bytes, *, target: 'BaseTransaction') -> 'VertexStaticM raise NotImplementedError -@dataclass(slots=True, frozen=True, kw_only=True) class BlockStaticMetadata(VertexStaticMetadata): height: int @@ -181,7 +174,6 @@ def _get_previous_feature_activation_bit_counts( return parent_block.static_metadata.feature_activation_bit_counts -@dataclass(slots=True, frozen=True, kw_only=True) class TransactionStaticMetadata(VertexStaticMetadata): @classmethod def create_from_storage(cls, tx: 'Transaction', settings: HathorSettings, storage: 'TransactionStorage') -> Self: diff --git a/hathor/transaction/storage/rocksdb_storage.py b/hathor/transaction/storage/rocksdb_storage.py index 3e64379b3..efe8f607e 100644 --- a/hathor/transaction/storage/rocksdb_storage.py +++ b/hathor/transaction/storage/rocksdb_storage.py @@ -113,7 +113,7 @@ def _save_transaction(self, tx: 'BaseTransaction', *, only_metadata: bool = Fals @override def _save_static_metadata(self, tx: 'BaseTransaction') -> None: - self._db.put((self._cf_static_meta, tx.hash), tx.static_metadata.to_bytes()) + self._db.put((self._cf_static_meta, tx.hash), tx.static_metadata.json_dumpb()) def _load_static_metadata(self, vertex: 'BaseTransaction') -> None: """Set vertex static metadata loaded from what's saved in this storage.""" @@ -272,4 +272,4 @@ def migrate_static_metadata(self, log: BoundLogger) -> None: ) # Save it manually to the CF - self._db.put((self._cf_static_meta, vertex_id), static_metadata.to_bytes()) + self._db.put((self._cf_static_meta, vertex_id), static_metadata.json_dumpb()) diff --git a/hathor/verification/block_verifier.py b/hathor/verification/block_verifier.py index 1f84ea24b..21e50eedd 100644 --- a/hathor/verification/block_verifier.py +++ b/hathor/verification/block_verifier.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing_extensions import assert_never + from hathor.conf.settings import HathorSettings from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.feature_activation.feature_service import BlockIsMissingSignal, BlockIsSignaling, FeatureService @@ -93,5 +95,4 @@ def verify_mandatory_signaling(self, block: Block) -> None: f"Block must signal support for feature '{feature.value}' during MUST_SIGNAL phase." ) case _: - # TODO: This will be changed to assert_never() so mypy can check it. 
- raise NotImplementedError + assert_never(signaling_state) diff --git a/hathor/verification/verification_service.py b/hathor/verification/verification_service.py index ca89c47e1..e966692ec 100644 --- a/hathor/verification/verification_service.py +++ b/hathor/verification/verification_service.py @@ -194,6 +194,7 @@ def _verify_block(self, block: Block) -> None: self.verifiers.block.verify_mandatory_signaling(block) def _verify_merge_mined_block(self, block: MergeMinedBlock) -> None: + self.verifiers.merge_mined_block.verify_aux_pow(block) self._verify_block(block) def _verify_poa_block(self, block: PoaBlock) -> None: @@ -274,7 +275,6 @@ def _verify_without_storage_block(self, block: Block) -> None: self._verify_without_storage_base_block(block) def _verify_without_storage_merge_mined_block(self, block: MergeMinedBlock) -> None: - self.verifiers.merge_mined_block.verify_aux_pow(block) self._verify_without_storage_block(block) def _verify_without_storage_poa_block(self, block: PoaBlock) -> None: diff --git a/hathor/vertex_handler/vertex_handler.py b/hathor/vertex_handler/vertex_handler.py index 809a7e2c9..473516a56 100644 --- a/hathor/vertex_handler/vertex_handler.py +++ b/hathor/vertex_handler/vertex_handler.py @@ -228,10 +228,10 @@ def _log_new_object(self, tx: BaseTransaction, message_fmt: str, *, quiet: bool) if tx.is_block: message = message_fmt.format('block') if isinstance(tx, Block): - feature_descriptions = self._feature_service.get_bits_description(block=tx) + feature_infos = self._feature_service.get_feature_infos(block=tx) feature_states = { - feature.value: description.state.value - for feature, description in feature_descriptions.items() + feature.value: info.state.value + for feature, info in feature_infos.items() } kwargs['_height'] = tx.get_height() kwargs['feature_states'] = feature_states diff --git a/tests/feature_activation/test_bit_signaling_service.py b/tests/feature_activation/test_bit_signaling_service.py index 930ca39f2..8b487be92 100644 --- a/tests/feature_activation/test_bit_signaling_service.py +++ b/tests/feature_activation/test_bit_signaling_service.py @@ -16,11 +16,12 @@ import pytest +from hathor.conf.settings import HathorSettings from hathor.feature_activation.bit_signaling_service import BitSignalingService from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import FeatureService from hathor.feature_activation.model.criteria import Criteria -from hathor.feature_activation.model.feature_description import FeatureDescription +from hathor.feature_activation.model.feature_info import FeatureInfo from hathor.feature_activation.model.feature_state import FeatureState from hathor.feature_activation.settings import Settings as FeatureSettings from hathor.transaction import Block @@ -28,15 +29,15 @@ @pytest.mark.parametrize( - 'features_description', + 'features_infos', [ {}, { - Feature.NOP_FEATURE_1: FeatureDescription(state=FeatureState.DEFINED, criteria=Mock()) + Feature.NOP_FEATURE_1: FeatureInfo(state=FeatureState.DEFINED, criteria=Mock()) }, { - Feature.NOP_FEATURE_1: FeatureDescription(state=FeatureState.FAILED, criteria=Mock()), - Feature.NOP_FEATURE_2: FeatureDescription(state=FeatureState.ACTIVE, criteria=Mock()) + Feature.NOP_FEATURE_1: FeatureInfo(state=FeatureState.FAILED, criteria=Mock()), + Feature.NOP_FEATURE_2: FeatureInfo(state=FeatureState.ACTIVE, criteria=Mock()) } ] ) @@ -50,11 +51,11 @@ ] ) def test_generate_signal_bits_no_signaling_features( - features_description: dict[Feature, 
FeatureDescription], + features_infos: dict[Feature, FeatureInfo], support_features: set[Feature], not_support_features: set[Feature] ) -> None: - signal_bits = _test_generate_signal_bits(features_description, support_features, not_support_features) + signal_bits = _test_generate_signal_bits(features_infos, support_features, not_support_features) assert signal_bits == 0 @@ -74,7 +75,7 @@ def test_generate_signal_bits_signaling_features( expected_signal_bits: int, ) -> None: features_description = { - Feature.NOP_FEATURE_1: FeatureDescription( + Feature.NOP_FEATURE_1: FeatureInfo( state=FeatureState.STARTED, criteria=Criteria( bit=0, @@ -83,7 +84,7 @@ def test_generate_signal_bits_signaling_features( version='0.0.0' ) ), - Feature.NOP_FEATURE_2: FeatureDescription( + Feature.NOP_FEATURE_2: FeatureInfo( state=FeatureState.MUST_SIGNAL, criteria=Criteria( bit=1, @@ -92,7 +93,7 @@ def test_generate_signal_bits_signaling_features( version='0.0.0' ) ), - Feature.NOP_FEATURE_3: FeatureDescription( + Feature.NOP_FEATURE_3: FeatureInfo( state=FeatureState.LOCKED_IN, criteria=Criteria( bit=3, @@ -123,8 +124,8 @@ def test_generate_signal_bits_signaling_features_with_defaults( not_support_features: set[Feature], expected_signal_bits: int, ) -> None: - features_description = { - Feature.NOP_FEATURE_1: FeatureDescription( + feature_infos = { + Feature.NOP_FEATURE_1: FeatureInfo( state=FeatureState.STARTED, criteria=Criteria( bit=0, @@ -134,7 +135,7 @@ def test_generate_signal_bits_signaling_features_with_defaults( signal_support_by_default=True ) ), - Feature.NOP_FEATURE_2: FeatureDescription( + Feature.NOP_FEATURE_2: FeatureInfo( state=FeatureState.MUST_SIGNAL, criteria=Criteria( bit=1, @@ -144,7 +145,7 @@ def test_generate_signal_bits_signaling_features_with_defaults( signal_support_by_default=True ) ), - Feature.NOP_FEATURE_3: FeatureDescription( + Feature.NOP_FEATURE_3: FeatureInfo( state=FeatureState.LOCKED_IN, criteria=Criteria( bit=3, @@ -155,21 +156,23 @@ def test_generate_signal_bits_signaling_features_with_defaults( ) } - signal_bits = _test_generate_signal_bits(features_description, support_features, not_support_features) + signal_bits = _test_generate_signal_bits(feature_infos, support_features, not_support_features) assert signal_bits == expected_signal_bits def _test_generate_signal_bits( - features_description: dict[Feature, FeatureDescription], + feature_infos: dict[Feature, FeatureInfo], support_features: set[Feature], not_support_features: set[Feature] ) -> int: + settings = Mock(spec_set=HathorSettings) + settings.FEATURE_ACTIVATION = FeatureSettings() feature_service = Mock(spec_set=FeatureService) - feature_service.get_bits_description = lambda block: features_description + feature_service.get_feature_infos = lambda block: feature_infos service = BitSignalingService( - feature_settings=FeatureSettings(), + settings=settings, feature_service=feature_service, tx_storage=Mock(), support_features=support_features, @@ -212,7 +215,7 @@ def test_support_intersection_validation( ) -> None: with pytest.raises(ValueError) as e: BitSignalingService( - feature_settings=Mock(), + settings=Mock(), feature_service=Mock(), tx_storage=Mock(), support_features=support_features, @@ -252,22 +255,25 @@ def test_non_signaling_features_warning( not_support_features: set[Feature], non_signaling_features: set[str], ) -> None: + settings = Mock(spec_set=HathorSettings) + settings.FEATURE_ACTIVATION = FeatureSettings() + best_block = Mock(spec_set=Block) best_block.get_height = Mock(return_value=123) 
best_block.hash_hex = 'abc' tx_storage = Mock(spec_set=TransactionStorage) tx_storage.get_best_block = lambda: best_block - def get_bits_description_mock(block: Block) -> dict[Feature, FeatureDescription]: + def get_feature_infos_mock(block: Block) -> dict[Feature, FeatureInfo]: if block == best_block: return {} raise NotImplementedError feature_service = Mock(spec_set=FeatureService) - feature_service.get_bits_description = get_bits_description_mock + feature_service.get_feature_infos = get_feature_infos_mock service = BitSignalingService( - feature_settings=FeatureSettings(), + settings=settings, feature_service=feature_service, tx_storage=tx_storage, support_features=support_features, @@ -290,7 +296,7 @@ def get_bits_description_mock(block: Block) -> dict[Feature, FeatureDescription] def test_on_must_signal_not_supported() -> None: service = BitSignalingService( - feature_settings=Mock(), + settings=Mock(), feature_service=Mock(), tx_storage=Mock(), support_features=set(), @@ -306,7 +312,7 @@ def test_on_must_signal_not_supported() -> None: def test_on_must_signal_supported() -> None: service = BitSignalingService( - feature_settings=Mock(), + settings=Mock(), feature_service=Mock(), tx_storage=Mock(), support_features=set(), diff --git a/tests/feature_activation/test_feature_service.py b/tests/feature_activation/test_feature_service.py index 7c2d513ac..a1c2f1549 100644 --- a/tests/feature_activation/test_feature_service.py +++ b/tests/feature_activation/test_feature_service.py @@ -17,6 +17,7 @@ import pytest from hathor.conf.get_settings import get_global_settings +from hathor.conf.settings import HathorSettings from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import ( BlockIsMissingSignal, @@ -25,7 +26,7 @@ FeatureService, ) from hathor.feature_activation.model.criteria import Criteria -from hathor.feature_activation.model.feature_description import FeatureDescription +from hathor.feature_activation.model.feature_info import FeatureInfo from hathor.feature_activation.model.feature_state import FeatureState from hathor.feature_activation.settings import Settings as FeatureSettings from hathor.indexes import MemoryIndexesManager @@ -35,9 +36,7 @@ from hathor.util import not_none -@pytest.fixture -def storage() -> TransactionStorage: - settings = get_global_settings() +def get_storage(settings: HathorSettings, *, up_to_height: int) -> TransactionStorage: indexes = MemoryIndexesManager() storage = TransactionMemoryStorage(indexes=indexes, settings=settings) feature_activation_bits = [ @@ -75,40 +74,34 @@ def storage() -> TransactionStorage: 0b0000, ] - for height, bits in enumerate(feature_activation_bits): + for height, bits in enumerate(feature_activation_bits[:up_to_height + 1]): if height == 0: continue parent = not_none(storage.get_block_by_height(height - 1)) block = Block(signal_bits=bits, parents=[parent.hash], storage=storage) block.update_hash() block.get_metadata().validation = ValidationState.FULL - block.init_static_metadata_from_storage(get_global_settings(), storage) + block.init_static_metadata_from_storage(settings, storage) storage.save_transaction(block) indexes.height.add_new(height, block.hash, block.timestamp) return storage -@pytest.fixture -def feature_settings() -> FeatureSettings: - return FeatureSettings( +def get_settings(*, features: dict[Feature, Criteria]) -> HathorSettings: + feature_settings = FeatureSettings.construct( evaluation_interval=4, - default_threshold=3 + default_threshold=3, + 
features=features, ) + settings = get_global_settings()._replace(FEATURE_ACTIVATION=feature_settings) + return settings -@pytest.fixture -def service(feature_settings: FeatureSettings, storage: TransactionStorage) -> FeatureService: - service = FeatureService( - feature_settings=feature_settings, - tx_storage=storage - ) - service.bit_signaling_service = Mock() - - return service - - -def test_get_state_genesis(storage: TransactionStorage, service: FeatureService) -> None: +def test_get_state_genesis() -> None: + settings = get_settings(features={}) + storage = get_storage(settings, up_to_height=0) + service = FeatureService(settings=settings, tx_storage=storage) block = not_none(storage.get_block_by_height(0)) result = service.get_state(block=block, feature=Mock()) @@ -116,9 +109,14 @@ def test_get_state_genesis(storage: TransactionStorage, service: FeatureService) @pytest.mark.parametrize('block_height', [0, 1, 2, 3]) -def test_get_state_first_interval(storage: TransactionStorage, service: FeatureService, block_height: int) -> None: +def test_get_state_first_interval(block_height: int) -> None: + settings = get_settings(features={ + Feature.NOP_FEATURE_1: Mock() + }) + storage = get_storage(settings, up_to_height=block_height) + service = FeatureService(settings=settings, tx_storage=storage) block = not_none(storage.get_block_by_height(block_height)) - result = service.get_state(block=block, feature=Mock()) + result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) assert result == FeatureState.DEFINED @@ -132,27 +130,18 @@ def test_get_state_first_interval(storage: TransactionStorage, service: FeatureS (8, FeatureState.DEFINED) ] ) -def test_get_state_from_defined( - storage: TransactionStorage, - block_height: int, - start_height: int, - expected_state: FeatureState -) -> None: - feature_settings = FeatureSettings.construct( - evaluation_interval=4, - features={ - Feature.NOP_FEATURE_1: Criteria.construct( - bit=Mock(), - start_height=start_height, - timeout_height=Mock(), - version=Mock() - ) - } - ) - service = FeatureService( - feature_settings=feature_settings, - tx_storage=storage - ) +def test_get_state_from_defined(block_height: int, start_height: int, expected_state: FeatureState) -> None: + features = { + Feature.NOP_FEATURE_1: Criteria.construct( + bit=Mock(), + start_height=start_height, + timeout_height=Mock(), + version=Mock() + ) + } + settings = get_settings(features=features) + storage = get_storage(settings, up_to_height=block_height) + service = FeatureService(settings=settings, tx_storage=storage) service.bit_signaling_service = Mock() block = not_none(storage.get_block_by_height(block_height)) @@ -164,13 +153,10 @@ def test_get_state_from_defined( @pytest.mark.parametrize('block_height', [12, 13, 14, 15, 16, 17]) @pytest.mark.parametrize('timeout_height', [8, 12]) def test_get_state_from_started_to_failed( - storage: TransactionStorage, block_height: int, timeout_height: int, ) -> None: - feature_settings = FeatureSettings.construct( - evaluation_interval=4, - features={ + features = { Feature.NOP_FEATURE_1: Criteria.construct( bit=3, start_height=0, @@ -179,11 +165,9 @@ def test_get_state_from_started_to_failed( version=Mock() ) } - ) - service = FeatureService( - feature_settings=feature_settings, - tx_storage=storage - ) + settings = get_settings(features=features) + storage = get_storage(settings, up_to_height=block_height) + service = FeatureService(settings=settings, tx_storage=storage) service.bit_signaling_service = Mock() block = 
not_none(storage.get_block_by_height(block_height)) @@ -195,13 +179,10 @@ def test_get_state_from_started_to_failed( @pytest.mark.parametrize('block_height', [8, 9, 10, 11]) @pytest.mark.parametrize('timeout_height', [8, 12]) def test_get_state_from_started_to_must_signal_on_timeout( - storage: TransactionStorage, block_height: int, timeout_height: int, ) -> None: - feature_settings = FeatureSettings.construct( - evaluation_interval=4, - features={ + features = { Feature.NOP_FEATURE_1: Criteria.construct( bit=3, start_height=0, @@ -210,11 +191,9 @@ def test_get_state_from_started_to_must_signal_on_timeout( version=Mock() ) } - ) - service = FeatureService( - feature_settings=feature_settings, - tx_storage=storage - ) + settings = get_settings(features=features) + storage = get_storage(settings, up_to_height=block_height) + service = FeatureService(settings=settings, tx_storage=storage) service.bit_signaling_service = Mock() block = not_none(storage.get_block_by_height(block_height)) @@ -227,7 +206,6 @@ def test_get_state_from_started_to_must_signal_on_timeout( @pytest.mark.parametrize('block_height', [8, 9, 10, 11]) @pytest.mark.parametrize('default_threshold', [0, 1, 2, 3]) def test_get_state_from_started_to_locked_in_on_default_threshold( - storage: TransactionStorage, block_height: int, default_threshold: int ) -> None: @@ -244,10 +222,9 @@ def test_get_state_from_started_to_locked_in_on_default_threshold( ) } ) - service = FeatureService( - feature_settings=feature_settings, - tx_storage=storage - ) + settings = get_global_settings()._replace(FEATURE_ACTIVATION=feature_settings) + storage = get_storage(settings, up_to_height=block_height) + service = FeatureService(settings=settings, tx_storage=storage) service.bit_signaling_service = Mock() block = not_none(storage.get_block_by_height(block_height)) @@ -259,13 +236,10 @@ def test_get_state_from_started_to_locked_in_on_default_threshold( @pytest.mark.parametrize('block_height', [8, 9, 10, 11]) @pytest.mark.parametrize('custom_threshold', [0, 1, 2, 3]) def test_get_state_from_started_to_locked_in_on_custom_threshold( - storage: TransactionStorage, block_height: int, custom_threshold: int ) -> None: - feature_settings = FeatureSettings.construct( - evaluation_interval=4, - features={ + features = { Feature.NOP_FEATURE_1: Criteria.construct( bit=1, start_height=0, @@ -274,11 +248,9 @@ def test_get_state_from_started_to_locked_in_on_custom_threshold( version=Mock() ) } - ) - service = FeatureService( - feature_settings=feature_settings, - tx_storage=storage - ) + settings = get_settings(features=features) + storage = get_storage(settings, up_to_height=block_height) + service = FeatureService(settings=settings, tx_storage=storage) service.bit_signaling_service = Mock() block = not_none(storage.get_block_by_height(block_height)) @@ -297,14 +269,11 @@ def test_get_state_from_started_to_locked_in_on_custom_threshold( ] ) def test_get_state_from_started_to_started( - storage: TransactionStorage, block_height: int, lock_in_on_timeout: bool, timeout_height: int, ) -> None: - feature_settings = FeatureSettings.construct( - evaluation_interval=4, - features={ + features = { Feature.NOP_FEATURE_1: Criteria.construct( bit=3, start_height=0, @@ -313,11 +282,9 @@ def test_get_state_from_started_to_started( version=Mock() ) } - ) - service = FeatureService( - feature_settings=feature_settings, - tx_storage=storage - ) + settings = get_settings(features=features) + storage = get_storage(settings, up_to_height=block_height) + service = 
FeatureService(settings=settings, tx_storage=storage) service.bit_signaling_service = Mock() block = not_none(storage.get_block_by_height(block_height)) @@ -328,12 +295,9 @@ def test_get_state_from_started_to_started( @pytest.mark.parametrize('block_height', [12, 13, 14, 15]) def test_get_state_from_must_signal_to_locked_in( - storage: TransactionStorage, block_height: int, ) -> None: - feature_settings = FeatureSettings.construct( - evaluation_interval=4, - features={ + features = { Feature.NOP_FEATURE_1: Criteria.construct( bit=3, start_height=0, @@ -342,11 +306,9 @@ def test_get_state_from_must_signal_to_locked_in( version=Mock() ) } - ) - service = FeatureService( - feature_settings=feature_settings, - tx_storage=storage - ) + settings = get_settings(features=features) + storage = get_storage(settings, up_to_height=block_height) + service = FeatureService(settings=settings, tx_storage=storage) service.bit_signaling_service = Mock() block = not_none(storage.get_block_by_height(block_height)) @@ -358,13 +320,10 @@ def test_get_state_from_must_signal_to_locked_in( @pytest.mark.parametrize('block_height', [16, 17, 18, 19]) @pytest.mark.parametrize('minimum_activation_height', [0, 4, 8, 12, 16]) def test_get_state_from_locked_in_to_active( - storage: TransactionStorage, block_height: int, minimum_activation_height: int, ) -> None: - feature_settings = FeatureSettings.construct( - evaluation_interval=4, - features={ + features = { Feature.NOP_FEATURE_1: Criteria.construct( bit=3, start_height=0, @@ -374,11 +333,9 @@ def test_get_state_from_locked_in_to_active( version=Mock() ) } - ) - service = FeatureService( - feature_settings=feature_settings, - tx_storage=storage - ) + settings = get_settings(features=features) + storage = get_storage(settings, up_to_height=block_height) + service = FeatureService(settings=settings, tx_storage=storage) service.bit_signaling_service = Mock() block = not_none(storage.get_block_by_height(block_height)) @@ -390,13 +347,10 @@ def test_get_state_from_locked_in_to_active( @pytest.mark.parametrize('block_height', [16, 17, 18, 19]) @pytest.mark.parametrize('minimum_activation_height', [17, 20, 100]) def test_get_state_from_locked_in_to_locked_in( - storage: TransactionStorage, block_height: int, minimum_activation_height: int, ) -> None: - feature_settings = FeatureSettings.construct( - evaluation_interval=4, - features={ + features = { Feature.NOP_FEATURE_1: Criteria.construct( bit=3, start_height=0, @@ -406,11 +360,9 @@ def test_get_state_from_locked_in_to_locked_in( version=Mock() ) } - ) - service = FeatureService( - feature_settings=feature_settings, - tx_storage=storage - ) + settings = get_settings(features=features) + storage = get_storage(settings, up_to_height=block_height) + service = FeatureService(settings=settings, tx_storage=storage) service.bit_signaling_service = Mock() block = not_none(storage.get_block_by_height(block_height)) @@ -420,10 +372,8 @@ def test_get_state_from_locked_in_to_locked_in( @pytest.mark.parametrize('block_height', [20, 21, 22, 23]) -def test_get_state_from_active(storage: TransactionStorage, block_height: int) -> None: - feature_settings = FeatureSettings.construct( - evaluation_interval=4, - features={ +def test_get_state_from_active(block_height: int) -> None: + features = { Feature.NOP_FEATURE_1: Criteria.construct( bit=3, start_height=0, @@ -432,11 +382,9 @@ def test_get_state_from_active(storage: TransactionStorage, block_height: int) - version=Mock() ) } - ) - service = FeatureService( - 
feature_settings=feature_settings, - tx_storage=storage - ) + settings = get_settings(features=features) + storage = get_storage(settings, up_to_height=block_height) + service = FeatureService(settings=settings, tx_storage=storage) service.bit_signaling_service = Mock() block = not_none(storage.get_block_by_height(block_height)) @@ -446,10 +394,8 @@ def test_get_state_from_active(storage: TransactionStorage, block_height: int) - @pytest.mark.parametrize('block_height', [16, 17, 18, 19]) -def test_caching_mechanism(storage: TransactionStorage, block_height: int) -> None: - feature_settings = FeatureSettings.construct( - evaluation_interval=4, - features={ +def test_is_feature_active(block_height: int) -> None: + features = { Feature.NOP_FEATURE_1: Criteria.construct( bit=3, start_height=0, @@ -458,41 +404,10 @@ def test_caching_mechanism(storage: TransactionStorage, block_height: int) -> No version=Mock() ) } - ) - service = FeatureService(feature_settings=feature_settings, tx_storage=storage) - service.bit_signaling_service = Mock() - block = not_none(storage.get_block_by_height(block_height)) - calculate_new_state_mock = Mock(wraps=service._calculate_new_state) - - with patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock): - result1 = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) - - assert result1 == FeatureState.ACTIVE - assert calculate_new_state_mock.call_count == 4 - - calculate_new_state_mock.reset_mock() - result2 = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) - - assert result2 == FeatureState.ACTIVE - assert calculate_new_state_mock.call_count == 0 - - -@pytest.mark.parametrize('block_height', [16, 17, 18, 19]) -def test_is_feature_active(storage: TransactionStorage, block_height: int) -> None: - feature_settings = FeatureSettings.construct( - evaluation_interval=4, - features={ - Feature.NOP_FEATURE_1: Criteria.construct( - bit=3, - start_height=0, - timeout_height=8, - lock_in_on_timeout=True, - version=Mock() - ) - } - ) + settings = get_settings(features=features) + storage = get_storage(settings, up_to_height=block_height) service = FeatureService( - feature_settings=feature_settings, + settings=settings, tx_storage=storage ) service.bit_signaling_service = Mock() @@ -504,10 +419,8 @@ def test_is_feature_active(storage: TransactionStorage, block_height: int) -> No @pytest.mark.parametrize('block_height', [12, 13, 14, 15]) -def test_get_state_from_failed(storage: TransactionStorage, block_height: int) -> None: - feature_settings = FeatureSettings.construct( - evaluation_interval=4, - features={ +def test_get_state_from_failed(block_height: int) -> None: + features = { Feature.NOP_FEATURE_1: Criteria.construct( bit=Mock(), start_height=0, @@ -515,11 +428,9 @@ def test_get_state_from_failed(storage: TransactionStorage, block_height: int) - version=Mock() ) } - ) - service = FeatureService( - feature_settings=feature_settings, - tx_storage=storage - ) + settings = get_settings(features=features) + storage = get_storage(settings, up_to_height=block_height) + service = FeatureService(settings=settings, tx_storage=storage) service.bit_signaling_service = Mock() block = not_none(storage.get_block_by_height(block_height)) @@ -528,24 +439,26 @@ def test_get_state_from_failed(storage: TransactionStorage, block_height: int) - assert result == FeatureState.FAILED -def test_get_state_undefined_feature(storage: TransactionStorage, service: FeatureService) -> None: +def test_get_state_undefined_feature() -> None: + settings = 
get_settings(features={}) + storage = get_storage(settings, up_to_height=10) block = not_none(storage.get_block_by_height(10)) + service = FeatureService(settings=settings, tx_storage=storage) result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) assert result == FeatureState.DEFINED -def test_get_bits_description(storage: TransactionStorage) -> None: +def test_get_feature_info() -> None: criteria_mock_1 = Criteria.construct(bit=Mock(), start_height=Mock(), timeout_height=Mock(), version=Mock()) criteria_mock_2 = Criteria.construct(bit=Mock(), start_height=Mock(), timeout_height=Mock(), version=Mock()) - feature_settings = FeatureSettings.construct( - features={ - Feature.NOP_FEATURE_1: criteria_mock_1, - Feature.NOP_FEATURE_2: criteria_mock_2 - } - ) + settings = get_settings(features={ + Feature.NOP_FEATURE_1: criteria_mock_1, + Feature.NOP_FEATURE_2: criteria_mock_2 + }) + storage = get_storage(settings, up_to_height=0) service = FeatureService( - feature_settings=feature_settings, + settings=settings, tx_storage=storage ) service.bit_signaling_service = Mock() @@ -558,11 +471,11 @@ def get_state(self: FeatureService, *, block: Block, feature: Feature) -> Featur return states[feature] with patch('hathor.feature_activation.feature_service.FeatureService.get_state', get_state): - result = service.get_bits_description(block=Mock()) + result = service.get_feature_infos(block=Mock()) expected = { - Feature.NOP_FEATURE_1: FeatureDescription(criteria_mock_1, FeatureState.STARTED), - Feature.NOP_FEATURE_2: FeatureDescription(criteria_mock_2, FeatureState.FAILED), + Feature.NOP_FEATURE_1: FeatureInfo(criteria_mock_1, FeatureState.STARTED), + Feature.NOP_FEATURE_2: FeatureInfo(criteria_mock_2, FeatureState.FAILED), } assert result == expected @@ -578,13 +491,10 @@ def get_state(self: FeatureService, *, block: Block, feature: Feature) -> Featur (0, 0), ] ) -def test_get_ancestor_at_height_invalid( - feature_settings: FeatureSettings, - storage: TransactionStorage, - block_height: int, - ancestor_height: int -) -> None: - service = FeatureService(feature_settings=feature_settings, tx_storage=storage) +def test_get_ancestor_at_height_invalid(block_height: int, ancestor_height: int) -> None: + settings = get_settings(features={}) + storage = get_storage(settings, up_to_height=block_height) + service = FeatureService(settings=settings, tx_storage=storage) service.bit_signaling_service = Mock() block = not_none(storage.get_block_by_height(block_height)) @@ -600,26 +510,26 @@ def test_get_ancestor_at_height_invalid( ['block_height', 'ancestor_height'], [ (21, 20), - (21, 10), - (21, 0), - (15, 10), - (15, 0), + (21, 18), + (21, 17), + (15, 12), + (15, 11), (1, 0), ] ) -def test_get_ancestor_at_height( - feature_settings: FeatureSettings, - storage: TransactionStorage, - block_height: int, - ancestor_height: int -) -> None: - service = FeatureService(feature_settings=feature_settings, tx_storage=storage) - service.bit_signaling_service = Mock() +def test_get_ancestor_at_height(block_height: int, ancestor_height: int) -> None: + settings = get_settings(features={}) + storage = get_storage(settings, up_to_height=block_height) block = not_none(storage.get_block_by_height(block_height)) get_block_by_height_wrapped = Mock(wraps=storage.get_block_by_height) with patch.object(storage, 'get_block_by_height', get_block_by_height_wrapped): - result = service._get_ancestor_at_height(block=block, ancestor_height=ancestor_height) + service = FeatureService(settings=settings, tx_storage=storage) + 
service.bit_signaling_service = Mock() + result = service._get_ancestor_at_height( + block=block, + ancestor_height=ancestor_height + ) assert get_block_by_height_wrapped.call_count == ( 0 if block_height - ancestor_height <= 1 else 1 @@ -634,19 +544,17 @@ def test_get_ancestor_at_height( (21, 20), (21, 18), (15, 12), - (15, 10), + (15, 11), (1, 0), ] ) -def test_get_ancestor_at_height_voided( - feature_settings: FeatureSettings, - storage: TransactionStorage, - block_height: int, - ancestor_height: int -) -> None: - service = FeatureService(feature_settings=feature_settings, tx_storage=storage) +def test_get_ancestor_at_height_voided(block_height: int, ancestor_height: int) -> None: + settings = get_settings(features={}) + storage = get_storage(settings, up_to_height=block_height) + service = FeatureService(settings=settings, tx_storage=storage) service.bit_signaling_service = Mock() block = not_none(storage.get_block_by_height(block_height)) + parent_block = not_none(storage.get_block_by_height(block_height - 1)) parent_block.get_metadata().voided_by = {b'some'} @@ -686,7 +594,6 @@ def test_get_ancestor_at_height_voided( ] ) def test_check_must_signal( - storage: TransactionStorage, bit: int, threshold: int, block_height: int, @@ -705,7 +612,9 @@ def test_check_must_signal( ) } ) - service = FeatureService(feature_settings=feature_settings, tx_storage=storage) + settings = get_global_settings()._replace(FEATURE_ACTIVATION=feature_settings) + storage = get_storage(settings, up_to_height=block_height) + service = FeatureService(settings=settings, tx_storage=storage) service.bit_signaling_service = Mock() block = not_none(storage.get_block_by_height(block_height)) diff --git a/tests/feature_activation/test_feature_simulation.py b/tests/feature_activation/test_feature_simulation.py index 2c5e9094d..6bbeb9e35 100644 --- a/tests/feature_activation/test_feature_simulation.py +++ b/tests/feature_activation/test_feature_simulation.py @@ -82,7 +82,7 @@ def test_feature(self) -> None: manager = artifacts.manager feature_resource = FeatureResource( - feature_settings=feature_settings, + settings=settings, feature_service=feature_service, tx_storage=artifacts.tx_storage ) @@ -358,7 +358,7 @@ def test_reorg(self) -> None: manager = artifacts.manager feature_resource = FeatureResource( - feature_settings=feature_settings, + settings=settings, feature_service=feature_service, tx_storage=artifacts.tx_storage ) @@ -573,7 +573,7 @@ def test_feature_from_existing_storage(self) -> None: manager1 = artifacts1.manager feature_resource = FeatureResource( - feature_settings=feature_settings, + settings=settings, feature_service=feature_service1, tx_storage=artifacts1.tx_storage ) @@ -626,7 +626,7 @@ def test_feature_from_existing_storage(self) -> None: feature_service = artifacts2.feature_service feature_resource = FeatureResource( - feature_settings=feature_settings, + settings=settings, feature_service=feature_service, tx_storage=artifacts2.tx_storage ) diff --git a/tests/resources/feature/test_feature.py b/tests/resources/feature/test_feature.py index b2caa9099..fec031dc9 100644 --- a/tests/resources/feature/test_feature.py +++ b/tests/resources/feature/test_feature.py @@ -16,10 +16,11 @@ import pytest +from hathor.conf.get_settings import get_global_settings from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import FeatureService from hathor.feature_activation.model.criteria import Criteria -from hathor.feature_activation.model.feature_description import 
FeatureDescription +from hathor.feature_activation.model.feature_info import FeatureInfo from hathor.feature_activation.model.feature_state import FeatureState from hathor.feature_activation.resources.feature import FeatureResource from hathor.feature_activation.settings import Settings as FeatureSettings @@ -63,22 +64,24 @@ def get_state(*, block: Block, feature: Feature) -> FeatureState: feature_service = Mock(spec_set=FeatureService) feature_service.get_state = Mock(side_effect=get_state) - feature_service.get_bits_description = Mock(return_value={ - Feature.NOP_FEATURE_1: FeatureDescription(state=FeatureState.DEFINED, criteria=nop_feature_1_criteria), - Feature.NOP_FEATURE_2: FeatureDescription(state=FeatureState.LOCKED_IN, criteria=nop_feature_2_criteria), + feature_service.get_feature_infos = Mock(return_value={ + Feature.NOP_FEATURE_1: FeatureInfo(state=FeatureState.DEFINED, criteria=nop_feature_1_criteria), + Feature.NOP_FEATURE_2: FeatureInfo(state=FeatureState.LOCKED_IN, criteria=nop_feature_2_criteria), }) - feature_settings = FeatureSettings( - evaluation_interval=4, - default_threshold=3, - features={ - Feature.NOP_FEATURE_1: nop_feature_1_criteria, - Feature.NOP_FEATURE_2: nop_feature_2_criteria - } + settings = get_global_settings()._replace( + FEATURE_ACTIVATION=FeatureSettings( + evaluation_interval=4, + default_threshold=3, + features={ + Feature.NOP_FEATURE_1: nop_feature_1_criteria, + Feature.NOP_FEATURE_2: nop_feature_2_criteria + } + ) ) feature_resource = FeatureResource( - feature_settings=feature_settings, + settings=settings, feature_service=feature_service, tx_storage=tx_storage ) @@ -95,7 +98,7 @@ def test_get_features(web: StubSite) -> None: features=[ dict( name='NOP_FEATURE_1', - state='ACTIVE', + state='DEFINED', acceptance=None, threshold=0.75, start_height=0, @@ -106,8 +109,8 @@ def test_get_features(web: StubSite) -> None: ), dict( name='NOP_FEATURE_2', - state='STARTED', - acceptance=0.25, + state='LOCKED_IN', + acceptance=None, threshold=0.5, start_height=200, minimum_activation_height=0, diff --git a/tests/tx/test_verification.py b/tests/tx/test_verification.py index b33430455..ee92a3f22 100644 --- a/tests/tx/test_verification.py +++ b/tests/tx/test_verification.py @@ -342,8 +342,6 @@ def test_merge_mined_block_verify_without_storage(self) -> None: verify_data_wrapped = Mock(wraps=self.verifiers.block.verify_data) verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) - verify_aux_pow_wrapped = Mock(wraps=self.verifiers.merge_mined_block.verify_aux_pow) - with ( patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), @@ -352,7 +350,6 @@ def test_merge_mined_block_verify_without_storage(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), patch.object(BlockVerifier, 'verify_data', verify_data_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), - patch.object(MergeMinedBlockVerifier, 'verify_aux_pow', verify_aux_pow_wrapped), ): self.manager.verification_service.verify_without_storage(block) @@ -367,9 +364,6 @@ def test_merge_mined_block_verify_without_storage(self) -> None: verify_data_wrapped.assert_called_once() verify_sigops_output_wrapped.assert_called_once() - # MergeMinedBlock methods - verify_aux_pow_wrapped.assert_called_once() - def test_merge_mined_block_verify(self) -> None: block = self._get_valid_merge_mined_block() From 
4f807863094bc05f72a6379fe4294302228912e9 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Tue, 27 Aug 2024 12:14:33 -0300 Subject: [PATCH 19/61] refactor(metadata): pre-refactors for migrating feature states to static metadata [part 2/2] (#1017) --- .../test_feature_simulation.py | 577 ++++++++---------- 1 file changed, 250 insertions(+), 327 deletions(-) diff --git a/tests/feature_activation/test_feature_simulation.py b/tests/feature_activation/test_feature_simulation.py index 6bbeb9e35..5aac3a4de 100644 --- a/tests/feature_activation/test_feature_simulation.py +++ b/tests/feature_activation/test_feature_simulation.py @@ -13,14 +13,12 @@ # limitations under the License. from typing import Any -from unittest.mock import Mock, patch import pytest from hathor.builder import Builder from hathor.conf.get_settings import get_global_settings from hathor.feature_activation.feature import Feature -from hathor.feature_activation.feature_service import FeatureService from hathor.feature_activation.model.criteria import Criteria from hathor.feature_activation.resources.feature import FeatureResource from hathor.feature_activation.settings import Settings as FeatureSettings @@ -49,15 +47,9 @@ def _get_result(web_client: StubSite) -> dict[str, Any]: return result - @staticmethod - def _calculate_new_state_mock_block_height_calls(calculate_new_state_mock: Mock) -> list[int]: - """Return the heights of blocks that calculate_new_state_mock was called with.""" - return [call.kwargs['boundary_block'].get_height() for call in calculate_new_state_mock.call_args_list] - def test_feature(self) -> None: """ - Tests that a feature goes through all possible states in the correct block heights, and also assert internal - method calls to make sure we're executing it in the intended, most performatic way. + Tests that a feature goes through all possible states in the correct block heights. 
""" feature_settings = FeatureSettings( evaluation_interval=4, @@ -88,252 +80,207 @@ def test_feature(self) -> None: ) web_client = StubSite(feature_resource) - calculate_new_state_mock = Mock(wraps=feature_service._calculate_new_state) - get_ancestor_iteratively_mock = Mock(wraps=feature_service._get_ancestor_iteratively) - - with ( - patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock), - patch.object(FeatureService, '_get_ancestor_iteratively', get_ancestor_iteratively_mock), - ): - # at the beginning, the feature is DEFINED: - add_new_blocks(manager, 10) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=10, - features=[ - dict( - name='NOP_FEATURE_1', - state='DEFINED', - acceptance=None, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - # so we calculate states all the way down to the first evaluation boundary (after genesis): - assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 4 - # no blocks are voided, so we only use the height index, and not get_ancestor_iteratively: - assert get_ancestor_iteratively_mock.call_count == 0 - calculate_new_state_mock.reset_mock() - - # at block 19, the feature is DEFINED, just before becoming STARTED: - add_new_blocks(manager, 9) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=19, - features=[ - dict( - name='NOP_FEATURE_1', - state='DEFINED', - acceptance=None, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - # so we calculate states down to block 12, as block 8's state is saved: - assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 12 - assert get_ancestor_iteratively_mock.call_count == 0 - calculate_new_state_mock.reset_mock() - - # at block 20, the feature becomes STARTED: - add_new_blocks(manager, 1) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=20, - features=[ - dict( - name='NOP_FEATURE_1', - state='STARTED', - acceptance=0, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 20 - assert get_ancestor_iteratively_mock.call_count == 0 - - # we add one block before resetting the mock, just to make sure block 20 gets a chance to be saved - add_new_blocks(manager, 1) - calculate_new_state_mock.reset_mock() - - # at block 55, the feature is STARTED, just before becoming MUST_SIGNAL: - add_new_blocks(manager, 34) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=55, - features=[ - dict( - name='NOP_FEATURE_1', - state='STARTED', - acceptance=0, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 24 - assert get_ancestor_iteratively_mock.call_count == 0 - calculate_new_state_mock.reset_mock() - - # at block 56, the feature becomes MUST_SIGNAL: - add_new_blocks(manager, 1) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( 
- block_height=56, - features=[ - dict( - name='NOP_FEATURE_1', - state='MUST_SIGNAL', - acceptance=0, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 56 - assert get_ancestor_iteratively_mock.call_count == 0 - - # we add one block before resetting the mock, just to make sure block 56 gets a chance to be saved - add_new_blocks(manager, 1, signal_bits=0b1) - calculate_new_state_mock.reset_mock() - - # if we try to propagate a non-signaling block, it is not accepted - non_signaling_block = manager.generate_mining_block() - manager.cpu_mining_service.resolve(non_signaling_block) - non_signaling_block.signal_bits = 0b10 - non_signaling_block.init_static_metadata_from_storage(settings, manager.tx_storage) - - with pytest.raises(BlockMustSignalError): - manager.verification_service.verify(non_signaling_block) - - assert not manager.propagate_tx(non_signaling_block) - - # at block 59, the feature is MUST_SIGNAL, just before becoming LOCKED_IN: - add_new_blocks(manager, num_blocks=2, signal_bits=0b1) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=59, - features=[ - dict( - name='NOP_FEATURE_1', - state='MUST_SIGNAL', - acceptance=0.75, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - # we don't need to calculate any new state, as block 56's state is saved: - assert len(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 0 - assert get_ancestor_iteratively_mock.call_count == 0 - calculate_new_state_mock.reset_mock() - - # at block 60, the feature becomes LOCKED_IN: - add_new_blocks(manager, 1) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=60, - features=[ - dict( - name='NOP_FEATURE_1', - state='LOCKED_IN', - acceptance=None, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 60 - assert get_ancestor_iteratively_mock.call_count == 0 - - # we add one block before resetting the mock, just to make sure block 60 gets a chance to be saved - add_new_blocks(manager, 1) - calculate_new_state_mock.reset_mock() - - # at block 71, the feature is LOCKED_IN, just before becoming ACTIVE: - add_new_blocks(manager, 10) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=71, - features=[ - dict( - name='NOP_FEATURE_1', - state='LOCKED_IN', - acceptance=None, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 64 - assert get_ancestor_iteratively_mock.call_count == 0 - calculate_new_state_mock.reset_mock() - - # at block 72, the feature becomes ACTIVE, forever: - add_new_blocks(manager, 1) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=72, - features=[ - dict( - name='NOP_FEATURE_1', - state='ACTIVE', - acceptance=None, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, 
- lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 72 - assert get_ancestor_iteratively_mock.call_count == 0 - calculate_new_state_mock.reset_mock() + # at the beginning, the feature is DEFINED: + add_new_blocks(manager, 10) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=10, + features=[ + dict( + name='NOP_FEATURE_1', + state='DEFINED', + acceptance=None, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + + # at block 19, the feature is DEFINED, just before becoming STARTED: + add_new_blocks(manager, 9) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=19, + features=[ + dict( + name='NOP_FEATURE_1', + state='DEFINED', + acceptance=None, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + + # at block 20, the feature becomes STARTED: + add_new_blocks(manager, 1) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=20, + features=[ + dict( + name='NOP_FEATURE_1', + state='STARTED', + acceptance=0, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + + # at block 55, the feature is STARTED, just before becoming MUST_SIGNAL: + add_new_blocks(manager, 35) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=55, + features=[ + dict( + name='NOP_FEATURE_1', + state='STARTED', + acceptance=0, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + + # at block 56, the feature becomes MUST_SIGNAL: + add_new_blocks(manager, 1) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=56, + features=[ + dict( + name='NOP_FEATURE_1', + state='MUST_SIGNAL', + acceptance=0, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + + add_new_blocks(manager, 1, signal_bits=0b1) + + # if we try to propagate a non-signaling block, it is not accepted + non_signaling_block = manager.generate_mining_block() + manager.cpu_mining_service.resolve(non_signaling_block) + non_signaling_block.signal_bits = 0b10 + non_signaling_block.init_static_metadata_from_storage(settings, manager.tx_storage) + + with pytest.raises(BlockMustSignalError): + manager.verification_service.verify(non_signaling_block) + + assert not manager.propagate_tx(non_signaling_block) + + # at block 59, the feature is MUST_SIGNAL, just before becoming LOCKED_IN: + add_new_blocks(manager, num_blocks=2, signal_bits=0b1) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=59, + features=[ + dict( + name='NOP_FEATURE_1', + state='MUST_SIGNAL', + acceptance=0.75, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + + # at block 60, the feature becomes LOCKED_IN: + add_new_blocks(manager, 1) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + 
block_height=60, + features=[ + dict( + name='NOP_FEATURE_1', + state='LOCKED_IN', + acceptance=None, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + + # at block 71, the feature is LOCKED_IN, just before becoming ACTIVE: + add_new_blocks(manager, 11) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=71, + features=[ + dict( + name='NOP_FEATURE_1', + state='LOCKED_IN', + acceptance=None, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + + # at block 72, the feature becomes ACTIVE, forever: + add_new_blocks(manager, 1) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=72, + features=[ + dict( + name='NOP_FEATURE_1', + state='ACTIVE', + acceptance=None, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) def test_reorg(self) -> None: feature_settings = FeatureSettings( @@ -579,43 +526,30 @@ def test_feature_from_existing_storage(self) -> None: ) web_client = StubSite(feature_resource) - calculate_new_state_mock = Mock(wraps=feature_service1._calculate_new_state) - get_ancestor_iteratively_mock = Mock(wraps=feature_service1._get_ancestor_iteratively) - - with ( - patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock), - patch.object(FeatureService, '_get_ancestor_iteratively', get_ancestor_iteratively_mock), - ): - assert artifacts1.tx_storage.get_vertices_count() == 3 # genesis vertices in the storage - - # we add 64 blocks so the feature becomes active. It would be active by timeout anyway, - # we just set signal bits to conform with the MUST_SIGNAL phase. - add_new_blocks(manager1, 64, signal_bits=0b1) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=64, - features=[ - dict( - name='NOP_FEATURE_1', - state='ACTIVE', - acceptance=None, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=0, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - # feature states have to be calculated for all blocks in evaluation interval boundaries, - # down to the first one (after genesis), as this is the first run: - assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 4 - # no blocks are voided, so we only use the height index: - assert get_ancestor_iteratively_mock.call_count == 0 - assert artifacts1.tx_storage.get_vertices_count() == 67 - calculate_new_state_mock.reset_mock() + assert artifacts1.tx_storage.get_vertices_count() == 3 # genesis vertices in the storage + + # we add 64 blocks so the feature becomes active. It would be active by timeout anyway, + # we just set signal bits to conform with the MUST_SIGNAL phase. 
+ add_new_blocks(manager1, 64, signal_bits=0b1) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=64, + features=[ + dict( + name='NOP_FEATURE_1', + state='ACTIVE', + acceptance=None, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=0, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + assert artifacts1.tx_storage.get_vertices_count() == 67 manager1.stop() not_none(artifacts1.rocksdb_storage).close() @@ -632,41 +566,30 @@ def test_feature_from_existing_storage(self) -> None: ) web_client = StubSite(feature_resource) - calculate_new_state_mock = Mock(wraps=feature_service._calculate_new_state) - get_ancestor_iteratively_mock = Mock(wraps=feature_service._get_ancestor_iteratively) - - with ( - patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock), - patch.object(FeatureService, '_get_ancestor_iteratively', get_ancestor_iteratively_mock), - ): - # the new storage starts populated - assert artifacts2.tx_storage.get_vertices_count() == 67 - self.simulator.run(60) - - result = self._get_result(web_client) - - # the result should be the same as before - assert result == dict( - block_height=64, - features=[ - dict( - name='NOP_FEATURE_1', - state='ACTIVE', - acceptance=None, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=0, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - # features states are not calculate for any block, as they're all saved: - assert len(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 0 - assert get_ancestor_iteratively_mock.call_count == 0 - assert artifacts2.tx_storage.get_vertices_count() == 67 - calculate_new_state_mock.reset_mock() + # the new storage starts populated + assert artifacts2.tx_storage.get_vertices_count() == 67 + self.simulator.run(60) + + result = self._get_result(web_client) + + # the result should be the same as before + assert result == dict( + block_height=64, + features=[ + dict( + name='NOP_FEATURE_1', + state='ACTIVE', + acceptance=None, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=0, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + assert artifacts2.tx_storage.get_vertices_count() == 67 class SyncV1MemoryStorageFeatureSimulationTest(unittest.SyncV1Params, BaseMemoryStorageFeatureSimulationTest): From f3e3ab55570fc1f0a02397707598a2ab03154ab3 Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Thu, 29 Aug 2024 11:54:56 -0500 Subject: [PATCH 20/61] feat(ws): Add flow control to the tx history streamer --- hathor/websocket/messages.py | 5 +- hathor/websocket/protocol.py | 60 +++++++++++++++++++-- hathor/websocket/streamer.py | 92 ++++++++++++++++++++++++++++---- tests/websocket/test_streamer.py | 8 ++- 4 files changed, 150 insertions(+), 15 deletions(-) diff --git a/hathor/websocket/messages.py b/hathor/websocket/messages.py index f8d2e6c9a..01b3b4f45 100644 --- a/hathor/websocket/messages.py +++ b/hathor/websocket/messages.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any +from typing import Any, Optional from pydantic import Field @@ -41,6 +41,7 @@ class StreamErrorMessage(StreamBase): class StreamBeginMessage(StreamBase): type: str = Field('stream:history:begin', const=True) id: str + window_size: Optional[int] class StreamEndMessage(StreamBase): @@ -51,12 +52,14 @@ class StreamEndMessage(StreamBase): class StreamVertexMessage(StreamBase): type: str = Field('stream:history:vertex', const=True) id: str + seq: int data: dict[str, Any] class StreamAddressMessage(StreamBase): type: str = Field('stream:history:address', const=True) id: str + seq: int index: int address: str subscribed: bool diff --git a/hathor/websocket/protocol.py b/hathor/websocket/protocol.py index e506901f9..319eee555 100644 --- a/hathor/websocket/protocol.py +++ b/hathor/websocket/protocol.py @@ -122,6 +122,8 @@ def onMessage(self, payload: Union[bytes, str], isBinary: bool) -> None: self._handle_history_manual_streamer(message) elif _type == 'request:history:stop': self._stop_streamer(message) + elif _type == 'request:history:ack': + self._ack_streamer(message) def _handle_ping(self, message: dict[Any, Any]) -> None: """Handle ping message, should respond with a simple {"type": "pong"}""" @@ -140,9 +142,14 @@ def fail_if_history_streaming_is_disabled(self) -> bool: )) return True - def _create_streamer(self, stream_id: str, search: AddressSearch) -> None: + def _create_streamer(self, stream_id: str, search: AddressSearch, window_size: int | None) -> None: """Create the streamer and handle its callbacks.""" self._history_streamer = HistoryStreamer(protocol=self, stream_id=stream_id, search=search) + if window_size is not None: + if window_size < 0: + self._history_streamer.set_sliding_window_size(None) + else: + self._history_streamer.set_sliding_window_size(window_size) deferred = self._history_streamer.start() deferred.addBoth(self._streamer_callback) return @@ -181,7 +188,8 @@ def _open_history_xpub_streamer(self, message: dict[Any, Any]) -> None: return search = gap_limit_search(self.factory.manager, address_iter, gap_limit) - self._create_streamer(stream_id, search) + window_size = message.get('window-size', None) + self._create_streamer(stream_id, search, window_size) self.log.info('opening a websocket xpub streaming', stream_id=stream_id, xpub=xpub, @@ -237,7 +245,8 @@ def _handle_history_manual_streamer(self, message: dict[Any, Any]) -> None: return search = gap_limit_search(self.factory.manager, address_iter, gap_limit) - self._create_streamer(stream_id, search) + window_size = message.get('window-size', None) + self._create_streamer(stream_id, search, window_size) self.log.info('opening a websocket manual streaming', stream_id=stream_id, addresses=addresses, @@ -279,6 +288,51 @@ def _stop_streamer(self, message: dict[Any, Any]) -> None: self._history_streamer.stop(success=False) self.log.info('stopping a websocket xpub streaming', stream_id=stream_id) + def _ack_streamer(self, message: dict[Any, Any]) -> None: + """Handle request to set the ack number in the current streamer.""" + stream_id: str = message.get('id', '') + + if self._history_streamer is None: + self.send_message(StreamErrorMessage( + id=stream_id, + errmsg='No streaming opened.' + )) + return + + assert self._history_streamer is not None + + if self._history_streamer.stream_id != stream_id: + self.send_message(StreamErrorMessage( + id=stream_id, + errmsg='Current stream has a different id.' 
+ )) + return + + ack = message.get('ack', None) + if ack is not None: + if not isinstance(ack, int): + self.send_message(StreamErrorMessage( + id=stream_id, + errmsg='Invalid ack.' + )) + return + self.log.info('ack received', stream_id=stream_id, ack=ack) + self._history_streamer.set_ack(ack) + + window = message.get('window', None) + if window is not None: + if not isinstance(window, int): + self.send_message(StreamErrorMessage( + id=stream_id, + errmsg='Invalid window.' + )) + return + self.log.info('sliding window size updated', stream_id=stream_id, sliding_window_size=window) + if window < 0: + self._history_streamer.set_sliding_window_size(None) + else: + self._history_streamer.set_sliding_window_size(window) + def send_message(self, message: WebSocketMessage) -> None: """Send a typed message.""" payload = message.json_dumpb() diff --git a/hathor/websocket/streamer.py b/hathor/websocket/streamer.py index f116fc36e..828d12472 100644 --- a/hathor/websocket/streamer.py +++ b/hathor/websocket/streamer.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Optional from twisted.internet.defer import Deferred from twisted.internet.interfaces import IPushProducer @@ -55,6 +55,7 @@ class HistoryStreamer: """ STATS_LOG_INTERVAL = 10_000 + DEFAULT_SLIDING_WINDOW_SIZE = None def __init__(self, *, @@ -69,18 +70,62 @@ def __init__(self, self.max_seconds_locking_event_loop = 1 + self.deferred: Deferred[bool] = Deferred() + + # Statistics. self.stats_log_interval = self.STATS_LOG_INTERVAL self.stats_total_messages: int = 0 self.stats_sent_addresses: int = 0 self.stats_sent_vertices: int = 0 + # Execution control. + self._started = False + self._is_running = False self._paused = False self._stop = False + # Flow control. + self._next_sequence_number: int = 0 + self._last_ack: int = -1 + self._sliding_window_size: Optional[int] = self.DEFAULT_SLIDING_WINDOW_SIZE + + def set_sliding_window_size(self, size: Optional[int]) -> None: + """Set a new sliding window size for flow control. If size is none, disables flow control. + """ + if size == self._sliding_window_size: + return + self._sliding_window_size = size + self.resume_if_possible() + + def set_ack(self, ack: int) -> None: + """Set the ack value for flow control. + + If the new value is bigger than the previous value, the streaming might be resumed. + """ + if ack <= self._last_ack: + # We might receive outdated or duplicate ACKs, and we can safely ignore them. + self.send_message(StreamErrorMessage( + id=self.stream_id, + errmsg=f'Outdated ACK received. Skipping it... (ack={ack})' + )) + return + if ack >= self._next_sequence_number: + self.send_message(StreamErrorMessage( + id=self.stream_id, + errmsg=f'Received ACK is higher than the last sent message. Skipping it... (ack={ack})' + )) + return + self._last_ack = ack + self.resume_if_possible() + + def resume_if_possible(self) -> None: + if not self._started: + return + if not self.should_pause_streaming() and not self._is_running: + self.resumeProducing() + def start(self) -> Deferred[bool]: """Start streaming items.""" - self.send_message(StreamBeginMessage(id=self.stream_id)) - # The websocket connection somehow instantiates an twisted.web.http.HTTPChannel object # which register a producer. It seems the HTTPChannel is not used anymore after switching # to websocket but it keep registered. 
So we have to unregister before registering a new @@ -89,13 +134,18 @@ def start(self) -> Deferred[bool]: self.protocol.unregisterProducer() self.protocol.registerProducer(self, True) - self.deferred: Deferred[bool] = Deferred() + + assert not self._started + self._started = True + self.send_message(StreamBeginMessage(id=self.stream_id, window_size=self._sliding_window_size)) self.resumeProducing() return self.deferred def stop(self, success: bool) -> None: """Stop streaming items.""" + assert self._started self._stop = True + self._started = False self.protocol.unregisterProducer() self.deferred.callback(success) @@ -118,17 +168,27 @@ def _run(self) -> None: coro = self._async_run() Deferred.fromCoroutine(coro) + def should_pause_streaming(self) -> bool: + if self._sliding_window_size is None: + return False + stop_value = self._last_ack + self._sliding_window_size + 1 + if self._next_sequence_number < stop_value: + return False + return True + async def _async_run(self): + assert not self._is_running + self._is_running = True + try: + await self._async_run_unsafe() + finally: + self._is_running = False + + async def _async_run_unsafe(self): """Internal method that runs the streaming main loop.""" t0 = self.reactor.seconds() async for item in self.search_iter: - # The methods `pauseProducing()` and `stopProducing()` might be called during the - # call to `self.protocol.sendMessage()`. So both `_paused` and `_stop` might change - # during the loop. - if self._paused or self._stop: - break - match item: case AddressItem(): subscribed, errmsg = self.protocol.subscribe_address(item.address) @@ -144,6 +204,7 @@ async def _async_run(self): self.stats_sent_addresses += 1 self.send_message(StreamAddressMessage( id=self.stream_id, + seq=self._next_sequence_number, index=item.index, address=item.address, subscribed=subscribed, @@ -153,12 +214,23 @@ async def _async_run(self): self.stats_sent_vertices += 1 self.send_message(StreamVertexMessage( id=self.stream_id, + seq=self._next_sequence_number, data=item.vertex.to_json_extended(), )) case _: assert False + self._next_sequence_number += 1 + if self.should_pause_streaming(): + break + + # The methods `pauseProducing()` and `stopProducing()` might be called during the + # call to `self.protocol.sendMessage()`. So both `_paused` and `_stop` might change + # during the loop. + if self._paused or self._stop: + break + self.stats_total_messages += 1 if self.stats_total_messages % self.stats_log_interval == 0: self.protocol.log.info('websocket streaming statistics', diff --git a/tests/websocket/test_streamer.py b/tests/websocket/test_streamer.py index a76148952..e83a81438 100644 --- a/tests/websocket/test_streamer.py +++ b/tests/websocket/test_streamer.py @@ -39,7 +39,11 @@ def test_streamer(self) -> None: addresses.append(AddressItem(idx, wallet.get_address(wallet.get_key_at_index(idx)))) # Create the expected result. - expected_result: list[dict[str, Any]] = [{'type': 'stream:history:begin', 'id': stream_id}] + expected_result: list[dict[str, Any]] = [{ + 'type': 'stream:history:begin', + 'id': stream_id, + 'window_size': None, + }] expected_result += [ { 'type': 'stream:history:address', @@ -56,6 +60,8 @@ def test_streamer(self) -> None: 'data': genesis.to_json_extended(), }) expected_result.append({'type': 'stream:history:end', 'id': stream_id}) + for index, item in enumerate(expected_result[1:-1]): + item['seq'] = index # Create both the address iterator and the GAP limit searcher. 
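A minimal sketch of the client-side ACK payload this flow control expects, using only the keys read by `_ack_streamer` above (`id`, `ack`, `window`); the stream id and the numeric values are illustrative. The open requests may additionally carry a `window-size` key, where a negative value disables flow control.

    # Sketch only: ACK sent after the client has processed messages up to seq=3.
    # 'window' is optional and resizes the sliding window; a negative value
    # turns flow control off for the rest of the stream.
    ack_request = {
        'type': 'request:history:ack',
        'id': 'stream-1',   # must match the id of the currently open stream
        'ack': 3,
        'window': 8,
    }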
address_iter = ManualAddressSequencer() From 50ae3b2a15d820255b51682b82c005b9ba30466d Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Wed, 4 Sep 2024 15:34:46 -0300 Subject: [PATCH 21/61] feat: enable cache by default (#1125) --- hathor/builder/cli_builder.py | 23 +++++++++++++++++++---- hathor/cli/run_node.py | 4 +++- hathor/cli/run_node_args.py | 1 + tests/others/test_cli_builder.py | 20 +++++++++++--------- 4 files changed, 34 insertions(+), 14 deletions(-) diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index 3f10d7454..4548c5077 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -50,6 +50,8 @@ logger = get_logger() +DEFAULT_CACHE_SIZE: int = 100000 + class SyncChoice(Enum): V1_DEFAULT = auto() # v1 enabled, v2 disabled but can be enabled in runtime @@ -150,7 +152,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: indexes = RocksDBIndexesManager(self.rocksdb_storage) kwargs: dict[str, Any] = {} - if not self._args.cache: + if self._args.disable_cache: # We should only pass indexes if cache is disabled. Otherwise, # only TransactionCacheStorage should have indexes. kwargs['indexes'] = indexes @@ -161,14 +163,27 @@ def create_manager(self, reactor: Reactor) -> HathorManager: feature_storage = FeatureActivationStorage(settings=settings, rocksdb_storage=self.rocksdb_storage) self.log.info('with storage', storage_class=type(tx_storage).__name__, path=self._args.data) + if self._args.cache: - self.check_or_raise(not self._args.memory_storage, '--cache should not be used with --memory-storage') - tx_storage = TransactionCacheStorage(tx_storage, reactor, indexes=indexes, settings=settings) + self.log.warn('--cache is now the default and will be removed') + + if self._args.disable_cache: + self.check_or_raise(self._args.cache_size is None, 'cannot use --disable-cache with --cache-size') + self.check_or_raise(self._args.cache_interval is None, 'cannot use --disable-cache with --cache-interval') + + if self._args.memory_storage: if self._args.cache_size: - tx_storage.capacity = self._args.cache_size + self.log.warn('using --cache-size with --memory-storage has no effect') + if self._args.cache_interval: + self.log.warn('using --cache-interval with --memory-storage has no effect') + + if not self._args.disable_cache and not self._args.memory_storage: + tx_storage = TransactionCacheStorage(tx_storage, reactor, indexes=indexes, settings=settings) + tx_storage.capacity = self._args.cache_size if self._args.cache_size is not None else DEFAULT_CACHE_SIZE if self._args.cache_interval: tx_storage.interval = self._args.cache_interval self.log.info('with cache', capacity=tx_storage.capacity, interval=tx_storage.interval) + self.tx_storage = tx_storage self.log.info('with indexes', indexes_class=type(tx_storage.indexes).__name__) diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index a7d7be0d6..88489fa8a 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -116,7 +116,9 @@ def create_parser(cls) -> ArgumentParser: parser.add_argument('--prometheus', action='store_true', help='Send metric data to Prometheus') parser.add_argument('--prometheus-prefix', default='', help='A prefix that will be added in all Prometheus metrics') - parser.add_argument('--cache', action='store_true', help='Use cache for tx storage') + cache_args = parser.add_mutually_exclusive_group() + cache_args.add_argument('--cache', action='store_true', help=SUPPRESS) # moved to --disable-cache + cache_args.add_argument('--disable-cache', 
action='store_true', help='Disable cache for tx storage') parser.add_argument('--cache-size', type=int, help='Number of txs to keep on cache') parser.add_argument('--cache-interval', type=int, help='Cache flush interval') parser.add_argument('--recursion-limit', type=int, help='Set python recursion limit') diff --git a/hathor/cli/run_node_args.py b/hathor/cli/run_node_args.py index 36b137e3f..f493a7d33 100644 --- a/hathor/cli/run_node_args.py +++ b/hathor/cli/run_node_args.py @@ -52,6 +52,7 @@ class RunNodeArgs(BaseModel, extra=Extra.allow): prometheus: bool prometheus_prefix: str cache: bool + disable_cache: bool cache_size: Optional[int] cache_interval: Optional[int] recursion_limit: Optional[int] diff --git a/tests/others/test_cli_builder.py b/tests/others/test_cli_builder.py index 96a4aaeca..a83f00899 100644 --- a/tests/others/test_cli_builder.py +++ b/tests/others/test_cli_builder.py @@ -53,7 +53,8 @@ def test_empty(self): def test_all_default(self): data_dir = self.mkdtemp() manager = self._build(['--data', data_dir]) - self.assertIsInstance(manager.tx_storage, TransactionRocksDBStorage) + self.assertIsInstance(manager.tx_storage, TransactionCacheStorage) + self.assertIsInstance(manager.tx_storage.store, TransactionRocksDBStorage) self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) self.assertIsNone(manager.wallet) self.assertEqual('unittests', manager.network) @@ -64,33 +65,34 @@ def test_all_default(self): self.assertFalse(manager._enable_event_queue) @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') - def test_cache_storage(self): + def test_disable_cache_storage(self): data_dir = self.mkdtemp() - manager = self._build(['--cache', '--data', data_dir]) - self.assertIsInstance(manager.tx_storage, TransactionCacheStorage) - self.assertIsInstance(manager.tx_storage.store, TransactionRocksDBStorage) + manager = self._build(['--disable-cache', '--data', data_dir]) + self.assertIsInstance(manager.tx_storage, TransactionRocksDBStorage) self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) - self.assertIsNone(manager.tx_storage.store.indexes) @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') def test_default_storage_memory_indexes(self): data_dir = self.mkdtemp() manager = self._build(['--memory-indexes', '--data', data_dir]) - self.assertIsInstance(manager.tx_storage, TransactionRocksDBStorage) + self.assertIsInstance(manager.tx_storage, TransactionCacheStorage) + self.assertIsInstance(manager.tx_storage.store, TransactionRocksDBStorage) self.assertIsInstance(manager.tx_storage.indexes, MemoryIndexesManager) @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') def test_default_storage_with_rocksdb_indexes(self): data_dir = self.mkdtemp() manager = self._build(['--x-rocksdb-indexes', '--data', data_dir]) - self.assertIsInstance(manager.tx_storage, TransactionRocksDBStorage) + self.assertIsInstance(manager.tx_storage, TransactionCacheStorage) + self.assertIsInstance(manager.tx_storage.store, TransactionRocksDBStorage) self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') def test_rocksdb_storage(self): data_dir = self.mkdtemp() manager = self._build(['--rocksdb-storage', '--data', data_dir]) - self.assertIsInstance(manager.tx_storage, TransactionRocksDBStorage) + self.assertIsInstance(manager.tx_storage, TransactionCacheStorage) + self.assertIsInstance(manager.tx_storage.store, TransactionRocksDBStorage) 
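Since the new default/override logic is spread across `cli_builder.py`, `run_node.py`, and these tests, a short summary sketch of the resulting flag combinations may help (flag spellings as introduced above; behaviour as asserted by the surrounding tests):

    # Summary sketch only (not part of the patch):
    #   --data DIR                      -> TransactionCacheStorage wrapping TransactionRocksDBStorage,
    #                                      capacity DEFAULT_CACHE_SIZE (100000) unless --cache-size is given
    #   --data DIR --disable-cache      -> plain TransactionRocksDBStorage, no cache wrapper
    #   --data DIR --memory-storage     -> no cache wrapper; --cache-size/--cache-interval only log a warning
    #   --disable-cache --cache-size N  -> rejected by check_or_raise
    #   --cache                         -> still accepted (hidden), logs a deprecation warning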
self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) def test_memory_storage(self): From ec2563eb2fb65a269b2e918869c7c354b6a4f3f2 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Thu, 5 Sep 2024 16:02:09 -0300 Subject: [PATCH 22/61] feat: improve load-from-logs CLI command (#1132) --- hathor/cli/load_from_logs.py | 40 ++++++--------- hathor/cli/main.py | 7 ++- hathor/cli/parse_logs.py | 99 ++++++++++++++++++++++++++++++++++++ 3 files changed, 119 insertions(+), 27 deletions(-) create mode 100644 hathor/cli/parse_logs.py diff --git a/hathor/cli/load_from_logs.py b/hathor/cli/load_from_logs.py index 1e39979f6..f0842dec4 100644 --- a/hathor/cli/load_from_logs.py +++ b/hathor/cli/load_from_logs.py @@ -12,56 +12,46 @@ # See the License for the specific language governing permissions and # limitations under the License. -import re import sys from argparse import ArgumentParser, FileType +from twisted.internet.defer import Deferred +from twisted.internet.task import deferLater + from hathor.cli.run_node import RunNode class LoadFromLogs(RunNode): - def start_manager(self) -> None: - pass - - def register_signal_handlers(self) -> None: - pass - @classmethod def create_parser(cls) -> ArgumentParser: parser = super().create_parser() parser.add_argument('--log-dump', type=FileType('r', encoding='UTF-8'), default=sys.stdin, nargs='?', - help='Where to read logs from, defaults to stdin.') + help='Where to read logs from, defaults to stdin. Should be pre-parsed with parse-logs.') return parser - def prepare(self, *, register_resources: bool = True) -> None: - super().prepare(register_resources=False) - def run(self) -> None: + self.reactor.callLater(0, lambda: Deferred.fromCoroutine(self._load_from_logs())) + super().run() + + async def _load_from_logs(self) -> None: from hathor.conf.get_settings import get_global_settings from hathor.transaction.vertex_parser import VertexParser settings = get_global_settings() parser = VertexParser(settings=settings) - pattern = r'new (tx|block) .*bytes=([^ ]*) ' - pattern = r'new (tx|block) .*bytes=([^ ]*) ' - compiled_pattern = re.compile(pattern) - while True: line_with_break = self._args.log_dump.readline() if not line_with_break: break - line = line_with_break.strip() - - matches = compiled_pattern.findall(line) - if len(matches) == 0: + if line_with_break.startswith('//'): continue - - assert len(matches) == 1 - _, vertex_bytes_hex = matches[0] - - vertex_bytes = bytes.fromhex(vertex_bytes_hex) + line = line_with_break.strip() + vertex_bytes = bytes.fromhex(line) vertex = parser.deserialize(vertex_bytes) - self.manager.on_new_tx(vertex) + await deferLater(self.reactor, 0, self.manager.on_new_tx, vertex) + + self.manager.connections.disconnect_all_peers(force=True) + self.reactor.fireSystemEvent('shutdown') def main(): diff --git a/hathor/cli/main.py b/hathor/cli/main.py index 1c1633fb2..6a597745b 100644 --- a/hathor/cli/main.py +++ b/hathor/cli/main.py @@ -48,6 +48,7 @@ def __init__(self) -> None: oracle_create_key, oracle_encode_data, oracle_get_pubkey, + parse_logs, peer_id, quick_test, replay_logs, @@ -98,8 +99,10 @@ def __init__(self) -> None: self.add_cmd('dev', 'events_simulator', events_simulator, 'Simulate integration events via websocket') self.add_cmd('dev', 'x-export', db_export, 'EXPERIMENTAL: Export database to a simple format.') self.add_cmd('dev', 'x-import', db_import, 'EXPERIMENTAL: Import database from exported format.') - self.add_cmd('dev', 'replay-logs', replay_logs, 'EXPERIMENTAL: re-play json logs as console 
printted') - self.add_cmd('dev', 'load-from-logs', load_from_logs, 'Load vertices as they are found in a log dump') + self.add_cmd('dev', 'replay-logs', replay_logs, 'EXPERIMENTAL: re-play json logs as console printed') + self.add_cmd('dev', 'load-from-logs', load_from_logs, + 'Load vertices as they are found in a log dump that was parsed with parse-logs') + self.add_cmd('dev', 'parse-logs', parse_logs, 'Parse a log dump to use it with load-from-logs') def add_cmd(self, group: str, cmd: str, module: ModuleType, short_description: Optional[str] = None) -> None: self.command_list[cmd] = module diff --git a/hathor/cli/parse_logs.py b/hathor/cli/parse_logs.py new file mode 100644 index 000000000..a86ce67f0 --- /dev/null +++ b/hathor/cli/parse_logs.py @@ -0,0 +1,99 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import re +import sys +from argparse import FileType +from io import TextIOWrapper +from typing import Iterator + + +def main() -> None: + """ + Parse logs from a dump file (either as json or plain logs) into a file with only vertex hex bytes. + The logs must be generated with --log-vertex-bytes. Then, use load-from-logs to run a full node from this file. + """ + from hathor.cli.util import create_parser + parser = create_parser() + file_args = parser.add_mutually_exclusive_group(required=True) + file_args.add_argument( + '--json-logs-file', + type=FileType('r', encoding='UTF-8'), + help='Where to read json logs from.', + ) + file_args.add_argument( + '--plain-logs-file', + type=FileType('r', encoding='UTF-8'), + help='Where to read plain logs from.', + ) + parser.add_argument( + '--output-file', + type=FileType('w', encoding='UTF-8'), + required=True, + help='Output file.', + ) + args = parser.parse_args(sys.argv[1:]) + assert isinstance(args.output_file, TextIOWrapper) + + vertex_iter: Iterator[str] + if args.json_logs_file is not None: + assert isinstance(args.json_logs_file, TextIOWrapper) + print('parsing json logs file...') + vertex_iter = _parse_json_logs(args.json_logs_file) + else: + assert isinstance(args.plain_logs_file, TextIOWrapper) + print('parsing plain logs file...') + vertex_iter = _parse_plain_logs(args.plain_logs_file) + + print('writing to output file...') + for vertex in vertex_iter: + args.output_file.write(vertex + '\n') + print('done') + + +def _parse_json_logs(file: TextIOWrapper) -> Iterator[str]: + while True: + line = file.readline() + if not line: + break + + json_dict = json.loads(line) + event = json_dict.get('event') + if not event: + return + + if event in ('new block', 'new tx'): + vertex_bytes = json_dict.get('bytes') + assert vertex_bytes is not None, 'logs should be generated with --log-vertex-bytes' + yield vertex_bytes + + +def _parse_plain_logs(file: TextIOWrapper) -> Iterator[str]: + pattern = r'new (tx|block) .*bytes=([^ ]*) ' + compiled_pattern = re.compile(pattern) + + while True: + line_with_break = file.readline() + if not line_with_break: + break + line = line_with_break.strip() + + matches = 
compiled_pattern.findall(line) + if len(matches) == 0: + continue + + assert len(matches) == 1 + _, vertex_bytes_hex = matches[0] + yield vertex_bytes_hex From 22fc4b9387a12cda0cf8c75a283b97ab7f8bcfce Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Wed, 4 Sep 2024 13:16:59 -0500 Subject: [PATCH 23/61] fix(ws): Add a graceful close mechanism to handle late messages and prevent errors --- hathor/websocket/messages.py | 2 + hathor/websocket/protocol.py | 1 + hathor/websocket/streamer.py | 173 ++++++++++++++++++++++--------- tests/websocket/test_streamer.py | 11 +- 4 files changed, 138 insertions(+), 49 deletions(-) diff --git a/hathor/websocket/messages.py b/hathor/websocket/messages.py index 01b3b4f45..86058759b 100644 --- a/hathor/websocket/messages.py +++ b/hathor/websocket/messages.py @@ -41,12 +41,14 @@ class StreamErrorMessage(StreamBase): class StreamBeginMessage(StreamBase): type: str = Field('stream:history:begin', const=True) id: str + seq: int window_size: Optional[int] class StreamEndMessage(StreamBase): type: str = Field('stream:history:end', const=True) id: str + seq: int class StreamVertexMessage(StreamBase): diff --git a/hathor/websocket/protocol.py b/hathor/websocket/protocol.py index 319eee555..e23d2b60a 100644 --- a/hathor/websocket/protocol.py +++ b/hathor/websocket/protocol.py @@ -144,6 +144,7 @@ def fail_if_history_streaming_is_disabled(self) -> bool: def _create_streamer(self, stream_id: str, search: AddressSearch, window_size: int | None) -> None: """Create the streamer and handle its callbacks.""" + assert self._history_streamer is None self._history_streamer = HistoryStreamer(protocol=self, stream_id=stream_id, search=search) if window_size is not None: if window_size < 0: diff --git a/hathor/websocket/streamer.py b/hathor/websocket/streamer.py index 828d12472..08eb6ca89 100644 --- a/hathor/websocket/streamer.py +++ b/hathor/websocket/streamer.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from enum import Enum, auto from typing import TYPE_CHECKING, Optional from twisted.internet.defer import Deferred @@ -33,6 +34,27 @@ from hathor.websocket.protocol import HathorAdminWebsocketProtocol +class StreamerState(Enum): + NOT_STARTED = auto() + ACTIVE = auto() + PAUSED = auto() + CLOSING = auto() + CLOSED = auto() + + def can_transition_to(self, destination: 'StreamerState') -> bool: + """Checks if the transition to the destination state is valid.""" + return destination in VALID_TRANSITIONS[self] + + +VALID_TRANSITIONS = { + StreamerState.NOT_STARTED: {StreamerState.ACTIVE}, + StreamerState.ACTIVE: {StreamerState.ACTIVE, StreamerState.PAUSED, StreamerState.CLOSING, StreamerState.CLOSED}, + StreamerState.PAUSED: {StreamerState.ACTIVE, StreamerState.CLOSED}, + StreamerState.CLOSING: {StreamerState.CLOSED}, + StreamerState.CLOSED: set() +} + + @implementer(IPushProducer) class HistoryStreamer: """A producer that pushes addresses and transactions to a websocket connection. @@ -72,23 +94,32 @@ def __init__(self, self.deferred: Deferred[bool] = Deferred() - # Statistics. + # Statistics + # ---------- self.stats_log_interval = self.STATS_LOG_INTERVAL self.stats_total_messages: int = 0 self.stats_sent_addresses: int = 0 self.stats_sent_vertices: int = 0 - # Execution control. 
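A small sketch of how the state machine above behaves, assuming the `StreamerState` and `VALID_TRANSITIONS` definitions introduced in this hunk (import path taken from the patched module):

    # Sketch only: transitions are gated by VALID_TRANSITIONS via can_transition_to().
    from hathor.websocket.streamer import StreamerState

    assert StreamerState.NOT_STARTED.can_transition_to(StreamerState.ACTIVE)
    assert StreamerState.CLOSING.can_transition_to(StreamerState.CLOSED)
    # CLOSED is terminal: its transition set is empty, so a finished streamer
    # can never become ACTIVE again.
    assert not StreamerState.CLOSED.can_transition_to(StreamerState.ACTIVE)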
- self._started = False - self._is_running = False - self._paused = False - self._stop = False + # Execution control + # ----------------- + self._state = StreamerState.NOT_STARTED + # Used to mark that the streamer is currently running its main loop and sending messages. + self._is_main_loop_running = False - # Flow control. + # Flow control + # ------------ self._next_sequence_number: int = 0 self._last_ack: int = -1 self._sliding_window_size: Optional[int] = self.DEFAULT_SLIDING_WINDOW_SIZE + def get_next_seq(self) -> int: + assert self._state is not StreamerState.CLOSING + assert self._state is not StreamerState.CLOSED + seq = self._next_sequence_number + self._next_sequence_number += 1 + return seq + def set_sliding_window_size(self, size: Optional[int]) -> None: """Set a new sliding window size for flow control. If size is none, disables flow control. """ @@ -102,73 +133,115 @@ def set_ack(self, ack: int) -> None: If the new value is bigger than the previous value, the streaming might be resumed. """ - if ack <= self._last_ack: + if self._state is StreamerState.CLOSING: + closing_ack = self._next_sequence_number - 1 + if ack == closing_ack: + self._last_ack = ack + self.stop(True) + return + if ack == self._last_ack: # We might receive outdated or duplicate ACKs, and we can safely ignore them. + return + if ack < self._last_ack: + # ACK got smaller. Something is wrong... self.send_message(StreamErrorMessage( id=self.stream_id, - errmsg=f'Outdated ACK received. Skipping it... (ack={ack})' + errmsg=f'Outdated ACK received (ack={ack})' )) + self.stop(False) return if ack >= self._next_sequence_number: + # ACK is higher than the last message sent. Something is wrong... self.send_message(StreamErrorMessage( id=self.stream_id, - errmsg=f'Received ACK is higher than the last sent message. Skipping it... (ack={ack})' + errmsg=f'Received ACK is higher than the last sent message (ack={ack})' )) + self.stop(False) return self._last_ack = ack self.resume_if_possible() def resume_if_possible(self) -> None: - if not self._started: + """Resume sending messages if possible.""" + if self._state is StreamerState.PAUSED: + return + if not self._state.can_transition_to(StreamerState.ACTIVE): + return + if self._is_main_loop_running: + return + if self.should_pause_streaming(): return - if not self.should_pause_streaming() and not self._is_running: - self.resumeProducing() + self._run() + + def set_state(self, new_state: StreamerState) -> None: + """Set a new state for the streamer.""" + if self._state == new_state: + return + assert self._state.can_transition_to(new_state) + self._state = new_state def start(self) -> Deferred[bool]: """Start streaming items.""" + assert self._state is StreamerState.NOT_STARTED + # The websocket connection somehow instantiates an twisted.web.http.HTTPChannel object # which register a producer. It seems the HTTPChannel is not used anymore after switching # to websocket but it keep registered. So we have to unregister before registering a new # producer. 
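To make the window arithmetic concrete, a standalone restatement of the pause condition from `should_pause_streaming()` with illustrative numbers (the `should_pause` helper below is a sketch, not part of the patch):

    def should_pause(next_seq: int, last_ack: int, window: int | None) -> bool:
        # Mirrors should_pause_streaming(): no window means flow control is off;
        # otherwise pause once next_seq reaches last_ack + window + 1.
        if window is None:
            return False
        return next_seq >= last_ack + window + 1

    # With window=4 and no ACK yet (last_ack == -1), seqs 0..3 go out and then the
    # streamer pauses; an ACK of 3 raises the limit to 8, releasing seqs 4..7.
    assert should_pause(next_seq=4, last_ack=-1, window=4)
    assert not should_pause(next_seq=4, last_ack=3, window=4)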
if self.protocol.transport.producer: self.protocol.unregisterProducer() - self.protocol.registerProducer(self, True) - assert not self._started - self._started = True - self.send_message(StreamBeginMessage(id=self.stream_id, window_size=self._sliding_window_size)) - self.resumeProducing() + self.send_message(StreamBeginMessage( + id=self.stream_id, + seq=self.get_next_seq(), + window_size=self._sliding_window_size, + )) + self.resume_if_possible() return self.deferred def stop(self, success: bool) -> None: """Stop streaming items.""" - assert self._started - self._stop = True - self._started = False + if not self._state.can_transition_to(StreamerState.CLOSED): + # Do nothing if the streamer has already been stopped. + self.protocol.log.warn('stop called in an unexpected state', state=self._state) + return + self.set_state(StreamerState.CLOSED) self.protocol.unregisterProducer() self.deferred.callback(success) + def gracefully_close(self) -> None: + """Gracefully close the stream by sending the StreamEndMessage and waiting for its ack.""" + if not self._state.can_transition_to(StreamerState.CLOSING): + return + self.protocol.log.info('websocket streaming ended, waiting for ACK') + self.send_message(StreamEndMessage(id=self.stream_id, seq=self.get_next_seq())) + self.set_state(StreamerState.CLOSING) + def pauseProducing(self) -> None: """Pause streaming. Called by twisted.""" - self._paused = True + if not self._state.can_transition_to(StreamerState.PAUSED): + self.protocol.log.warn('pause requested in an unexpected state', state=self._state) + return + self.set_state(StreamerState.PAUSED) def stopProducing(self) -> None: """Stop streaming. Called by twisted.""" - self._stop = True + if not self._state.can_transition_to(StreamerState.CLOSED): + self.protocol.log.warn('stopped requested in an unexpected state', state=self._state) + return self.stop(False) def resumeProducing(self) -> None: """Resume streaming. 
Called by twisted.""" - self._paused = False - self._run() - - def _run(self) -> None: - """Run the streaming main loop.""" - coro = self._async_run() - Deferred.fromCoroutine(coro) + if not self._state.can_transition_to(StreamerState.ACTIVE): + self.protocol.log.warn('resume requested in an unexpected state', state=self._state) + return + self.set_state(StreamerState.ACTIVE) + self.resume_if_possible() def should_pause_streaming(self) -> bool: + """Return true if the streaming should pause due to the flow control mechanism.""" if self._sliding_window_size is None: return False stop_value = self._last_ack + self._sliding_window_size + 1 @@ -176,13 +249,22 @@ def should_pause_streaming(self) -> bool: return False return True + def _run(self) -> None: + """Run the streaming main loop.""" + if not self._state.can_transition_to(StreamerState.ACTIVE): + self.protocol.log.warn('_run() called in an unexpected state', state=self._state) + return + coro = self._async_run() + Deferred.fromCoroutine(coro) + async def _async_run(self): - assert not self._is_running - self._is_running = True + assert not self._is_main_loop_running + self.set_state(StreamerState.ACTIVE) + self._is_main_loop_running = True try: await self._async_run_unsafe() finally: - self._is_running = False + self._is_main_loop_running = False async def _async_run_unsafe(self): """Internal method that runs the streaming main loop.""" @@ -204,7 +286,7 @@ async def _async_run_unsafe(self): self.stats_sent_addresses += 1 self.send_message(StreamAddressMessage( id=self.stream_id, - seq=self._next_sequence_number, + seq=self.get_next_seq(), index=item.index, address=item.address, subscribed=subscribed, @@ -214,23 +296,16 @@ async def _async_run_unsafe(self): self.stats_sent_vertices += 1 self.send_message(StreamVertexMessage( id=self.stream_id, - seq=self._next_sequence_number, + seq=self.get_next_seq(), data=item.vertex.to_json_extended(), )) case _: assert False - self._next_sequence_number += 1 if self.should_pause_streaming(): break - # The methods `pauseProducing()` and `stopProducing()` might be called during the - # call to `self.protocol.sendMessage()`. So both `_paused` and `_stop` might change - # during the loop. - if self._paused or self._stop: - break - self.stats_total_messages += 1 if self.stats_total_messages % self.stats_log_interval == 0: self.protocol.log.info('websocket streaming statistics', @@ -238,6 +313,13 @@ async def _async_run_unsafe(self): sent_vertices=self.stats_sent_vertices, sent_addresses=self.stats_sent_addresses) + # The methods `pauseProducing()` and `stopProducing()` might be called during the + # call to `self.protocol.sendMessage()`. So the streamer state might change during + # the loop. + if self._state is not StreamerState.ACTIVE: + break + + # Limit blocking of the event loop to a maximum of N seconds. dt = self.reactor.seconds() - t0 if dt > self.max_seconds_locking_event_loop: # Let the event loop run at least once. @@ -245,11 +327,8 @@ async def _async_run_unsafe(self): t0 = self.reactor.seconds() else: - if self._stop: - # If the streamer has been stopped, there is nothing else to do. - return - self.send_message(StreamEndMessage(id=self.stream_id)) - self.stop(True) + # Iterator is empty so we can close the stream. 
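Putting the pieces together, a sketch of the close handshake as the client would observe it, for a stream that emitted a single address before its iterator ran out (the stream id is illustrative and only a subset of each message's fields is shown):

    # Sketch only: every server message now carries a seq; after the end message
    # the streamer waits in CLOSING until the client ACKs that final seq, and only
    # then does stop(True) run and fire the deferred with success.
    close_handshake = [
        ('server', {'type': 'stream:history:begin', 'id': 'stream-1', 'seq': 0, 'window_size': None}),
        ('server', {'type': 'stream:history:address', 'id': 'stream-1', 'seq': 1}),
        ('server', {'type': 'stream:history:end', 'id': 'stream-1', 'seq': 2}),
        ('client', {'type': 'request:history:ack', 'id': 'stream-1', 'ack': 2}),
    ]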
+ self.gracefully_close() def send_message(self, message: StreamBase) -> None: """Send a message to the websocket connection.""" diff --git a/tests/websocket/test_streamer.py b/tests/websocket/test_streamer.py index e83a81438..87c4a5407 100644 --- a/tests/websocket/test_streamer.py +++ b/tests/websocket/test_streamer.py @@ -6,7 +6,7 @@ from hathor.wallet import HDWallet from hathor.websocket.factory import HathorAdminWebsocketFactory from hathor.websocket.iterators import AddressItem, ManualAddressSequencer, gap_limit_search -from hathor.websocket.streamer import HistoryStreamer +from hathor.websocket.streamer import HistoryStreamer, StreamerState from tests.unittest import TestCase from tests.utils import GENESIS_ADDRESS_B58 @@ -60,7 +60,7 @@ def test_streamer(self) -> None: 'data': genesis.to_json_extended(), }) expected_result.append({'type': 'stream:history:end', 'id': stream_id}) - for index, item in enumerate(expected_result[1:-1]): + for index, item in enumerate(expected_result): item['seq'] = index # Create both the address iterator and the GAP limit searcher. @@ -86,6 +86,13 @@ def test_streamer(self) -> None: # Run the streamer. manager.reactor.advance(10) + # Check the streamer is waiting for the last ACK. + self.assertTrue(streamer._state, StreamerState.CLOSING) + streamer.set_ack(1) + self.assertTrue(streamer._state, StreamerState.CLOSING) + streamer.set_ack(len(expected_result) - 1) + self.assertTrue(streamer._state, StreamerState.CLOSED) + # Check the results. items_iter = self._parse_ws_raw(transport.value()) result = list(items_iter) From 7d495354a30cbf6936518fe4211b9e32890963d8 Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Mon, 9 Sep 2024 20:14:08 +0200 Subject: [PATCH 24/61] fix(api): status regression after p2p refactor --- hathor/p2p/resources/status.py | 2 +- tests/resources/p2p/test_status.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/hathor/p2p/resources/status.py b/hathor/p2p/resources/status.py index 6556e0d2b..ac26f198c 100644 --- a/hathor/p2p/resources/status.py +++ b/hathor/p2p/resources/status.py @@ -108,7 +108,7 @@ def render_GET(self, request): 'uptime': now - self.manager.start_time, 'entrypoints': self.manager.connections.my_peer.entrypoints_as_str(), }, - 'peers_whitelist': self.manager.peers_whitelist, + 'peers_whitelist': [str(peer_id) for peer_id in self.manager.peers_whitelist], 'known_peers': known_peers, 'connections': { 'connected_peers': connected_peers, diff --git a/tests/resources/p2p/test_status.py b/tests/resources/p2p/test_status.py index 5b156326f..baa2c19eb 100644 --- a/tests/resources/p2p/test_status.py +++ b/tests/resources/p2p/test_status.py @@ -18,6 +18,8 @@ def setUp(self): self.web = StubSite(StatusResource(self.manager)) self.entrypoint = Entrypoint.parse('tcp://192.168.1.1:54321') self.manager.connections.my_peer.entrypoints.append(self.entrypoint) + self.manager.peers_whitelist.append(self.get_random_peer_from_pool().id) + self.manager.peers_whitelist.append(self.get_random_peer_from_pool().id) self.manager2 = self.create_peer('testnet') self.manager2.connections.my_peer.entrypoints.append(self.entrypoint) From 9fddd7abc1a9661d5d7ff8c28b04ec8b3037a854 Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Tue, 10 Sep 2024 11:47:30 -0500 Subject: [PATCH 25/61] fix(ws): Fix tx history streamer wrong logging when pauseProducing() is called multiple times --- hathor/websocket/streamer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hathor/websocket/streamer.py 
b/hathor/websocket/streamer.py index 08eb6ca89..5c881a8f8 100644 --- a/hathor/websocket/streamer.py +++ b/hathor/websocket/streamer.py @@ -49,7 +49,7 @@ def can_transition_to(self, destination: 'StreamerState') -> bool: VALID_TRANSITIONS = { StreamerState.NOT_STARTED: {StreamerState.ACTIVE}, StreamerState.ACTIVE: {StreamerState.ACTIVE, StreamerState.PAUSED, StreamerState.CLOSING, StreamerState.CLOSED}, - StreamerState.PAUSED: {StreamerState.ACTIVE, StreamerState.CLOSED}, + StreamerState.PAUSED: {StreamerState.ACTIVE, StreamerState.PAUSED, StreamerState.CLOSED}, StreamerState.CLOSING: {StreamerState.CLOSED}, StreamerState.CLOSED: set() } From 7f530323c4c47329aae83b7d5523ed5fc0d36ea1 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Thu, 3 Oct 2024 17:48:06 -0300 Subject: [PATCH 26/61] fix: event handling of txs with custom scripts (#1140) --- hathor/cli/events_simulator/scenario.py | 74 ++++++++++ hathor/event/model/event_data.py | 21 ++- .../event/test_event_simulation_scenarios.py | 131 ++++++++++++++++++ 3 files changed, 219 insertions(+), 7 deletions(-) diff --git a/hathor/cli/events_simulator/scenario.py b/hathor/cli/events_simulator/scenario.py index 315a633d2..460268c2f 100644 --- a/hathor/cli/events_simulator/scenario.py +++ b/hathor/cli/events_simulator/scenario.py @@ -27,6 +27,8 @@ class Scenario(Enum): REORG = 'REORG' UNVOIDED_TRANSACTION = 'UNVOIDED_TRANSACTION' INVALID_MEMPOOL_TRANSACTION = 'INVALID_MEMPOOL_TRANSACTION' + EMPTY_SCRIPT = 'EMPTY_SCRIPT' + CUSTOM_SCRIPT = 'CUSTOM_SCRIPT' def simulate(self, simulator: 'Simulator', manager: 'HathorManager') -> None: simulate_fns = { @@ -36,6 +38,8 @@ def simulate(self, simulator: 'Simulator', manager: 'HathorManager') -> None: Scenario.REORG: simulate_reorg, Scenario.UNVOIDED_TRANSACTION: simulate_unvoided_transaction, Scenario.INVALID_MEMPOOL_TRANSACTION: simulate_invalid_mempool_transaction, + Scenario.EMPTY_SCRIPT: simulate_empty_script, + Scenario.CUSTOM_SCRIPT: simulate_custom_script, } simulate_fn = simulate_fns[self] @@ -174,3 +178,73 @@ def simulate_invalid_mempool_transaction(simulator: 'Simulator', manager: 'Hatho # the transaction should have been removed from the mempool and the storage after the re-org assert tx not in manager.tx_storage.iter_mempool_from_best_index() assert not manager.tx_storage.transaction_exists(tx.hash) + + +def simulate_empty_script(simulator: 'Simulator', manager: 'HathorManager') -> None: + from hathor.conf.get_settings import get_global_settings + from hathor.simulator.utils import add_new_blocks, gen_new_tx + from hathor.transaction import TxInput, TxOutput + + settings = get_global_settings() + assert manager.wallet is not None + address = manager.wallet.get_unused_address(mark_as_used=False) + + add_new_blocks(manager, settings.REWARD_SPEND_MIN_BLOCKS + 1) + simulator.run(60) + + tx1 = gen_new_tx(manager, address, 1000) + original_script = tx1.outputs[1].script + tx1.outputs[1].script = b'' + tx1.weight = manager.daa.minimum_tx_weight(tx1) + tx1.update_hash() + assert manager.propagate_tx(tx1, fails_silently=False) + simulator.run(60) + + tx2 = gen_new_tx(manager, address, 1000) + tx2.inputs = [TxInput(tx_id=tx1.hash, index=1, data=b'\x51')] + tx2.outputs = [TxOutput(value=1000, script=original_script)] + tx2.weight = manager.daa.minimum_tx_weight(tx2) + tx2.update_hash() + assert manager.propagate_tx(tx2, fails_silently=False) + simulator.run(60) + + add_new_blocks(manager, 1) + simulator.run(60) + + +def simulate_custom_script(simulator: 'Simulator', manager: 'HathorManager') -> None: 
+ from hathor.conf.get_settings import get_global_settings + from hathor.simulator.utils import add_new_blocks, gen_new_tx + from hathor.transaction import TxInput, TxOutput + from hathor.transaction.scripts import HathorScript, Opcode + + settings = get_global_settings() + assert manager.wallet is not None + address = manager.wallet.get_unused_address() + + add_new_blocks(manager, settings.REWARD_SPEND_MIN_BLOCKS + 1) + simulator.run(60) + + tx1 = gen_new_tx(manager, address, 1000) + s = HathorScript() + some_data = b'some_data' + s.pushData(some_data) + s.addOpcode(Opcode.OP_EQUALVERIFY) + s.addOpcode(Opcode.OP_1) + original_script = tx1.outputs[1].script + tx1.outputs[1].script = s.data + tx1.weight = manager.daa.minimum_tx_weight(tx1) + tx1.update_hash() + assert manager.propagate_tx(tx1, fails_silently=False) + simulator.run(60) + + tx2 = gen_new_tx(manager, address, 1000) + tx2.inputs = [TxInput(tx_id=tx1.hash, index=1, data=bytes([len(some_data)]) + some_data)] + tx2.outputs = [TxOutput(value=1000, script=original_script)] + tx2.weight = manager.daa.minimum_tx_weight(tx2) + tx2.update_hash() + assert manager.propagate_tx(tx2, fails_silently=False) + simulator.run(60) + + add_new_blocks(manager, 1) + simulator.run(60) diff --git a/hathor/event/model/event_data.py b/hathor/event/model/event_data.py index b9f2b6da6..3ee281bd8 100644 --- a/hathor/event/model/event_data.py +++ b/hathor/event/model/event_data.py @@ -125,15 +125,22 @@ def from_event_arguments(cls, args: EventArguments) -> Self: output | dict(decoded=output['decoded'] or None) for output in tx_json['outputs'] ] - tx_json['inputs'] = [ - dict( - tx_id=input_['tx_id'], - index=input_['index'], - spent_output=input_ + + inputs = [] + for tx_input in tx_json['inputs']: + decoded = tx_input.get('decoded') + if decoded and decoded.get('address') is None: + # we remove the decoded data if it does not contain an address + tx_input['decoded'] = None + inputs.append( + dict( + tx_id=tx_input['tx_id'], + index=tx_input['index'], + spent_output=tx_input, + ) ) - for input_ in tx_json['inputs'] - ] + tx_json['inputs'] = inputs return cls(**tx_json) diff --git a/tests/event/test_event_simulation_scenarios.py b/tests/event/test_event_simulation_scenarios.py index d1a5cd171..3c1ef99ec 100644 --- a/tests/event/test_event_simulation_scenarios.py +++ b/tests/event/test_event_simulation_scenarios.py @@ -351,6 +351,137 @@ def test_invalid_mempool(self) -> None: expected = _remove_timestamp(expected) assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + def test_empty_script(self) -> None: + stream_id = self.manager._event_manager._stream_id + assert stream_id is not None + Scenario.EMPTY_SCRIPT.simulate(self.simulator, self.manager) + self._start_stream() + + responses = self._get_success_responses() + + expected = [ + # LOAD_STATED + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # LOAD_FINISHED + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their first block # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, 
stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One NEW_VERTEX_ACCEPTED for a new block + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, 
script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, 
timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=0, 
type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, 
network='unittests', event=BaseEvent(id=20, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full')), group_id=None), 
latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One VERTEX_METADATA_CHANGED for a 
new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.449427506558003, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One NEW_VERTEX_ACCEPTED for a new tx + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, 
script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.449427506558003, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a tx, adding the new tx as spending their output and children # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=15.990494828748208, inputs=[TxInput(tx_id='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', index=1, spent_output=TxOutput(value=1000, token_data=0, script='', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=15.990494828748208, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[SpentOutput(index=1, tx_ids=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11'])], conflict_with=[], voided_by=[], received_by=[], 
children=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11'], twins=[], accumulated_weight=18.449427506558003, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One NEW_VERTEX_ACCEPTED for a new tx + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=15.990494828748208, inputs=[TxInput(tx_id='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', index=1, spent_output=TxOutput(value=1000, token_data=0, script='', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=15.990494828748208, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each confirmed transaction (first block changed) # noqa E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', 'ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.691575942773007, first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=15.990494828748208, inputs=[TxInput(tx_id='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', index=1, spent_output=TxOutput(value=1000, token_data=0, script='', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], 
parents=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d'], twins=[], accumulated_weight=15.990494828748208, score=0.0, first_block='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[SpentOutput(index=1, tx_ids=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11'])], conflict_with=[], voided_by=[], received_by=[], children=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', 'da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d'], twins=[], accumulated_weight=18.449427506558003, score=0.0, first_block='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One NEW_VERTEX_ACCEPTED for a new block + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', 'ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.691575942773007, first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, 
stream_id=stream_id)] # noqa: E501 + + responses = _remove_timestamp(responses) + expected = _remove_timestamp(expected) + assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + + def test_custom_script(self) -> None: + stream_id = self.manager._event_manager._stream_id + assert stream_id is not None + Scenario.CUSTOM_SCRIPT.simulate(self.simulator, self.manager) + self._start_stream() + + responses = self._get_success_responses() + + expected = [ + # LOAD_STARTED + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # LOAD_FINISHED + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their
first block # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', 'ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', 'c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', 'db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=2.0, first_block='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', 
'3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', 'ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', 'c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', 'db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=2.0, first_block='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One NEW_VERTEX_ACCEPTED for a new block + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', nonce=0, timestamp=1578878911, signal_bits=0, 
version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', 
nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=0, 
type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, 
network='unittests', event=BaseEvent(id=22, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H7A1HBirZ4EhWtCWLcAy4yw6ybWcKnjdfG', timelock=None))], parents=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H7A1HBirZ4EhWtCWLcAy4yw6ybWcKnjdfG', timelock=None))], parents=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.55128132611371, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, 
metadata=TxMetadata(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', spent_outputs=[SpentOutput(index=0, tx_ids=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec'])], conflict_with=[], voided_by=[], received_by=[], children=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One NEW_VERTEX_ACCEPTED for a new tx + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.55128132611371, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a tx, adding the new tx as spending their output and children # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=16.12160141040609, inputs=[TxInput(tx_id='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', index=1, spent_output=TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=16.12160141040609, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[SpentOutput(index=1, tx_ids=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3'])], conflict_with=[], voided_by=[], received_by=[], children=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3'], twins=[], accumulated_weight=18.55128132611371, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One NEW_VERTEX_ACCEPTED for a new tx + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=16.12160141040609, inputs=[TxInput(tx_id='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', index=1, spent_output=TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=16.12160141040609, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each confirmed transaction (first block changed) # noqa E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', 
'3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', 'cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.79789471506282, first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=16.12160141040609, inputs=[TxInput(tx_id='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', index=1, spent_output=TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302'], twins=[], accumulated_weight=16.12160141040609, score=0.0, first_block='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[SpentOutput(index=1, tx_ids=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3'])], conflict_with=[], voided_by=[], received_by=[], children=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', '99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302'], twins=[], accumulated_weight=18.55128132611371, score=0.0, 
first_block='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One NEW_VERTEX_ACCEPTED for a new block + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', '3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', 'cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.79789471506282, first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id) # noqa: E501 + ] + + responses = _remove_timestamp(responses) + expected = _remove_timestamp(expected) + assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + def _start_stream(self) -> None: start_stream = StartStreamRequest(type='START_STREAM', window_size=1_000_000, last_ack_event_id=None) self._send_request(start_stream) From 115569a652a0ea6e1bf2542a79868999bdda93b8 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Thu, 3 Oct 2024 19:25:44 -0300 Subject: [PATCH 27/61] refactor(benchmark): change benchmark CI to download data from logs in AWS (#1133) --- .github/workflows/base_benchmarks.yml | 25 +++++++++++-------- .github/workflows/pr_benchmarks.yml | 11 +++++--- extras/benchmarking/.env | 7 ------ extras/benchmarking/sync_v2/.env | 9 +++++++ .../{ => sync_v2}/benchmark_sync_v2.sh | 0 5 files changed, 30 insertions(+), 22 deletions(-) delete mode 100644 extras/benchmarking/.env create mode 100644 extras/benchmarking/sync_v2/.env rename extras/benchmarking/{ => sync_v2}/benchmark_sync_v2.sh (100%) diff --git a/.github/workflows/base_benchmarks.yml b/.github/workflows/base_benchmarks.yml index a91d54ad6..7e9e3446a 100644 --- a/.github/workflows/base_benchmarks.yml +++ b/.github/workflows/base_benchmarks.yml @@ -25,16 +25,19 @@ jobs: python: 3.11 os: ubuntu-22.04 - name: Set env vars - run: cat ./extras/benchmarking/.env >> $GITHUB_ENV + run: cat ./extras/benchmarking/sync_v2/.env >> $GITHUB_ENV - name: Download benchmark data run: | + wget $BENCH_DATA_URL + tar -xzf $BENCH_DATA_FILE_NAME.tar.gz mkdir $SERVER_DATA_DIR - poetry run hathor-cli quick_test \ + poetry run hathor-cli load-from-logs \ --testnet \ --data $SERVER_DATA_DIR \ --cache \ --cache-size $CACHE_SIZE \ - --quit-after-n-blocks $N_BLOCKS + --x-localhost-only \ + --log-dump $BENCH_DATA_FILE_NAME - name: Run server node run: | poetry run hathor-cli run_node \ @@ -48,11 +51,11 @@ jobs: - name: Track base branch benchmarks with Bencher run: | bencher run \ - --project hathor-core \ - --token '${{ secrets.BENCHER_API_TOKEN }}' \ - --branch master \ - --testbed ubuntu-22.04 \ - --adapter shell_hyperfine \ - --err \ - --file $BENCH_FILE \ - './extras/benchmarking/benchmark_sync_v2.sh' 
+ --project hathor-core \ + --token '${{ secrets.BENCHER_API_TOKEN }}' \ + --branch master \ + --testbed ubuntu-22.04 \ + --adapter shell_hyperfine \ + --err \ + --file $BENCH_FILE \ + './extras/benchmarking/sync_v2/benchmark_sync_v2.sh' diff --git a/.github/workflows/pr_benchmarks.yml b/.github/workflows/pr_benchmarks.yml index 40c9b2794..3e9c3cb6d 100644 --- a/.github/workflows/pr_benchmarks.yml +++ b/.github/workflows/pr_benchmarks.yml @@ -29,16 +29,19 @@ jobs: python: 3.11 os: ubuntu-22.04 - name: Set env vars - run: cat ./extras/benchmarking/.env >> $GITHUB_ENV + run: cat ./extras/benchmarking/sync_v2/.env >> $GITHUB_ENV - name: Download benchmark data run: | + wget $BENCH_DATA_URL + tar -xzf $BENCH_DATA_FILE_NAME.tar.gz mkdir $SERVER_DATA_DIR - poetry run hathor-cli quick_test \ + poetry run hathor-cli load-from-logs \ --testnet \ --data $SERVER_DATA_DIR \ --cache \ --cache-size $CACHE_SIZE \ - --quit-after-n-blocks $N_BLOCKS + --x-localhost-only \ + --log-dump $BENCH_DATA_FILE_NAME - name: Run server node run: | poetry run hathor-cli run_node \ @@ -62,4 +65,4 @@ jobs: --err \ --github-actions '${{ secrets.GITHUB_TOKEN }}' \ --file $BENCH_FILE \ - './extras/benchmarking/benchmark_sync_v2.sh' + './extras/benchmarking/sync_v2/benchmark_sync_v2.sh' diff --git a/extras/benchmarking/.env b/extras/benchmarking/.env deleted file mode 100644 index 4f22d614a..000000000 --- a/extras/benchmarking/.env +++ /dev/null @@ -1,7 +0,0 @@ -N_BLOCKS=20000 -CACHE_SIZE=100000 -SERVER_DATA_DIR=server-data -TCP_PORT=40403 -N_RUNS=2 -BENCH_FILE=bench_results.json -BENCH_DATA_DIR=bench-data diff --git a/extras/benchmarking/sync_v2/.env b/extras/benchmarking/sync_v2/.env new file mode 100644 index 000000000..73240719b --- /dev/null +++ b/extras/benchmarking/sync_v2/.env @@ -0,0 +1,9 @@ +BENCH_DATA_FILE_NAME=ci_testnet_upto_20k +BENCH_DATA_URL=https://hathor-public-files.s3.amazonaws.com/hathor-core-ci/ci_testnet_upto_20k.tar.gz +N_BLOCKS=20000 +CACHE_SIZE=100000 +SERVER_DATA_DIR=server-data +TCP_PORT=40403 +N_RUNS=2 +BENCH_FILE=testnet_upto_20k_results.json +BENCH_DATA_DIR=bench-data diff --git a/extras/benchmarking/benchmark_sync_v2.sh b/extras/benchmarking/sync_v2/benchmark_sync_v2.sh similarity index 100% rename from extras/benchmarking/benchmark_sync_v2.sh rename to extras/benchmarking/sync_v2/benchmark_sync_v2.sh From e3b73dd92a4e6b03f725f3e5ad1bf52a35af98da Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Fri, 4 Oct 2024 12:14:56 -0300 Subject: [PATCH 28/61] fix: removed txs from test wallet (#1139) --- hathor/cli/events_simulator/scenario.py | 7 ++++ hathor/consensus/consensus.py | 1 + hathor/event/model/event_data.py | 2 +- hathor/wallet/base_wallet.py | 7 ++++ tests/tx/test_reward_lock.py | 56 +++++++++++++++---------- 5 files changed, 50 insertions(+), 23 deletions(-) diff --git a/hathor/cli/events_simulator/scenario.py b/hathor/cli/events_simulator/scenario.py index 460268c2f..7ee5b7917 100644 --- a/hathor/cli/events_simulator/scenario.py +++ b/hathor/cli/events_simulator/scenario.py @@ -160,11 +160,15 @@ def simulate_invalid_mempool_transaction(simulator: 'Simulator', manager: 'Hatho blocks = add_new_blocks(manager, settings.REWARD_SPEND_MIN_BLOCKS + 1) simulator.run(60) + balance_per_address = manager.wallet.get_balance_per_address(settings.HATHOR_TOKEN_UID) + assert balance_per_address[address] == 6400 tx = gen_new_tx(manager, address, 1000) tx.weight = manager.daa.minimum_tx_weight(tx) tx.update_hash() assert manager.propagate_tx(tx, fails_silently=False) simulator.run(60) + 
balance_per_address = manager.wallet.get_balance_per_address(settings.HATHOR_TOKEN_UID) + assert balance_per_address[address] == 1000 # re-org: replace last two blocks with one block, new height will be just one short of enough block_to_replace = blocks[-2] @@ -178,6 +182,9 @@ def simulate_invalid_mempool_transaction(simulator: 'Simulator', manager: 'Hatho # the transaction should have been removed from the mempool and the storage after the re-org assert tx not in manager.tx_storage.iter_mempool_from_best_index() assert not manager.tx_storage.transaction_exists(tx.hash) + assert bool(tx.get_metadata().voided_by) + balance_per_address = manager.wallet.get_balance_per_address(settings.HATHOR_TOKEN_UID) + assert balance_per_address[address] == 6400 def simulate_empty_script(simulator: 'Simulator', manager: 'HathorManager') -> None: diff --git a/hathor/consensus/consensus.py b/hathor/consensus/consensus.py index 8bdbb4e4a..43a70e7f8 100644 --- a/hathor/consensus/consensus.py +++ b/hathor/consensus/consensus.py @@ -210,6 +210,7 @@ def _remove_transactions( for tx in txs: tx_meta = tx.get_metadata() assert not tx_meta.validation.is_checkpoint() + assert bool(tx_meta.voided_by), 'removed txs must be voided' for parent in set(tx.parents) - txset: parents_to_update[parent].append(tx.hash) dangling_children.update(set(tx_meta.children) - txset) diff --git a/hathor/event/model/event_data.py b/hathor/event/model/event_data.py index 3ee281bd8..a24ceca1c 100644 --- a/hathor/event/model/event_data.py +++ b/hathor/event/model/event_data.py @@ -117,7 +117,7 @@ class TxDataWithoutMeta(BaseEventData, extra=Extra.ignore): @classmethod def from_event_arguments(cls, args: EventArguments) -> Self: from hathor.transaction.resources.transaction import get_tx_extra_data - tx_extra_data_json = get_tx_extra_data(args.tx, detail_tokens=False) + tx_extra_data_json = get_tx_extra_data(args.tx, detail_tokens=False, force_reload_metadata=False) tx_json = tx_extra_data_json['tx'] meta_json = tx_extra_data_json['meta'] tx_json['metadata'] = meta_json diff --git a/hathor/wallet/base_wallet.py b/hathor/wallet/base_wallet.py index 7b38dfa12..000c8a100 100644 --- a/hathor/wallet/base_wallet.py +++ b/hathor/wallet/base_wallet.py @@ -126,6 +126,7 @@ def __init__(self, directory: str = './', pubsub: Optional[PubSubManager] = None self.pubsub_events = [ HathorEvents.CONSENSUS_TX_UPDATE, + HathorEvents.CONSENSUS_TX_REMOVED, ] if reactor is None: @@ -176,6 +177,12 @@ def handle_publish(self, key: HathorEvents, args: EventArguments) -> None: data = args.__dict__ if key == HathorEvents.CONSENSUS_TX_UPDATE: self.on_tx_update(data['tx']) + elif key == HathorEvents.CONSENSUS_TX_REMOVED: + # we use the same method as above because a removed tx is also voided + tx = data['tx'] + assert isinstance(tx, Transaction) + assert bool(tx.get_metadata().voided_by) + self.on_tx_update(tx) else: raise NotImplementedError diff --git a/tests/tx/test_reward_lock.py b/tests/tx/test_reward_lock.py index 99b9678a8..1a56d7e6a 100644 --- a/tests/tx/test_reward_lock.py +++ b/tests/tx/test_reward_lock.py @@ -1,9 +1,10 @@ import pytest -from hathor.crypto.util import get_address_from_public_key +from hathor.crypto.util import get_address_b58_from_bytes, get_address_from_public_key from hathor.exception import InvalidNewTransaction +from hathor.manager import HathorManager from hathor.simulator.utils import add_new_blocks -from hathor.transaction import Transaction, TxInput, TxOutput +from hathor.transaction import Block, Transaction, TxInput, TxOutput from 
hathor.transaction.exceptions import RewardLocked from hathor.transaction.scripts import P2PKH from hathor.transaction.storage import TransactionMemoryStorage @@ -15,7 +16,7 @@ class BaseTransactionTest(unittest.TestCase): __test__ = False - def setUp(self): + def setUp(self) -> None: super().setUp() self.wallet = Wallet() @@ -32,7 +33,7 @@ def setUp(self): blocks = add_blocks_unlock_reward(self.manager) self.last_block = blocks[-1] - def _add_reward_block(self): + def _add_reward_block(self) -> tuple[Block, int]: reward_block = self.manager.generate_mining_block( address=get_address_from_public_key(self.genesis_public_key) ) @@ -42,9 +43,10 @@ def _add_reward_block(self): unlock_height = reward_block.static_metadata.height + self._settings.REWARD_SPEND_MIN_BLOCKS + 1 return reward_block, unlock_height - def _spend_reward_tx(self, manager, reward_block): + def _spend_reward_tx(self, manager: HathorManager, reward_block: Block) -> tuple[Transaction, str]: value = reward_block.outputs[0].value - address = get_address_from_public_key(self.genesis_public_key) + assert manager.wallet is not None + address = manager.wallet.get_unused_address_bytes() script = P2PKH.create_output_script(address) input_ = TxInput(reward_block.hash, 0, b'') output = TxOutput(value, script) @@ -62,26 +64,26 @@ def _spend_reward_tx(self, manager, reward_block): self.manager.cpu_mining_service.resolve(tx) tx.update_initial_metadata(save=False) tx.init_static_metadata_from_storage(self._settings, self.tx_storage) - return tx + return tx, get_address_b58_from_bytes(address) - def test_classic_reward_lock(self): + def test_classic_reward_lock(self) -> None: # add block with a reward we can spend reward_block, unlock_height = self._add_reward_block() # reward cannot be spent while not enough blocks are added for _ in range(self._settings.REWARD_SPEND_MIN_BLOCKS): - tx = self._spend_reward_tx(self.manager, reward_block) + tx, _ = self._spend_reward_tx(self.manager, reward_block) self.assertEqual(tx.static_metadata.min_height, unlock_height) with self.assertRaises(RewardLocked): self.manager.verification_service.verify(tx) add_new_blocks(self.manager, 1, advance_clock=1) # now it should be spendable - tx = self._spend_reward_tx(self.manager, reward_block) + tx, _ = self._spend_reward_tx(self.manager, reward_block) self.assertEqual(tx.static_metadata.min_height, unlock_height) self.assertTrue(self.manager.propagate_tx(tx, fails_silently=False)) - def test_block_with_not_enough_height(self): + def test_block_with_not_enough_height(self) -> None: # add block with a reward we can spend reward_block, unlock_height = self._add_reward_block() @@ -91,7 +93,7 @@ def test_block_with_not_enough_height(self): # add tx bypassing reward-lock verification # XXX: this situation is impossible in practice, but we force it to test that when a block tries to confirm a # transaction before it can the RewardLocked exception is raised - tx = self._spend_reward_tx(self.manager, reward_block) + tx, _ = self._spend_reward_tx(self.manager, reward_block) self.assertEqual(tx.static_metadata.min_height, unlock_height) self.assertTrue(self.manager.on_new_tx(tx, fails_silently=False, reject_locked_reward=False)) @@ -105,7 +107,7 @@ def test_block_with_not_enough_height(self): all_blocks = [vertex for vertex in self.manager.tx_storage.get_all_transactions() if vertex.is_block] assert len(all_blocks) == 2 * self._settings.REWARD_SPEND_MIN_BLOCKS + 1 - def test_block_with_enough_height(self): + def test_block_with_enough_height(self) -> None: # add block with 
a reward we can spend reward_block, unlock_height = self._add_reward_block() @@ -113,14 +115,14 @@ def test_block_with_enough_height(self): add_new_blocks(self.manager, self._settings.REWARD_SPEND_MIN_BLOCKS, advance_clock=1) # add tx that spends the reward - tx = self._spend_reward_tx(self.manager, reward_block) + tx, _ = self._spend_reward_tx(self.manager, reward_block) self.assertEqual(tx.static_metadata.min_height, unlock_height) self.assertTrue(self.manager.on_new_tx(tx, fails_silently=False)) # new block will be able to confirm it add_new_blocks(self.manager, 1, advance_clock=1) - def test_mempool_tx_with_not_enough_height(self): + def test_mempool_tx_with_not_enough_height(self) -> None: from hathor.exception import InvalidNewTransaction # add block with a reward we can spend @@ -130,14 +132,14 @@ def test_mempool_tx_with_not_enough_height(self): add_new_blocks(self.manager, self._settings.REWARD_SPEND_MIN_BLOCKS - 1, advance_clock=1) # add tx to mempool, must fail reward-lock verification - tx = self._spend_reward_tx(self.manager, reward_block) + tx, _ = self._spend_reward_tx(self.manager, reward_block) self.assertEqual(tx.static_metadata.min_height, unlock_height) with self.assertRaises(RewardLocked): self.manager.verification_service.verify(tx) with self.assertRaises(InvalidNewTransaction): self.assertTrue(self.manager.on_new_tx(tx, fails_silently=False)) - def test_mempool_tx_with_enough_height(self): + def test_mempool_tx_with_enough_height(self) -> None: # add block with a reward we can spend reward_block, unlock_height = self._add_reward_block() @@ -145,11 +147,11 @@ def test_mempool_tx_with_enough_height(self): add_new_blocks(self.manager, self._settings.REWARD_SPEND_MIN_BLOCKS, advance_clock=1) # add tx that spends the reward, must not fail - tx = self._spend_reward_tx(self.manager, reward_block) + tx, _ = self._spend_reward_tx(self.manager, reward_block) self.assertEqual(tx.static_metadata.min_height, unlock_height) self.assertTrue(self.manager.on_new_tx(tx, fails_silently=False)) - def test_mempool_tx_invalid_after_reorg(self): + def test_mempool_tx_invalid_after_reorg(self) -> None: # add block with a reward we can spend reward_block, unlock_height = self._add_reward_block() @@ -157,9 +159,13 @@ def test_mempool_tx_invalid_after_reorg(self): blocks = add_new_blocks(self.manager, self._settings.REWARD_SPEND_MIN_BLOCKS, advance_clock=1) # add tx that spends the reward, must not fail - tx = self._spend_reward_tx(self.manager, reward_block) + tx, tx_address = self._spend_reward_tx(self.manager, reward_block) + balance_per_address = self.manager.wallet.get_balance_per_address(self._settings.HATHOR_TOKEN_UID) + assert tx_address not in balance_per_address self.assertEqual(tx.static_metadata.min_height, unlock_height) self.assertTrue(self.manager.on_new_tx(tx, fails_silently=False)) + balance_per_address = self.manager.wallet.get_balance_per_address(self._settings.HATHOR_TOKEN_UID) + assert balance_per_address[tx_address] == 6400 # re-org: replace last two blocks with one block, new height will be just one short of enough block_to_replace = blocks[-2] @@ -168,6 +174,7 @@ def test_mempool_tx_invalid_after_reorg(self): b0.weight = 10 self.manager.cpu_mining_service.resolve(b0) self.manager.propagate_tx(b0, fails_silently=False) + self.clock.advance(1) # now the new tx should not pass verification considering the reward lock with self.assertRaises(RewardLocked): @@ -179,6 +186,7 @@ def test_mempool_tx_invalid_after_reorg(self): # additionally the transaction should have been 
marked as invalid and removed from the storage after the re-org self.assertTrue(tx.get_metadata().validation.is_invalid()) self.assertFalse(self.manager.tx_storage.transaction_exists(tx.hash)) + self.assertTrue(bool(tx.get_metadata().voided_by)) # assert that the tx has been removed from its dependencies' metadata for parent_id in tx.parents: @@ -191,8 +199,12 @@ def test_mempool_tx_invalid_after_reorg(self): assert len(spent_outputs) == 1 assert tx.hash not in spent_outputs[0] + # the balance for the tx_address must have been removed + balance_per_address = self.manager.wallet.get_balance_per_address(self._settings.HATHOR_TOKEN_UID) + assert tx_address not in balance_per_address + @pytest.mark.xfail(reason='this is no longer the case, timestamp will not matter', strict=True) - def test_classic_reward_lock_timestamp_expected_to_fail(self): + def test_classic_reward_lock_timestamp_expected_to_fail(self) -> None: # add block with a reward we can spend reward_block, unlock_height = self._add_reward_block() @@ -201,7 +213,7 @@ def test_classic_reward_lock_timestamp_expected_to_fail(self): # tx timestamp is equal to the block that unlock the spent rewards. It should # be greater, so it'll fail - tx = self._spend_reward_tx(self.manager, reward_block) + tx, _ = self._spend_reward_tx(self.manager, reward_block) tx.timestamp = blocks[-1].timestamp self.manager.cpu_mining_service.resolve(tx) self.assertEqual(tx.static_metadata.min_height, unlock_height) From 6aa643824a4d36c995b32c7372327c7b1a10a04c Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Mon, 30 Sep 2024 19:28:44 +0200 Subject: [PATCH 29/61] chore: bump version to v0.63.0 --- hathor/cli/openapi_files/openapi_base.json | 2 +- hathor/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/hathor/cli/openapi_files/openapi_base.json b/hathor/cli/openapi_files/openapi_base.json index c92cbb0b0..1374ff343 100644 --- a/hathor/cli/openapi_files/openapi_base.json +++ b/hathor/cli/openapi_files/openapi_base.json @@ -7,7 +7,7 @@ ], "info": { "title": "Hathor API", - "version": "0.62.0" + "version": "0.63.0" }, "consumes": [ "application/json" diff --git a/hathor/version.py b/hathor/version.py index 7b4749024..abee02e14 100644 --- a/hathor/version.py +++ b/hathor/version.py @@ -19,7 +19,7 @@ from structlog import get_logger -BASE_VERSION = '0.62.0' +BASE_VERSION = '0.63.0' DEFAULT_VERSION_SUFFIX = "local" BUILD_VERSION_FILE_PATH = "./BUILD_VERSION" diff --git a/pyproject.toml b/pyproject.toml index ffb6b8620..70fa83eed 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ [tool.poetry] name = "hathor" -version = "0.62.0" +version = "0.63.0" description = "Hathor Network full-node" authors = ["Hathor Team "] license = "Apache-2.0" From 646878b8bd9042c8441432bd70bd8abc302da175 Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Mon, 19 Aug 2024 23:04:44 +0200 Subject: [PATCH 30/61] refactor(p2p): add PublicPeer and PrivatePeer to simplify Peer --- hathor/builder/builder.py | 20 +- hathor/builder/cli_builder.py | 18 +- hathor/cli/peer_id.py | 6 +- hathor/manager.py | 8 +- hathor/metrics.py | 2 +- hathor/p2p/factory.py | 6 +- hathor/p2p/manager.py | 78 +- hathor/p2p/netfilter/matches.py | 2 +- hathor/p2p/peer.py | 708 ++++++++++-------- hathor/p2p/peer_storage.py | 55 +- hathor/p2p/protocol.py | 23 +- hathor/p2p/resources/add_peers.py | 2 +- hathor/p2p/resources/status.py | 10 +- hathor/p2p/states/peer_id.py | 10 +- hathor/p2p/states/ready.py | 16 +- hathor/p2p/sync_v2/agent.py | 27 +- 
.../sync_v2/blockchain_streaming_client.py | 4 +- hathor/p2p/sync_v2/factory.py | 19 +- hathor/p2p/sync_v2/mempool.py | 15 +- .../sync_v2/transaction_streaming_client.py | 6 +- hathor/simulator/fake_connection.py | 18 +- hathor/simulator/simulator.py | 4 +- tests/event/event_simulation_tester.py | 4 +- tests/others/test_metrics.py | 10 +- tests/p2p/netfilter/test_match.py | 6 +- tests/p2p/test_bootstrap.py | 8 +- tests/p2p/test_peer_id.py | 197 +++-- tests/p2p/test_protocol.py | 18 +- tests/p2p/test_sync_mempool.py | 73 +- tests/p2p/test_sync_v2.py | 8 +- tests/resources/p2p/test_add_peer.py | 6 +- tests/resources/p2p/test_status.py | 4 +- tests/simulation/test_simulator_itself.py | 4 +- tests/unittest.py | 22 +- 34 files changed, 850 insertions(+), 567 deletions(-) diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index b4977820d..e6e97dbd5 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -35,7 +35,7 @@ from hathor.manager import HathorManager from hathor.mining.cpu_mining_service import CpuMiningService from hathor.p2p.manager import ConnectionsManager -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PrivatePeer from hathor.pubsub import PubSubManager from hathor.reactor import ReactorProtocol as Reactor from hathor.storage import RocksDBStorage @@ -69,6 +69,7 @@ def add_factories( sync_v1_support: 'SyncSupportLevel', sync_v2_support: 'SyncSupportLevel', vertex_parser: VertexParser, + vertex_handler: VertexHandler, ) -> None: """Adds the sync factory to the manager according to the support level.""" from hathor.p2p.sync_v1.factory import SyncV11Factory @@ -82,9 +83,13 @@ def add_factories( p2p_manager.enable_sync_version(SyncVersion.V1_1) # sync-v2 support: if sync_v2_support > cls.UNAVAILABLE: - p2p_manager.add_sync_factory( - SyncVersion.V2, SyncV2Factory(settings, p2p_manager, vertex_parser=vertex_parser) + sync_v2_factory = SyncV2Factory( + settings, + p2p_manager, + vertex_parser=vertex_parser, + vertex_handler=vertex_handler, ) + p2p_manager.add_sync_factory(SyncVersion.V2, sync_v2_factory) if sync_v2_support is cls.ENABLED: p2p_manager.enable_sync_version(SyncVersion.V2) @@ -96,7 +101,7 @@ class StorageType(Enum): class BuildArtifacts(NamedTuple): """Artifacts created by a builder.""" - peer: Peer + peer: PrivatePeer settings: HathorSettingsType rng: Random reactor: Reactor @@ -137,7 +142,7 @@ def __init__(self) -> None: self._checkpoints: Optional[list[Checkpoint]] = None self._capabilities: Optional[list[str]] = None - self._peer: Optional[Peer] = None + self._peer: Optional[PrivatePeer] = None self._network: Optional[str] = None self._cmdline: str = '' @@ -337,7 +342,7 @@ def set_capabilities(self, capabilities: list[str]) -> 'Builder': self._capabilities = capabilities return self - def set_peer(self, peer: Peer) -> 'Builder': + def set_peer(self, peer: PrivatePeer) -> 'Builder': self.check_if_can_modify() self._peer = peer return self @@ -361,7 +366,7 @@ def _get_soft_voided_tx_ids(self) -> set[bytes]: return set(settings.SOFT_VOIDED_TX_IDS) - def _get_peer(self) -> Peer: + def _get_peer(self) -> PrivatePeer: if self._peer is not None: return self._peer raise ValueError('peer not set') @@ -436,6 +441,7 @@ def _get_or_create_p2p_manager(self) -> ConnectionsManager: self._sync_v1_support, self._sync_v2_support, self._get_or_create_vertex_parser(), + self._get_or_create_vertex_handler(), ) return self._p2p_manager diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index 4548c5077..1b4cf2e80 100644 
--- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -36,7 +36,7 @@ from hathor.mining.cpu_mining_service import CpuMiningService from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.manager import ConnectionsManager -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PrivatePeer from hathor.p2p.utils import discover_hostname, get_genesis_short_hash from hathor.pubsub import PubSubManager from hathor.reactor import ReactorProtocol as Reactor @@ -100,7 +100,11 @@ def create_manager(self, reactor: Reactor) -> HathorManager: self.log = logger.new() self.reactor = reactor - peer = Peer.create_from_json_path(self._args.peer) if self._args.peer else Peer() + peer: PrivatePeer + if self._args.peer: + peer = PrivatePeer.create_from_json_path(self._args.peer) + else: + peer = PrivatePeer.auto_generated() python = f'{platform.python_version()}-{platform.python_implementation()}' self.log.info( @@ -325,7 +329,6 @@ def create_manager(self, reactor: Reactor) -> HathorManager: whitelist_only=False, rng=Random(), ) - SyncSupportLevel.add_factories(settings, p2p_manager, sync_v1_support, sync_v2_support, vertex_parser) vertex_handler = VertexHandler( reactor=reactor, @@ -340,6 +343,15 @@ def create_manager(self, reactor: Reactor) -> HathorManager: log_vertex_bytes=self._args.log_vertex_bytes, ) + SyncSupportLevel.add_factories( + settings, + p2p_manager, + sync_v1_support, + sync_v2_support, + vertex_parser, + vertex_handler, + ) + from hathor.consensus.poa import PoaBlockProducer, PoaSignerFile poa_block_producer: PoaBlockProducer | None = None if settings.CONSENSUS_ALGORITHM.is_poa(): diff --git a/hathor/cli/peer_id.py b/hathor/cli/peer_id.py index 45bf3a04d..1a8a733e3 100644 --- a/hathor/cli/peer_id.py +++ b/hathor/cli/peer_id.py @@ -20,9 +20,9 @@ def main() -> None: - from hathor.p2p.peer import Peer + from hathor.p2p.peer import PrivatePeer - peer = Peer() - data = peer.to_json(include_private_key=True) + peer = PrivatePeer.auto_generated() + data = peer.to_json_private() txt = json.dumps(data, indent=4) print(txt) diff --git a/hathor/manager.py b/hathor/manager.py index 4f09d126c..7e702c73c 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -46,7 +46,7 @@ from hathor.mining import BlockTemplate, BlockTemplates from hathor.mining.cpu_mining_service import CpuMiningService from hathor.p2p.manager import ConnectionsManager -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PrivatePeer from hathor.p2p.peer_id import PeerId from hathor.profiler import get_cpu_profiler from hathor.pubsub import HathorEvents, PubSubManager @@ -60,7 +60,7 @@ from hathor.transaction.storage.tx_allow_scope import TxAllowScope from hathor.transaction.vertex_parser import VertexParser from hathor.types import Address, VertexId -from hathor.util import EnvironmentInfo, LogDuration, Random, calculate_min_significant_weight, not_none +from hathor.util import EnvironmentInfo, LogDuration, Random, calculate_min_significant_weight from hathor.verification.verification_service import VerificationService from hathor.vertex_handler import VertexHandler from hathor.wallet import BaseWallet @@ -97,7 +97,7 @@ def __init__( pubsub: PubSubManager, consensus_algorithm: ConsensusAlgorithm, daa: DifficultyAdjustmentAlgorithm, - peer: Peer, + peer: PrivatePeer, tx_storage: TransactionStorage, p2p_manager: ConnectionsManager, event_manager: EventManager, @@ -298,7 +298,7 @@ def start(self) -> None: sys.exit(-1) if self._enable_event_queue: - 
self._event_manager.start(str(not_none(self.my_peer.id))) + self._event_manager.start(str(self.my_peer.id)) self.state = self.NodeState.INITIALIZING self.pubsub.publish(HathorEvents.MANAGER_ON_START) diff --git a/hathor/metrics.py b/hathor/metrics.py index 046c8c54d..cc72ce9e0 100644 --- a/hathor/metrics.py +++ b/hathor/metrics.py @@ -246,7 +246,7 @@ def collect_peer_connection_metrics(self) -> None: self.peer_connection_metrics.clear() for connection in self.connections.connections: - if not connection.peer or not connection.peer.id: + if not connection._peer: # A connection without peer will not be able to communicate # So we can just discard it for the sake of the metrics continue diff --git a/hathor/p2p/factory.py b/hathor/p2p/factory.py index 521abaa09..af1eb270a 100644 --- a/hathor/p2p/factory.py +++ b/hathor/p2p/factory.py @@ -19,7 +19,7 @@ from hathor.conf.settings import HathorSettings from hathor.p2p.manager import ConnectionsManager -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PrivatePeer from hathor.p2p.protocol import HathorLineReceiver if TYPE_CHECKING: @@ -39,7 +39,7 @@ class HathorServerFactory(protocol.ServerFactory): def __init__( self, network: str, - my_peer: Peer, + my_peer: PrivatePeer, p2p_manager: ConnectionsManager, *, settings: HathorSettings, @@ -75,7 +75,7 @@ class HathorClientFactory(protocol.ClientFactory): def __init__( self, network: str, - my_peer: Peer, + my_peer: PrivatePeer, p2p_manager: ConnectionsManager, *, settings: HathorSettings, diff --git a/hathor/p2p/manager.py b/hathor/p2p/manager.py index f70cd1424..7fb3d35b9 100644 --- a/hathor/p2p/manager.py +++ b/hathor/p2p/manager.py @@ -27,10 +27,10 @@ from hathor.conf.settings import HathorSettings from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.netfilter.factory import NetfilterFactory -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PrivatePeer, PublicPeer, UnverifiedPeer from hathor.p2p.peer_discovery import PeerDiscovery from hathor.p2p.peer_id import PeerId -from hathor.p2p.peer_storage import PeerStorage +from hathor.p2p.peer_storage import UnverifiedPeerStorage, VerifiedPeerStorage from hathor.p2p.protocol import HathorProtocol from hathor.p2p.rate_limiter import RateLimiter from hathor.p2p.states.ready import ReadyState @@ -84,6 +84,8 @@ class GlobalRateLimiter: connecting_peers: dict[IStreamClientEndpoint, _ConnectingPeer] handshaking_peers: set[HathorProtocol] whitelist_only: bool + unverified_peer_storage: UnverifiedPeerStorage + verified_peer_storage: VerifiedPeerStorage _sync_factories: dict[SyncVersion, SyncAgentFactory] _enabled_sync_versions: set[SyncVersion] @@ -94,7 +96,7 @@ def __init__( settings: HathorSettings, reactor: Reactor, network: str, - my_peer: Peer, + my_peer: PrivatePeer, pubsub: PubSubManager, ssl: bool, rng: Random, @@ -157,10 +159,10 @@ def __init__( # List of peers received from the network. # We cannot trust their identity before we connect to them. - self.received_peer_storage = PeerStorage() + self.unverified_peer_storage = UnverifiedPeerStorage() # List of known peers. - self.peer_storage = PeerStorage() # dict[string (peer.id), Peer] + self.verified_peer_storage = VerifiedPeerStorage() # dict[string (peer.id), PublicPeer] # Maximum unseen time before removing a peer (seconds). 
self.max_peer_unseen_dt: float = 30 * 60 # 30-minutes @@ -326,7 +328,7 @@ def _get_peers_count(self) -> PeerConnectionsMetrics: len(self.connecting_peers), len(self.handshaking_peers), len(self.connected_peers), - len(self.peer_storage) + len(self.verified_peer_storage) ) def get_sync_factory(self, sync_version: SyncVersion) -> SyncAgentFactory: @@ -367,7 +369,8 @@ def disconnect_all_peers(self, *, force: bool = False) -> None: for conn in self.iter_all_connections(): conn.disconnect(force=force) - def on_connection_failure(self, failure: Failure, peer: Optional[Peer], endpoint: IStreamClientEndpoint) -> None: + def on_connection_failure(self, failure: Failure, peer: Optional[UnverifiedPeer | PublicPeer], + endpoint: IStreamClientEndpoint) -> None: connecting_peer = self.connecting_peers[endpoint] entrypoint = connecting_peer.entrypoint self.log.warn('connection failure', entrypoint=entrypoint, failure=failure.getErrorMessage()) @@ -397,11 +400,11 @@ def on_peer_connect(self, protocol: HathorProtocol) -> None: def on_peer_ready(self, protocol: HathorProtocol) -> None: """Called when a peer is ready.""" assert protocol.peer is not None - protocol.peer = self.peer_storage.add_or_merge(protocol.peer) + self.verified_peer_storage.add_or_replace(protocol.peer) assert protocol.peer.id is not None self.handshaking_peers.remove(protocol) - self.received_peer_storage.pop(protocol.peer.id, None) + self.unverified_peer_storage.pop(protocol.peer.id, None) # we emit the event even if it's a duplicate peer as a matching # NETWORK_PEER_DISCONNECTED will be emmited regardless @@ -423,7 +426,7 @@ def on_peer_ready(self, protocol: HathorProtocol) -> None: self.connected_peers[protocol.peer.id] = protocol # In case it was a retry, we must reset the data only here, after it gets ready - protocol.peer.reset_retry_timestamp() + protocol.peer.info.reset_retry_timestamp() if len(self.connected_peers) <= self.MAX_ENABLED_SYNC: protocol.enable_sync() @@ -434,7 +437,7 @@ def on_peer_ready(self, protocol: HathorProtocol) -> None: # Notify other peers about this new peer connection. self.relay_peer_to_ready_connections(protocol.peer) - def relay_peer_to_ready_connections(self, peer: Peer) -> None: + def relay_peer_to_ready_connections(self, peer: PublicPeer) -> None: """Relay peer to all ready connections.""" for conn in self.iter_ready_connections(): if conn.peer == peer: @@ -447,8 +450,7 @@ def on_peer_disconnect(self, protocol: HathorProtocol) -> None: self.connections.discard(protocol) if protocol in self.handshaking_peers: self.handshaking_peers.remove(protocol) - if protocol.peer: - assert protocol.peer.id is not None + if protocol._peer is not None: existing_protocol = self.connected_peers.pop(protocol.peer.id, None) if existing_protocol is None: # in this case, the connection was closed before it got to READY state @@ -492,29 +494,29 @@ def is_peer_connected(self, peer_id: PeerId) -> bool: """ return peer_id in self.connected_peers - def on_receive_peer(self, peer: Peer, origin: Optional[ReadyState] = None) -> None: + def on_receive_peer(self, peer: UnverifiedPeer, origin: Optional[ReadyState] = None) -> None: """ Update a peer information in our storage, and instantly attempt to connect to it if it is not connected yet. 
""" if peer.id == self.my_peer.id: return - peer = self.received_peer_storage.add_or_merge(peer) + peer = self.unverified_peer_storage.add_or_merge(peer) self.connect_to_if_not_connected(peer, int(self.reactor.seconds())) def peers_cleanup(self) -> None: """Clean up aged peers.""" now = self.reactor.seconds() - to_be_removed: list[Peer] = [] - for peer in self.peer_storage.values(): + to_be_removed: list[PublicPeer] = [] + for peer in self.verified_peer_storage.values(): assert peer.id is not None if self.is_peer_connected(peer.id): continue - dt = now - peer.last_seen + dt = now - peer.info.last_seen if dt > self.max_peer_unseen_dt: to_be_removed.append(peer) - for peer in to_be_removed: - self.peer_storage.remove(peer) + for remove_peer in to_be_removed: + self.verified_peer_storage.remove(remove_peer) def reconnect_to_all(self) -> None: """ It is called by the `lc_reconnect` timer and tries to connect to all known @@ -531,7 +533,7 @@ def reconnect_to_all(self) -> None: self.do_discovery() # We need to use list() here because the dict might change inside connect_to_if_not_connected # when the peer is disconnected and without entrypoint - for peer in list(self.peer_storage.values()): + for peer in list(self.verified_peer_storage.values()): self.connect_to_if_not_connected(peer, int(now)) def update_whitelist(self) -> Deferred[None]: @@ -575,27 +577,27 @@ def _update_whitelist_cb(self, body: bytes) -> None: for peer_id in peers_to_remove: self.manager.remove_peer_from_whitelist_and_disconnect(peer_id) - def connect_to_if_not_connected(self, peer: Peer, now: int) -> None: + def connect_to_if_not_connected(self, peer: UnverifiedPeer | PublicPeer, now: int) -> None: """ Attempts to connect if it is not connected to the peer. """ - if not peer.entrypoints: + if not peer.info.entrypoints: # It makes no sense to keep storing peers that have disconnected and have no entrypoints # We will never be able to connect to them anymore and they will only keep spending memory # and other resources when used in APIs, so we are removing them here if peer.id not in self.connected_peers: - self.peer_storage.remove(peer) + self.verified_peer_storage.remove(peer) return if peer.id in self.connected_peers: return assert peer.id is not None - if peer.can_retry(now): - self.connect_to(self.rng.choice(peer.entrypoints), peer) + if peer.info.can_retry(now): + self.connect_to(self.rng.choice(peer.info.entrypoints), peer) def _connect_to_callback( self, protocol: IProtocol, - peer: Optional[Peer], + peer: Optional[UnverifiedPeer | PublicPeer], endpoint: IStreamClientEndpoint, entrypoint: Entrypoint, ) -> None: @@ -611,8 +613,8 @@ def _connect_to_callback( def connect_to( self, entrypoint: Entrypoint, - peer: Optional[Peer] = None, - use_ssl: Optional[bool] = None, + peer: UnverifiedPeer | PublicPeer | None = None, + use_ssl: bool | None = None, ) -> None: """ Attempt to connect to a peer, even if a connection already exists. Usually you should call `connect_to_if_not_connected`. 
@@ -640,20 +642,19 @@ def connect_to( factory: IProtocolFactory if use_ssl: - certificate_options = self.my_peer.get_certificate_options() - factory = TLSMemoryBIOFactory(certificate_options, True, self.client_factory) + factory = TLSMemoryBIOFactory(self.my_peer.certificate_options, True, self.client_factory) else: factory = self.client_factory if peer is not None: now = int(self.reactor.seconds()) - peer.increment_retry_attempt(now) + peer.info.increment_retry_attempt(now) deferred = endpoint.connect(factory) self.connecting_peers[endpoint] = _ConnectingPeer(entrypoint, deferred) - deferred.addCallback(self._connect_to_callback, peer, endpoint, entrypoint) - deferred.addErrback(self.on_connection_failure, peer, endpoint) + deferred.addCallback(self._connect_to_callback, peer, endpoint, entrypoint) # type: ignore + deferred.addErrback(self.on_connection_failure, peer, endpoint) # type: ignore self.log.info('connect to', entrypoint=str(entrypoint), peer=str(peer)) self.pubsub.publish( HathorEvents.NETWORK_PEER_CONNECTING, @@ -680,8 +681,7 @@ def listen(self, description: str, use_ssl: Optional[bool] = None) -> None: factory: IProtocolFactory if use_ssl: - certificate_options = self.my_peer.get_certificate_options() - factory = TLSMemoryBIOFactory(certificate_options, False, self.server_factory) + factory = TLSMemoryBIOFactory(self.my_peer.certificate_options, False, self.server_factory) else: factory = self.server_factory @@ -712,13 +712,13 @@ def update_hostname_entrypoints(self, *, old_hostname: str | None, new_hostname: for address in self._listen_addresses: if old_hostname is not None: old_entrypoint = Entrypoint.from_hostname_address(old_hostname, address) - if old_entrypoint in self.my_peer.entrypoints: - self.my_peer.entrypoints.remove(old_entrypoint) + if old_entrypoint in self.my_peer.info.entrypoints: + self.my_peer.info.entrypoints.remove(old_entrypoint) self._add_hostname_entrypoint(new_hostname, address) def _add_hostname_entrypoint(self, hostname: str, address: IPv4Address | IPv6Address) -> None: hostname_entrypoint = Entrypoint.from_hostname_address(hostname, address) - self.my_peer.entrypoints.append(hostname_entrypoint) + self.my_peer.info.entrypoints.append(hostname_entrypoint) def get_connection_to_drop(self, protocol: HathorProtocol) -> HathorProtocol: """ When there are duplicate connections, determine which one should be dropped. diff --git a/hathor/p2p/netfilter/matches.py b/hathor/p2p/netfilter/matches.py index d7eb714bd..786189d0e 100644 --- a/hathor/p2p/netfilter/matches.py +++ b/hathor/p2p/netfilter/matches.py @@ -127,7 +127,7 @@ def match(self, context: 'NetfilterContext') -> bool: if context.protocol is None: return False - if context.protocol.peer is None: + if context.protocol._peer is None: return False if str(context.protocol.peer.id) != self.peer_id: diff --git a/hathor/p2p/peer.py b/hathor/p2p/peer.py index bb48e46fd..33aa617db 100644 --- a/hathor/p2p/peer.py +++ b/hathor/p2p/peer.py @@ -11,13 +11,35 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +""" +This module exposes three peer classes that share similar behavior but must not be mixed. 
+ +This is the class structure: + + PeerInfo has entrypoints and reconnect info + UnverifiedPeer has a PeerId and PeerInfo + PublicPeer has an UnverifiedPeer and a public-key + PrivatePeer has a PublicPeer and a private-key + +This way the shared behavior is implemented and propagated through the private classes, and the public classes don't +share the same inheritance tree and for example a `peer: PublicPeer` will have `isinstance(peer, UnverifiedPeer) == +False`, so they can't be mixed. + +This makes it harder for external functions to support "subtypes" by accepting a base class, but this is intentional. +If a function can work for any type of peer, it should be defined as `def foo(peer: UnverifiedPeer)` and callers will +have to call `private_peer.to_unverified_peer()` because `PrivatePeer` is not a subclass of `UnverifiedPeer`. +""" + +from __future__ import annotations import base64 import hashlib import json +from dataclasses import dataclass, field from enum import Enum +from functools import cached_property from math import inf -from typing import TYPE_CHECKING, Any, Optional, cast +from typing import TYPE_CHECKING, Any, cast from cryptography import x509 from cryptography.exceptions import InvalidSignature @@ -28,8 +50,10 @@ from structlog import get_logger from twisted.internet.interfaces import ISSLTransport from twisted.internet.ssl import Certificate, CertificateOptions, TLSVersion, trustRootFromCertificates +from typing_extensions import Self from hathor.conf.get_settings import get_global_settings +from hathor.conf.settings import HathorSettings from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.peer_id import PeerId @@ -50,307 +74,66 @@ class PeerFlags(str, Enum): RETRIES_EXCEEDED = 'retries_exceeded' -class Peer: - """ Identify a peer, even when it is disconnected. +def _parse_entrypoint(entrypoint_string: str) -> Entrypoint: + """ Helper function to parse an entrypoint from string.""" + entrypoint = Entrypoint.parse(entrypoint_string) + if entrypoint.peer_id is not None: + raise ValueError('do not add id= to peer.json entrypoints') + return entrypoint - The public_key and private_key are used to ensure that a new connection - that claims to be this peer is really from this peer. - The entrypoints are strings that describe a way to connect to this peer. - Usually a peer will have only one entrypoint. - """ +def _parse_pubkey(pubkey_string: str) -> rsa.RSAPublicKey: + """ Helper function to parse a public key from string.""" + public_key_der = base64.b64decode(pubkey_string) + public_key = serialization.load_der_public_key(data=public_key_der, backend=default_backend()) + assert public_key is not None + return public_key - id: Optional[PeerId] - entrypoints: list[Entrypoint] - private_key: Optional[rsa.RSAPrivateKeyWithSerialization] - public_key: Optional[rsa.RSAPublicKey] - certificate: Optional[x509.Certificate] - retry_timestamp: int # should only try connecting to this peer after this timestamp - retry_interval: int # how long to wait for next connection retry. 
It will double for each failure - retry_attempts: int # how many retries were made - last_seen: float # last time this peer was seen - flags: set[str] - source_file: str | None - - def __init__(self, auto_generate_keys: bool = True) -> None: - self._log = logger.new() - self._settings = get_global_settings() - self.id = None - self.private_key = None - self.public_key = None - self.certificate = None - self.entrypoints = [] - self.retry_timestamp = 0 - self.retry_interval = 5 - self.retry_attempts = 0 - self.last_seen = inf - self.flags = set() - self._certificate_options: Optional[CertificateOptions] = None - self.source_file = None - - if auto_generate_keys: - self.generate_keys() - - def __str__(self): - return ( - f'Peer(id={self.id}, entrypoints={self.entrypoints_as_str()}, retry_timestamp={self.retry_timestamp}, ' - f'retry_interval={self.retry_interval})' - ) - def entrypoints_as_str(self) -> list[str]: - """Return a list of entrypoints serialized as str""" - return list(map(str, self.entrypoints)) +def _parse_privkey(privkey_string: str) -> rsa.RSAPrivateKeyWithSerialization: + """ Helper function to parse a private key from string.""" + private_key_der = base64.b64decode(privkey_string) + private_key = serialization.load_der_private_key(data=private_key_der, password=None, backend=default_backend()) + assert private_key is not None + return private_key - def merge(self, other: 'Peer') -> None: - """ Merge two Peer objects, checking that they have the same - id, public_key, and private_key. The entrypoints are merged without - duplicating their entries. - """ - assert (self.id == other.id) - # Copy public key if `self` doesn't have it and `other` does. - if not self.public_key and other.public_key: - self.public_key = other.public_key - self.validate() +def _calculate_peer_id(public_key: rsa.RSAPublicKey) -> PeerId: + """ Helper function to calculate a peer id from a public key.""" + public_der = public_key.public_bytes( + encoding=serialization.Encoding.DER, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + h1 = hashlib.sha256(public_der) + h2 = hashlib.sha256(h1.digest()) + return PeerId(h2.digest()) + - if self.public_key and other.public_key: - assert (self.get_public_key() == other.get_public_key()) +@dataclass(kw_only=True, slots=True) +class PeerInfo: + """ Stores entrypoint and connection attempts information. + """ - # Copy private key if `self` doesn't have it and `other` does. - if not self.private_key and other.private_key: - self.private_key = other.private_key - self.validate() + entrypoints: list[Entrypoint] = field(default_factory=list) + retry_timestamp: int = 0 # should only try connecting to this peer after this timestamp + retry_interval: int = 5 # how long to wait for next connection retry. It will double for each failure + retry_attempts: int = 0 # how many retries were made + last_seen: float = inf # last time this peer was seen + flags: set[str] = field(default_factory=set) + _settings: HathorSettings = field(default_factory=get_global_settings, repr=False) + + def entrypoints_as_str(self) -> list[str]: + """Return a list of entrypoints serialized as str""" + return list(map(str, self.entrypoints)) + def _merge(self, other: PeerInfo) -> None: + """Actual merge execution, must only be made after verifications.""" # Merge entrypoints. for ep in other.entrypoints: if ep not in self.entrypoints: self.entrypoints.append(ep) - def generate_keys(self, key_size: int = 2048) -> None: - """ Generate a random pair of private key and public key. 
- It also calculates the id of this peer, based on its public key. - """ - # https://security.stackexchange.com/questions/5096/rsa-vs-dsa-for-ssh-authentication-keys - self.private_key = rsa.generate_private_key(public_exponent=65537, key_size=key_size, - backend=default_backend()) - self.public_key = self.private_key.public_key() - self.id = self.calculate_id() - - def calculate_id(self) -> PeerId: - """ Calculate and return the id based on the public key. - """ - assert self.public_key is not None - public_der = self.public_key.public_bytes(encoding=serialization.Encoding.DER, - format=serialization.PublicFormat.SubjectPublicKeyInfo) - h1 = hashlib.sha256(public_der) - h2 = hashlib.sha256(h1.digest()) - return PeerId(h2.digest()) - - def get_public_key(self) -> str: - """ Return the public key in DER encoding as an `str`. - """ - assert self.public_key is not None - public_der = self.public_key.public_bytes(encoding=serialization.Encoding.DER, - format=serialization.PublicFormat.SubjectPublicKeyInfo) - return base64.b64encode(public_der).decode('utf-8') - - def sign(self, data: bytes) -> bytes: - """ Sign any data (of type `bytes`). - """ - assert self.private_key is not None - return self.private_key.sign( - data, padding.PSS(mgf=padding.MGF1(hashes.SHA256()), salt_length=padding.PSS.MAX_LENGTH), hashes.SHA256()) - - def verify_signature(self, signature: bytes, data: bytes) -> bool: - """ Verify a signature of a data. Both must be of type `bytes`. - """ - try: - assert self.public_key is not None - self.public_key.verify(signature, data, - padding.PSS(mgf=padding.MGF1(hashes.SHA256()), salt_length=padding.PSS.MAX_LENGTH), - hashes.SHA256()) - except InvalidSignature: - return False - else: - return True - - @classmethod - def create_from_json_path(cls, path: str) -> 'Peer': - """Create a new Peer from a JSON file.""" - data = json.load(open(path, 'r')) - peer = Peer.create_from_json(data) - peer.source_file = path - return peer - - @classmethod - def create_from_json(cls, data: dict[str, Any]) -> 'Peer': - """ Create a new Peer from JSON data. - - It is used both to load a Peer from disk and to create a Peer - from a peer connection. - """ - obj = cls(auto_generate_keys=False) - obj.id = PeerId(data['id']) - - if 'pubKey' in data: - public_key_der = base64.b64decode(data['pubKey']) - public_key = serialization.load_der_public_key(data=public_key_der, backend=default_backend()) - assert public_key is not None - public_key = cast(rsa.RSAPublicKey, public_key) - obj.public_key = public_key - - if 'privKey' in data: - private_key_der = base64.b64decode(data['privKey']) - private_key = serialization.load_der_private_key(data=private_key_der, password=None, - backend=default_backend()) - assert private_key is not None - private_key = cast(rsa.RSAPrivateKey, private_key) - obj.private_key = private_key - - if 'entrypoints' in data: - for entrypoint_string in data['entrypoints']: - entrypoint = Entrypoint.parse(entrypoint_string) - if entrypoint.peer_id is not None: - raise ValueError('do not add id= to peer.json entrypoints') - obj.entrypoints.append(entrypoint) - - # TODO(epnichols): call obj.validate()? - return obj - - def validate(self) -> None: - """ Return `True` if the following conditions are valid: - (i) public key and private key matches; - (ii) the id matches with the public key. - - TODO(epnichols): Update docs. Only raises exceptions; doesn't return anything. 
- """ - if self.private_key and not self.public_key: - # TODO(epnichols): Modifies self.public_key, even though we're calling "validate". Why is state modified? - self.public_key = self.private_key.public_key() - - if self.public_key: - if self.id != self.calculate_id(): - raise InvalidPeerIdException('id does not match public key') - - if self.private_key: - assert self.public_key is not None - public_der1 = self.public_key.public_bytes(encoding=serialization.Encoding.DER, - format=serialization.PublicFormat.SubjectPublicKeyInfo) - public_key = self.private_key.public_key() - public_der2 = public_key.public_bytes(encoding=serialization.Encoding.DER, - format=serialization.PublicFormat.SubjectPublicKeyInfo) - if public_der1 != public_der2: - raise InvalidPeerIdException('private/public pair does not match') - - def to_json(self, include_private_key: bool = False) -> dict[str, Any]: - """ Return a JSON serialization of the object. - - By default, it will not include the private key. If you would like to add - it, use the parameter `include_private_key`. - """ - assert self.public_key is not None - public_der = self.public_key.public_bytes(encoding=serialization.Encoding.DER, - format=serialization.PublicFormat.SubjectPublicKeyInfo) - # This format is compatible with libp2p. - result = { - 'id': str(self.id), - 'pubKey': base64.b64encode(public_der).decode('utf-8'), - 'entrypoints': self.entrypoints_as_str(), - } - if include_private_key: - assert self.private_key is not None - private_der = self.private_key.private_bytes( - encoding=serialization.Encoding.DER, - format=serialization.PrivateFormat.PKCS8, - # TODO encryption_algorithm=serialization.BestAvailableEncryption(b'mypassword') - encryption_algorithm=serialization.NoEncryption()) - result['privKey'] = base64.b64encode(private_der).decode('utf-8') - - return result - - def save_to_file(self, path: str) -> None: - """ Save the object to a JSON file. - """ - import json - data = self.to_json(include_private_key=True) - fp = open(path, 'w') - json.dump(data, fp, indent=4) - fp.close() - - def increment_retry_attempt(self, now: int) -> None: - """ Updates timestamp for next retry. - - :param now: current timestamp - """ - self.retry_timestamp = now + self.retry_interval - self.retry_attempts += 1 - self.retry_interval = self.retry_interval * self._settings.PEER_CONNECTION_RETRY_INTERVAL_MULTIPLIER - if self.retry_interval > self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL: - self.retry_interval = self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL - - def reset_retry_timestamp(self) -> None: - """ Resets retry values. 
- """ - self.retry_interval = 5 - self.retry_timestamp = 0 - self.retry_attempts = 0 - self.flags.discard(PeerFlags.RETRIES_EXCEEDED) - - def can_retry(self, now: int) -> bool: - """ Return if can retry to connect to self in `now` timestamp - We validate if peer already has RETRIES_EXCEEDED flag, or has reached the maximum allowed attempts - If not, we check if the timestamp is already a valid one to retry - """ - if now < self.retry_timestamp: - return False - return True - - def get_certificate(self) -> x509.Certificate: - if not self.certificate: - assert self.private_key is not None - certificate = generate_certificate( - self.private_key, - self._settings.CA_FILEPATH, - self._settings.CA_KEY_FILEPATH - ) - self.certificate = certificate - return self.certificate - - def get_certificate_options(self) -> CertificateOptions: - """ Return certificate options With certificate generated and signed with peer private key - - The result is cached so subsequent calls are really cheap. - """ - if self._certificate_options is None: - self._certificate_options = self._get_certificate_options() - return self._certificate_options - - def _get_certificate_options(self) -> CertificateOptions: - """Implementation of get_certificate_options, this should be cached to avoid opening the same static file - multiple times""" - certificate = self.get_certificate() - openssl_certificate = X509.from_cryptography(certificate) - assert self.private_key is not None - openssl_pkey = PKey.from_cryptography_key(self.private_key) - - with open(self._settings.CA_FILEPATH, 'rb') as f: - ca = x509.load_pem_x509_certificate(data=f.read(), backend=default_backend()) - - openssl_ca = X509.from_cryptography(ca) - ca_cert = Certificate(openssl_ca) - trust_root = trustRootFromCertificates([ca_cert]) - - # We should not use a ContextFactory - # https://twistedmatrix.com/documents/19.7.0/api/twisted.protocols.tls.TLSMemoryBIOFactory.html - certificate_options = CertificateOptions( - privateKey=openssl_pkey, - certificate=openssl_certificate, - trustRoot=trust_root, - raiseMinimumTo=TLSVersion.TLSv1_3 - ) - return certificate_options - - async def validate_entrypoint(self, protocol: 'HathorProtocol') -> bool: + async def validate_entrypoint(self, protocol: HathorProtocol) -> bool: """ Validates if connection entrypoint is one of the peer entrypoints """ found_entrypoint = False @@ -406,7 +189,137 @@ async def validate_entrypoint(self, protocol: 'HathorProtocol') -> bool: return True - def validate_certificate(self, protocol: 'HathorProtocol') -> bool: + def increment_retry_attempt(self, now: int) -> None: + """ Updates timestamp for next retry. + + :param now: current timestamp + """ + self.retry_timestamp = now + self.retry_interval + self.retry_attempts += 1 + self.retry_interval = self.retry_interval * self._settings.PEER_CONNECTION_RETRY_INTERVAL_MULTIPLIER + if self.retry_interval > self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL: + self.retry_interval = self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL + + def reset_retry_timestamp(self) -> None: + """ Resets retry values. 
+ """ + self.retry_interval = 5 + self.retry_timestamp = 0 + self.retry_attempts = 0 + self.flags.discard(PeerFlags.RETRIES_EXCEEDED) + + def can_retry(self, now: int) -> bool: + """ Return if can retry to connect to self in `now` timestamp + We validate if peer already has RETRIES_EXCEEDED flag, or has reached the maximum allowed attempts + If not, we check if the timestamp is already a valid one to retry + """ + if now < self.retry_timestamp: + return False + return True + + +@dataclass(slots=True) +class UnverifiedPeer: + """ Represents a peer with an unverified id and entrypoint list, which we can try to connect to. + """ + + id: PeerId + info: PeerInfo = field(default_factory=PeerInfo) + + def to_json(self) -> dict[str, Any]: + """ Return a JSON serialization of the object. + + This format is compatible with libp2p. + """ + return { + 'id': str(self.id), + 'entrypoints': self.info.entrypoints_as_str(), + } + + @classmethod + def create_from_json(cls, data: dict[str, Any]) -> Self: + """ Create a new UnverifiedPeer from JSON data. + + It is to create an UnverifiedPeer from a peer connection. + """ + return cls( + id=PeerId(data['id']), + info=PeerInfo(entrypoints=[_parse_entrypoint(e) for e in data.get('entrypoints', [])]), + ) + + def merge(self, other: UnverifiedPeer) -> None: + """ Merge two UnverifiedPeer objects, checking that they have the same + id, public_key, and private_key. The entrypoints are merged without + duplicating their entries. + """ + assert self.id == other.id + self.info._merge(other.info) + + +@dataclass(slots=True) +class PublicPeer: + """ Represents a peer that can verify signatures, and thus communicate to. + """ + + _peer: UnverifiedPeer + public_key: rsa.RSAPublicKey + + @property + def id(self) -> PeerId: + return self._peer.id + + @property + def info(self) -> PeerInfo: + return self._peer.info + + def to_unverified_peer(self) -> UnverifiedPeer: + """Convert to a simple UnverifiedPeer.""" + return self._peer + + def to_json(self) -> dict[str, Any]: + """ Return a JSON serialization of the object. + + This format is compatible with libp2p. + """ + public_der = self.public_key.public_bytes( + encoding=serialization.Encoding.DER, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + return { + **self._peer.to_json(), + 'pubKey': base64.b64encode(public_der).decode('utf-8'), + } + + @classmethod + def create_from_json(cls, data: dict[str, Any]) -> Self: + """ Create a new PublicPeer from JSON data. + + It is used to create a PublicPeer from that same peer. + """ + public_key = _parse_pubkey(data['pubKey']) + peer = UnverifiedPeer.create_from_json(data) + obj = cls( + _peer=peer, + public_key=public_key, + ) + obj.validate() + return obj + + def calculate_id(self) -> PeerId: + """ Calculate and return the id based on the public key. + """ + return _calculate_peer_id(self.public_key) + + def get_public_key(self) -> str: + """ Return the public key in DER encoding as an `str`. 
+ """ + public_der = self.public_key.public_bytes( + encoding=serialization.Encoding.DER, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + return base64.b64encode(public_der).decode('utf-8') + + def validate_certificate(self, protocol: HathorProtocol) -> bool: """ Validates if the public key of the connection certificate is the public key of the peer """ assert protocol.transport is not None @@ -426,7 +339,6 @@ def validate_certificate(self, protocol: 'HathorProtocol') -> bool: encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo ) - assert self.public_key is not None peer_pubkey_bytes = self.public_key.public_bytes( encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo @@ -436,19 +348,223 @@ def validate_certificate(self, protocol: 'HathorProtocol') -> bool: return True + def verify_signature(self, signature: bytes, data: bytes) -> bool: + """ Verify a signature of a data. Both must be of type `bytes`. + """ + try: + self.public_key.verify( + signature, + data, + padding.PSS(mgf=padding.MGF1(hashes.SHA256()), salt_length=padding.PSS.MAX_LENGTH), + hashes.SHA256(), + ) + except InvalidSignature: + return False + else: + return True + + def validate(self) -> None: + """ Return `True` if the following conditions are valid: + (i) public key and private key matches; + (ii) the id matches with the public key. + + TODO(epnichols): Update docs. Only raises exceptions; doesn't return anything. + """ + if self.id != self.calculate_id(): + raise InvalidPeerIdException('id does not match public key') + + def merge(self, other: PublicPeer) -> None: + """ Merge two PublicPeer objects, checking that they have the same + id, public_key, and private_key. The entrypoints are merged without + duplicating their entries. + """ + assert self.id == other.id + assert self.get_public_key() == other.get_public_key() + self._peer.merge(other._peer) + self.validate() + + +# XXX: no slots because we have cached properties +@dataclass +class PrivatePeer: + """ Represents a peer that can be used to sign messages, and thus communicate from. + """ + + _public_peer: PublicPeer + private_key: rsa.RSAPrivateKeyWithSerialization + _source_file: str | None = None + + @property + def id(self) -> PeerId: + return self._public_peer._peer.id + + @property + def info(self) -> PeerInfo: + return self._public_peer._peer.info + + @property + def public_key(self) -> rsa.RSAPublicKey: + return self._public_peer.public_key + + def to_unverified_peer(self) -> UnverifiedPeer: + """Convert to a simple UnverifiedPeer.""" + return self._public_peer._peer + + def to_public_peer(self) -> PublicPeer: + """Convert to a simple PublicPeer.""" + return self._public_peer + + def to_json(self) -> dict[str, Any]: + """ Return a JSON serialization of the object without the private key. + + This format is compatible with libp2p. + """ + return self._public_peer.to_json() + + def to_json_private(self) -> dict[str, Any]: + """ Return a JSON serialization of the object with the private key. + + This format is compatible with libp2p. 
+ """ + private_der = self.private_key.private_bytes( + encoding=serialization.Encoding.DER, + format=serialization.PrivateFormat.PKCS8, + # TODO encryption_algorithm=serialization.BestAvailableEncryption(b'mypassword') + encryption_algorithm=serialization.NoEncryption(), + ) + return { + **self._public_peer.to_json(), + 'privKey': base64.b64encode(private_der).decode('utf-8'), + } + + def get_public_key(self) -> str: + """ Return the public key in DER encoding as an `str`. + """ + return self._public_peer.get_public_key() + + @classmethod + def create_from_json(cls, data: dict[str, Any]) -> Self: + private_key = _parse_privkey(data['privKey']) + public_peer = PublicPeer.create_from_json(data) + obj = cls( + _public_peer=public_peer, + private_key=private_key + ) + obj.validate() + return obj + + def validate(self) -> None: + self._public_peer.validate() + public_der1 = self._public_peer.public_key.public_bytes( + encoding=serialization.Encoding.DER, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + public_der2 = self.private_key.public_key().public_bytes( + encoding=serialization.Encoding.DER, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + if public_der1 != public_der2: + raise InvalidPeerIdException('private/public pair does not match') + + @classmethod + def auto_generated(cls, key_size: int = 2048) -> Self: + """ Generate a random pair of private key and public key. + It also calculates the id of this peer, based on its public key. + """ + # https://security.stackexchange.com/questions/5096/rsa-vs-dsa-for-ssh-authentication-keys + private_key = rsa.generate_private_key( + public_exponent=65537, + key_size=key_size, + backend=default_backend(), + ) + public_key = private_key.public_key() + return cls( + _public_peer=PublicPeer( + _peer=UnverifiedPeer(id=_calculate_peer_id(public_key)), + public_key=public_key, + ), + private_key=private_key, + ) + + def sign(self, data: bytes) -> bytes: + """ Sign any data (of type `bytes`). + """ + return self.private_key.sign( + data, + padding.PSS(mgf=padding.MGF1(hashes.SHA256()), salt_length=padding.PSS.MAX_LENGTH), + hashes.SHA256(), + ) + + @cached_property + def certificate(self) -> x509.Certificate: + """ Return certificate generated and signed with peer private key. + + The result is cached so subsequent calls are really cheap. + """ + _settings = self._public_peer._peer.info._settings + return generate_certificate( + self.private_key, + _settings.CA_FILEPATH, + _settings.CA_KEY_FILEPATH, + ) + + @cached_property + def certificate_options(self) -> CertificateOptions: + """ Return certificate options with certificate generated and signed with peer private key. + + The result is cached so subsequent calls are really cheap. 
+ """ + _settings = self._public_peer._peer.info._settings + openssl_certificate = X509.from_cryptography(self.certificate) + openssl_pkey = PKey.from_cryptography_key(self.private_key) + + with open(_settings.CA_FILEPATH, 'rb') as f: + ca = x509.load_pem_x509_certificate(data=f.read(), backend=default_backend()) + + openssl_ca = X509.from_cryptography(ca) + ca_cert = Certificate(openssl_ca) + trust_root = trustRootFromCertificates([ca_cert]) + + # We should not use a ContextFactory + # https://twistedmatrix.com/documents/19.7.0/api/twisted.protocols.tls.TLSMemoryBIOFactory.html + certificate_options = CertificateOptions( + privateKey=openssl_pkey, + certificate=openssl_certificate, + trustRoot=trust_root, + raiseMinimumTo=TLSVersion.TLSv1_3 + ) + return certificate_options + + @classmethod + def create_from_json_path(cls, path: str) -> Self: + """Create a new PrivatePeer from a JSON file.""" + data = json.load(open(path, 'r')) + peer = cls.create_from_json(data) + peer._source_file = path + return peer + def reload_entrypoints_from_source_file(self) -> None: - """Update this Peer's entrypoints from the json file.""" - if not self.source_file: - raise Exception('Trying to reload entrypoints but no peer config file was provided.') + """Update this PrivatePeer's entrypoints from the json file.""" + if not self._source_file: + raise ValueError('Trying to reload entrypoints but no peer config file was provided.') - new_peer = Peer.create_from_json_path(self.source_file) + new_peer = PrivatePeer.create_from_json_path(self._source_file) if new_peer.id != self.id: - self._log.error( + logger.error( 'Ignoring peer id file update because the peer_id does not match.', current_peer_id=self.id, new_peer_id=new_peer.id, ) return - self.entrypoints = new_peer.entrypoints + self._public_peer._peer.info.entrypoints = new_peer._public_peer._peer.info.entrypoints + + def save_to_file(self, path: str) -> None: + """ Save the object to a JSON file. + """ + import json + data = self.to_json_private() + fp = open(path, 'w') + json.dump(data, fp, indent=4) + fp.close() diff --git a/hathor/p2p/peer_storage.py b/hathor/p2p/peer_storage.py index b6be116b7..b6a433077 100644 --- a/hathor/p2p/peer_storage.py +++ b/hathor/p2p/peer_storage.py @@ -12,16 +12,31 @@ # See the License for the specific language governing permissions and # limitations under the License. -from hathor.p2p.peer import Peer +from typing import Protocol, TypeVar + +from typing_extensions import Self + +from hathor.p2p.peer import PublicPeer, UnverifiedPeer from hathor.p2p.peer_id import PeerId -class PeerStorage(dict[PeerId, Peer]): - """ PeerStorage is used to store all known peers in memory. - It is a dict of peer objects, and peers can be retrieved by their `peer.id`. +class GenericPeer(Protocol): + @property + def id(self) -> PeerId: + pass + + def merge(self, other: Self) -> None: + pass + + +PeerType = TypeVar('PeerType', bound=GenericPeer) + + +class _BasePeerStorage(dict[PeerId, PeerType]): + """ Base class for VerifiedPeerStorage and UnverifiedPeerStorage, do not use directly. """ - def add(self, peer: Peer) -> None: + def add(self, peer: PeerType) -> None: """ Add a new peer to the storage. Raises a `ValueError` if the peer has already been added. @@ -31,9 +46,8 @@ def add(self, peer: Peer) -> None: raise ValueError('Peer has already been added') self[peer.id] = peer - def add_or_merge(self, peer: Peer) -> Peer: - """ Add a peer to the storage if it has not been added yet. - Otherwise, merge the current peer with the given one. 
+ def add_or_merge(self, peer: PeerType) -> PeerType: + """ Add a peer to the storage if it has not been added yet. Otherwise, merge it with the existing peer. """ assert peer.id is not None if peer.id not in self: @@ -44,9 +58,32 @@ def add_or_merge(self, peer: Peer) -> Peer: current.merge(peer) return current - def remove(self, peer: Peer) -> None: + def add_or_replace(self, peer: PeerType) -> PeerType: + """ Add a peer to the storage if it has not been added yet. Otherwise, replace the existing peer. + """ + assert peer.id is not None + if peer.id in self: + del self[peer.id] + self.add(peer) + return peer + + def remove(self, peer: GenericPeer) -> None: """ Remove a peer from the storage """ assert peer.id is not None if peer.id in self: del self[peer.id] + + +class VerifiedPeerStorage(_BasePeerStorage[PublicPeer]): + """ VerifiedPeerStorage is used to store all peers that we have connected to and verified. + + It is a dict of PublicPeer objects, and peers can be retrieved by their `peer.id`. + """ + + +class UnverifiedPeerStorage(_BasePeerStorage[UnverifiedPeer]): + """ UnverifiedPeerStorage is used to store all received peers, we haven't verified their ids/entrypoints yet. + + It is a dict of Peer objects, and peers can be retrieved by their `peer.id`. + """ diff --git a/hathor/p2p/protocol.py b/hathor/p2p/protocol.py index 5a77bdacb..fd3306440 100644 --- a/hathor/p2p/protocol.py +++ b/hathor/p2p/protocol.py @@ -26,7 +26,7 @@ from hathor.conf.settings import HathorSettings from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.messages import ProtocolMessages -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PrivatePeer, PublicPeer from hathor.p2p.peer_id import PeerId from hathor.p2p.rate_limiter import RateLimiter from hathor.p2p.states import BaseState, HelloState, PeerIdState, ReadyState @@ -74,12 +74,12 @@ class WarningFlags(str, Enum): NO_ENTRYPOINTS = 'no_entrypoints' network: str - my_peer: Peer + my_peer: PrivatePeer connections: 'ConnectionsManager' node: 'HathorManager' app_version: str last_message: float - peer: Optional[Peer] + _peer: Optional[PublicPeer] transport: Optional[ITransport] state: Optional[BaseState] connection_time: float @@ -92,10 +92,15 @@ class WarningFlags(str, Enum): sync_version: Optional[SyncVersion] # version chosen to be used on this connection capabilities: set[str] # capabilities received from the peer in HelloState + @property + def peer(self) -> PublicPeer: + assert self._peer is not None, 'self.peer must be initialized' + return self._peer + def __init__( self, network: str, - my_peer: Peer, + my_peer: PrivatePeer, p2p_manager: 'ConnectionsManager', *, settings: HathorSettings, @@ -126,7 +131,7 @@ def __init__( self.diff_timestamp = None # The peer on the other side of the connection. - self.peer = None + self._peer = None # The last time a message has been received from this peer. 
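With the peer.py and peer_storage.py diffs above in place, the split described in the new module docstring can be summarized with a short usage sketch. This is an illustration added for clarity, not patch content; all names come from the refactored hathor/p2p/peer.py above.

    # Illustration only: the conversion chain between the peer classes.
    from hathor.p2p.peer import PrivatePeer, PublicPeer, UnverifiedPeer

    private = PrivatePeer.auto_generated()      # holds the private key: can sign() and build TLS options
    public = private.to_public_peer()           # holds only the public key: can verify_signature()
    announced = private.to_unverified_peer()    # id + entrypoints: what PEERS messages carry

    signature = private.sign(b'payload')
    assert public.verify_signature(signature, b'payload')

    # The classes intentionally do not share an inheritance tree, so they cannot be mixed:
    assert isinstance(public, PublicPeer) and not isinstance(public, UnverifiedPeer)
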
self.last_message = 0 @@ -195,13 +200,13 @@ def get_short_remote(self) -> str: def get_peer_id(self) -> Optional[PeerId]: """Get peer id for logging.""" - if self.peer and self.peer.id: + if self._peer is not None: return self.peer.id return None def get_short_peer_id(self) -> Optional[str]: """Get short peer id for logging.""" - if self.peer and self.peer.id: + if self._peer and self._peer.id: return str(self.peer.id)[:7] return None @@ -297,8 +302,8 @@ def recv_message(self, cmd: ProtocolMessages, payload: str) -> Optional[Deferred now = self.reactor.seconds() self.last_message = now - if self.peer is not None: - self.peer.last_seen = now + if self._peer is not None: + self.peer.info.last_seen = now self.reset_idle_timeout() if not self.ratelimit.add_hit(self.RateLimitKeys.GLOBAL): diff --git a/hathor/p2p/resources/add_peers.py b/hathor/p2p/resources/add_peers.py index 75a39b901..aeb92208c 100644 --- a/hathor/p2p/resources/add_peers.py +++ b/hathor/p2p/resources/add_peers.py @@ -67,7 +67,7 @@ def render_POST(self, request: Request) -> bytes: 'message': 'Malformed entrypoint found.' }) - known_peers = self.manager.connections.peer_storage.values() + known_peers = self.manager.connections.verified_peer_storage.values() def already_connected(entrypoint: Entrypoint) -> bool: # ignore peers that we're already trying to connect diff --git a/hathor/p2p/resources/status.py b/hathor/p2p/resources/status.py index ac26f198c..68edb9f0e 100644 --- a/hathor/p2p/resources/status.py +++ b/hathor/p2p/resources/status.py @@ -80,12 +80,12 @@ def render_GET(self, request): }) known_peers = [] - for peer in self.manager.connections.peer_storage.values(): + for peer in self.manager.connections.verified_peer_storage.values(): known_peers.append({ 'id': str(peer.id), - 'entrypoints': peer.entrypoints_as_str(), - 'last_seen': now - peer.last_seen, - 'flags': [flag.value for flag in peer.flags], + 'entrypoints': peer.info.entrypoints_as_str(), + 'last_seen': now - peer.info.last_seen, + 'flags': [flag.value for flag in peer.info.flags], }) app = 'Hathor v{}'.format(hathor.__version__) @@ -106,7 +106,7 @@ def render_GET(self, request): 'state': self.manager.state.value, 'network': self.manager.network, 'uptime': now - self.manager.start_time, - 'entrypoints': self.manager.connections.my_peer.entrypoints_as_str(), + 'entrypoints': self.manager.connections.my_peer.info.entrypoints_as_str(), }, 'peers_whitelist': [str(peer_id) for peer_id in self.manager.peers_whitelist], 'known_peers': known_peers, diff --git a/hathor/p2p/states/peer_id.py b/hathor/p2p/states/peer_id.py index 7534487b3..2aca0a9db 100644 --- a/hathor/p2p/states/peer_id.py +++ b/hathor/p2p/states/peer_id.py @@ -18,7 +18,7 @@ from hathor.conf.settings import HathorSettings from hathor.p2p.messages import ProtocolMessages -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PublicPeer from hathor.p2p.peer_id import PeerId from hathor.p2p.states.base import BaseState from hathor.util import json_dumps, json_loads @@ -71,7 +71,7 @@ def send_peer_id(self) -> None: hello = { 'id': str(my_peer.id), 'pubKey': my_peer.get_public_key(), - 'entrypoints': my_peer.entrypoints_as_str(), + 'entrypoints': my_peer.info.entrypoints_as_str(), } self.send_message(ProtocolMessages.PEER_ID, json_dumps(hello)) @@ -88,7 +88,7 @@ async def handle_peer_id(self, payload: str) -> None: data = json_loads(payload) - peer = Peer.create_from_json(data) + peer = PublicPeer.create_from_json(data) peer.validate() assert peer.id is not None @@ -114,7 +114,7 @@ async def 
handle_peer_id(self, payload: str) -> None: protocol.send_error_and_close_connection('We are already connected.') return - entrypoint_valid = await peer.validate_entrypoint(protocol) + entrypoint_valid = await peer.info.validate_entrypoint(protocol) if not entrypoint_valid: protocol.send_error_and_close_connection('Connection string is not in the entrypoints.') return @@ -126,7 +126,7 @@ async def handle_peer_id(self, payload: str) -> None: return # If it gets here, the peer is validated, and we are ready to start communicating. - protocol.peer = peer + protocol._peer = peer context = NetfilterContext( protocol=protocol, diff --git a/hathor/p2p/states/ready.py b/hathor/p2p/states/ready.py index 1d011cf00..1bed1c745 100644 --- a/hathor/p2p/states/ready.py +++ b/hathor/p2p/states/ready.py @@ -21,7 +21,7 @@ from hathor.conf.settings import HathorSettings from hathor.indexes.height_index import HeightInfo from hathor.p2p.messages import ProtocolMessages -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PublicPeer, UnverifiedPeer from hathor.p2p.states.base import BaseState from hathor.p2p.sync_agent import SyncAgent from hathor.p2p.utils import to_height_info, to_serializable_best_blockchain @@ -155,19 +155,16 @@ def handle_get_peers(self, payload: str) -> None: """ Executed when a GET-PEERS command is received. It just responds with a list of all known peers. """ - for peer in self.protocol.connections.peer_storage.values(): + for peer in self.protocol.connections.verified_peer_storage.values(): self.send_peers([peer]) - def send_peers(self, peer_list: Iterable['Peer']) -> None: + def send_peers(self, peer_list: Iterable[PublicPeer]) -> None: """ Send a PEERS command with a list of peers. """ data = [] for peer in peer_list: - if peer.entrypoints: - data.append({ - 'id': str(peer.id), - 'entrypoints': peer.entrypoints_as_str(), - }) + if peer.info.entrypoints: + data.append(peer.to_unverified_peer().to_json()) self.send_message(ProtocolMessages.PEERS, json_dumps(data)) self.log.debug('send peers', peers=data) @@ -177,8 +174,7 @@ def handle_peers(self, payload: str) -> None: """ received_peers = json_loads(payload) for data in received_peers: - peer = Peer.create_from_json(data) - peer.validate() + peer = UnverifiedPeer.create_from_json(data) if self.protocol.connections: self.protocol.connections.on_receive_peer(peer, origin=self) self.log.debug('received peers', payload=payload) diff --git a/hathor/p2p/sync_v2/agent.py b/hathor/p2p/sync_v2/agent.py index c3de1f3e2..7046f755c 100644 --- a/hathor/p2p/sync_v2/agent.py +++ b/hathor/p2p/sync_v2/agent.py @@ -25,6 +25,7 @@ from twisted.internet.task import LoopingCall, deferLater from hathor.conf.settings import HathorSettings +from hathor.exception import InvalidNewTransaction from hathor.p2p.messages import ProtocolMessages from hathor.p2p.sync_agent import SyncAgent from hathor.p2p.sync_v2.blockchain_streaming_client import BlockchainStreamingClient, StreamingError @@ -43,6 +44,7 @@ from hathor.transaction.vertex_parser import VertexParser from hathor.types import VertexId from hathor.util import collect_n +from hathor.vertex_handler import VertexHandler if TYPE_CHECKING: from hathor.p2p.protocol import HathorProtocol @@ -91,6 +93,7 @@ def __init__( reactor: Reactor, *, vertex_parser: VertexParser, + vertex_handler: VertexHandler, ) -> None: """ :param protocol: Protocol of the connection. 
@@ -101,8 +104,8 @@ def __init__( """ self._settings = settings self.vertex_parser = vertex_parser + self.vertex_handler = vertex_handler self.protocol = protocol - self.manager = protocol.node self.tx_storage: 'TransactionStorage' = protocol.node.tx_storage self.state = PeerState.UNKNOWN @@ -615,13 +618,16 @@ def find_best_common_block(self, def on_block_complete(self, blk: Block, vertex_list: list[BaseTransaction]) -> Generator[Any, Any, None]: """This method is called when a block and its transactions are downloaded.""" # Note: Any vertex and block could have already been added by another concurrent syncing peer. - for tx in vertex_list: - if not self.tx_storage.transaction_exists(tx.hash): - self.manager.on_new_tx(tx, propagate_to_peers=False, fails_silently=False) - yield deferLater(self.reactor, 0, lambda: None) + try: + for tx in vertex_list: + if not self.tx_storage.transaction_exists(tx.hash): + self.vertex_handler.on_new_vertex(tx, propagate_to_peers=False, fails_silently=False) + yield deferLater(self.reactor, 0, lambda: None) - if not self.tx_storage.transaction_exists(blk.hash): - self.manager.on_new_tx(blk, propagate_to_peers=False, fails_silently=False) + if not self.tx_storage.transaction_exists(blk.hash): + self.vertex_handler.on_new_vertex(blk, propagate_to_peers=False, fails_silently=False) + except InvalidNewTransaction: + self.protocol.send_error_and_close_connection('invalid vertex received') def get_peer_block_hashes(self, heights: list[int]) -> Deferred[list[_HeightInfo]]: """ Returns the peer's block hashes in the given heights. @@ -1160,14 +1166,17 @@ def handle_data(self, payload: str) -> None: if self.partial_vertex_exists(tx.hash): # transaction already added to the storage, ignore it # XXX: maybe we could add a hash blacklist and punish peers propagating known bad txs - self.manager.tx_storage.compare_bytes_with_local_tx(tx) + self.tx_storage.compare_bytes_with_local_tx(tx) return else: # If we have not requested the data, it is a new transaction being propagated # in the network, thus, we propagate it as well. 
if tx.can_validate_full(): self.log.debug('tx received in real time from peer', tx=tx.hash_hex, peer=self.protocol.get_peer_id()) - self.manager.on_new_tx(tx, propagate_to_peers=True) + try: + self.vertex_handler.on_new_vertex(tx, propagate_to_peers=True, fails_silently=False) + except InvalidNewTransaction: + self.protocol.send_error_and_close_connection('invalid vertex received') else: self.log.debug('skipping tx received in real time from peer', tx=tx.hash_hex, peer=self.protocol.get_peer_id()) diff --git a/hathor/p2p/sync_v2/blockchain_streaming_client.py b/hathor/p2p/sync_v2/blockchain_streaming_client.py index a08b305de..f00395d79 100644 --- a/hathor/p2p/sync_v2/blockchain_streaming_client.py +++ b/hathor/p2p/sync_v2/blockchain_streaming_client.py @@ -40,7 +40,7 @@ def __init__(self, sync_agent: 'NodeBlockSync', start_block: '_HeightInfo', end_ self.sync_agent = sync_agent self.protocol = self.sync_agent.protocol self.tx_storage = self.sync_agent.tx_storage - self.manager = self.sync_agent.manager + self.vertex_handler = self.sync_agent.vertex_handler self.log = logger.new(peer=self.protocol.get_short_peer_id()) @@ -132,7 +132,7 @@ def handle_blocks(self, blk: Block) -> None: if blk.can_validate_full(): try: - self.manager.on_new_tx(blk, propagate_to_peers=False, fails_silently=False) + self.vertex_handler.on_new_vertex(blk, propagate_to_peers=False, fails_silently=False) except HathorError: self.fails(InvalidVertexError(blk.hash.hex())) return diff --git a/hathor/p2p/sync_v2/factory.py b/hathor/p2p/sync_v2/factory.py index 65d42d622..b9be356b3 100644 --- a/hathor/p2p/sync_v2/factory.py +++ b/hathor/p2p/sync_v2/factory.py @@ -21,16 +21,31 @@ from hathor.p2p.sync_v2.agent import NodeBlockSync from hathor.reactor import ReactorProtocol as Reactor from hathor.transaction.vertex_parser import VertexParser +from hathor.vertex_handler import VertexHandler if TYPE_CHECKING: from hathor.p2p.protocol import HathorProtocol class SyncV2Factory(SyncAgentFactory): - def __init__(self, settings: HathorSettings, connections: ConnectionsManager, *, vertex_parser: VertexParser): + def __init__( + self, + settings: HathorSettings, + connections: ConnectionsManager, + *, + vertex_parser: VertexParser, + vertex_handler: VertexHandler, + ): self._settings = settings self.connections = connections self.vertex_parser = vertex_parser + self.vertex_handler = vertex_handler def create_sync_agent(self, protocol: 'HathorProtocol', reactor: Reactor) -> SyncAgent: - return NodeBlockSync(self._settings, protocol, reactor=reactor, vertex_parser=self.vertex_parser) + return NodeBlockSync( + self._settings, + protocol, + reactor=reactor, + vertex_parser=self.vertex_parser, + vertex_handler=self.vertex_handler, + ) diff --git a/hathor/p2p/sync_v2/mempool.py b/hathor/p2p/sync_v2/mempool.py index d4eb7bfe6..806c16849 100644 --- a/hathor/p2p/sync_v2/mempool.py +++ b/hathor/p2p/sync_v2/mempool.py @@ -18,6 +18,7 @@ from structlog import get_logger from twisted.internet.defer import Deferred, inlineCallbacks +from hathor.exception import InvalidNewTransaction from hathor.transaction import BaseTransaction if TYPE_CHECKING: @@ -35,8 +36,8 @@ def __init__(self, sync_agent: 'NodeBlockSync'): # Shortcuts. 
self.sync_agent = sync_agent - self.manager = self.sync_agent.manager - self.tx_storage = self.manager.tx_storage + self.vertex_handler = self.sync_agent.vertex_handler + self.tx_storage = self.sync_agent.tx_storage self.reactor = self.sync_agent.reactor self._deferred: Optional[Deferred[bool]] = None @@ -74,6 +75,8 @@ def _run(self) -> Generator[Deferred, Any, None]: is_synced = False try: is_synced = yield self._unsafe_run() + except InvalidNewTransaction: + return finally: # sync_agent.run_sync will start it again when needed self._is_running = False @@ -134,4 +137,10 @@ def _next_missing_dep(self, tx: BaseTransaction) -> Optional[bytes]: def _add_tx(self, tx: BaseTransaction) -> None: """Add tx to the DAG.""" self.missing_tips.discard(tx.hash) - self.manager.on_new_tx(tx) + if self.tx_storage.transaction_exists(tx.hash): + return + try: + self.vertex_handler.on_new_vertex(tx, fails_silently=False) + except InvalidNewTransaction: + self.sync_agent.protocol.send_error_and_close_connection('invalid vertex received') + raise diff --git a/hathor/p2p/sync_v2/transaction_streaming_client.py b/hathor/p2p/sync_v2/transaction_streaming_client.py index d1b068222..e784a41cc 100644 --- a/hathor/p2p/sync_v2/transaction_streaming_client.py +++ b/hathor/p2p/sync_v2/transaction_streaming_client.py @@ -45,8 +45,8 @@ def __init__(self, self.sync_agent = sync_agent self.protocol = self.sync_agent.protocol self.tx_storage = self.sync_agent.tx_storage - self.manager = self.sync_agent.manager - self.reactor = self.manager.reactor + self.verification_service = self.protocol.node.verification_service + self.reactor = sync_agent.reactor self.log = logger.new(peer=self.protocol.get_short_peer_id()) @@ -153,7 +153,7 @@ def _process_transaction(self, tx: BaseTransaction) -> Generator[Any, Any, None] # Run basic verification. if not tx.is_genesis: try: - self.manager.verification_service.verify_basic(tx) + self.verification_service.verify_basic(tx) except TxValidationError as e: self.fails(InvalidVertexError(repr(e))) return diff --git a/hathor/simulator/fake_connection.py b/hathor/simulator/fake_connection.py index 4473813e8..c993302db 100644 --- a/hathor/simulator/fake_connection.py +++ b/hathor/simulator/fake_connection.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
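The sync-v2 hunks above replace direct calls to manager.on_new_tx with a VertexHandler dependency, and the agent and mempool sync now close the connection when adding a received vertex raises InvalidNewTransaction. Distilled as an illustration (not patch content), with `vertex_handler`, `protocol` and `tx` as used in those hunks:

    # Illustration only: the error-handling pattern adopted by the sync-v2 code above.
    from hathor.exception import InvalidNewTransaction

    try:
        vertex_handler.on_new_vertex(tx, propagate_to_peers=False, fails_silently=False)
    except InvalidNewTransaction:
        protocol.send_error_and_close_connection('invalid vertex received')
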
+from __future__ import annotations + from collections import deque from typing import TYPE_CHECKING, Optional @@ -20,21 +22,27 @@ from twisted.internet.address import HostnameAddress from twisted.internet.testing import StringTransport +from hathor.p2p.peer import PrivatePeer + if TYPE_CHECKING: from hathor.manager import HathorManager - from hathor.p2p.peer import Peer + from hathor.p2p.peer import PublicPeer logger = get_logger() class HathorStringTransport(StringTransport): - def __init__(self, peer: 'Peer'): + def __init__(self, peer: PrivatePeer): super().__init__() - self.peer = peer + self._peer = peer + + @property + def peer(self) -> PublicPeer: + return self._peer.to_public_peer() def getPeerCertificate(self) -> X509: - certificate = self.peer.get_certificate() - return X509.from_cryptography(certificate) + assert isinstance(self._peer, PrivatePeer) + return X509.from_cryptography(self._peer.certificate) class FakeConnection: diff --git a/hathor/simulator/simulator.py b/hathor/simulator/simulator.py index c1d0754f4..d5c6bbeed 100644 --- a/hathor/simulator/simulator.py +++ b/hathor/simulator/simulator.py @@ -26,7 +26,7 @@ from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.feature_activation.feature_service import FeatureService from hathor.manager import HathorManager -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PrivatePeer from hathor.simulator.clock import HeapClock, MemoryReactorHeapClock from hathor.simulator.miner.geometric_miner import GeometricMiner from hathor.simulator.patches import SimulatorCpuMiningService, SimulatorVertexVerifier @@ -81,7 +81,7 @@ def get_default_builder(self) -> Builder: """ return Builder() \ .set_network(self._network) \ - .set_peer(Peer()) \ + .set_peer(PrivatePeer.auto_generated()) \ .set_soft_voided_tx_ids(set()) \ .enable_full_verification() \ .enable_sync_v1() \ diff --git a/tests/event/event_simulation_tester.py b/tests/event/event_simulation_tester.py index 1383a4fd8..c7d3bb6bc 100644 --- a/tests/event/event_simulation_tester.py +++ b/tests/event/event_simulation_tester.py @@ -23,7 +23,7 @@ from hathor.event.websocket import EventWebsocketProtocol from hathor.event.websocket.request import Request from hathor.event.websocket.response import EventResponse, InvalidRequestResponse -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PrivatePeer from hathor.transaction.util import unpack, unpack_len from hathor.util import json_loadb from tests.simulation.base import SimulatorTestCase @@ -34,7 +34,7 @@ class BaseEventSimulationTester(SimulatorTestCase): builder: Builder def _create_artifacts(self) -> None: - peer = Peer() + peer = PrivatePeer.auto_generated() builder = self.builder.set_peer(peer) \ .disable_full_verification() \ .enable_event_queue() diff --git a/tests/others/test_metrics.py b/tests/others/test_metrics.py index 99c410c50..6996f1ef5 100644 --- a/tests/others/test_metrics.py +++ b/tests/others/test_metrics.py @@ -5,7 +5,7 @@ from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.manager import PeerConnectionsMetrics -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PrivatePeer from hathor.p2p.protocol import HathorProtocol from hathor.pubsub import HathorEvents from hathor.simulator.utils import add_new_blocks @@ -61,7 +61,11 @@ def test_connections_manager_integration(self): wallet.unlock(b'teste') manager = self.create_peer('testnet', tx_storage=tx_storage, wallet=wallet) - manager.connections.peer_storage.update({"1": Peer(), "2": Peer(), "3": Peer()}) + 
manager.connections.verified_peer_storage.update({ + "1": PrivatePeer.auto_generated(), + "2": PrivatePeer.auto_generated(), + "3": PrivatePeer.auto_generated(), + }) manager.connections.connected_peers.update({"1": Mock(), "2": Mock()}) manager.connections.handshaking_peers.update({Mock()}) @@ -223,7 +227,7 @@ def build_hathor_protocol(): inbound=False, settings=self._settings ) - protocol.peer = Peer() + protocol._peer = PrivatePeer.auto_generated().to_public_peer() return protocol diff --git a/tests/p2p/netfilter/test_match.py b/tests/p2p/netfilter/test_match.py index b8debf58f..ad3929d85 100644 --- a/tests/p2p/netfilter/test_match.py +++ b/tests/p2p/netfilter/test_match.py @@ -11,7 +11,7 @@ NetfilterMatchOr, NetfilterMatchPeerId, ) -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PrivatePeer from hathor.simulator import FakeConnection from tests import unittest @@ -202,8 +202,8 @@ class BaseNetfilterMatchTest(unittest.TestCase): def test_match_peer_id(self) -> None: network = 'testnet' - peer1 = Peer() - peer2 = Peer() + peer1 = PrivatePeer.auto_generated() + peer2 = PrivatePeer.auto_generated() manager1 = self.create_peer(network, peer=peer1) manager2 = self.create_peer(network, peer=peer2) diff --git a/tests/p2p/test_bootstrap.py b/tests/p2p/test_bootstrap.py index 721a1a1e2..344194549 100644 --- a/tests/p2p/test_bootstrap.py +++ b/tests/p2p/test_bootstrap.py @@ -6,7 +6,7 @@ from hathor.p2p.entrypoint import Entrypoint, Protocol from hathor.p2p.manager import ConnectionsManager -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PrivatePeer from hathor.p2p.peer_discovery import DNSPeerDiscovery, PeerDiscovery from hathor.p2p.peer_discovery.dns import LookupResult from hathor.pubsub import PubSubManager @@ -48,7 +48,8 @@ def do_lookup_text(self, host: str) -> Deferred[LookupResult]: class BootstrapTestCase(unittest.TestCase): def test_mock_discovery(self) -> None: pubsub = PubSubManager(self.clock) - connections = ConnectionsManager(self._settings, self.clock, 'testnet', Peer(), pubsub, True, self.rng, True) + peer = PrivatePeer.auto_generated() + connections = ConnectionsManager(self._settings, self.clock, 'testnet', peer, pubsub, True, self.rng, True) host_ports1 = [ ('foobar', 1234), ('127.0.0.99', 9999), @@ -71,7 +72,8 @@ def test_mock_discovery(self) -> None: def test_dns_discovery(self) -> None: pubsub = PubSubManager(self.clock) - connections = ConnectionsManager(self._settings, self.clock, 'testnet', Peer(), pubsub, True, self.rng, True) + peer = PrivatePeer.auto_generated() + connections = ConnectionsManager(self._settings, self.clock, 'testnet', peer, pubsub, True, self.rng, True) bootstrap_a = [ '127.0.0.99', '127.0.0.88', diff --git a/tests/p2p/test_peer_id.py b/tests/p2p/test_peer_id.py index f870f824a..1604e29c9 100644 --- a/tests/p2p/test_peer_id.py +++ b/tests/p2p/test_peer_id.py @@ -7,51 +7,49 @@ from twisted.internet.interfaces import ITransport from hathor.p2p.entrypoint import Entrypoint -from hathor.p2p.peer import InvalidPeerIdException, Peer +from hathor.p2p.peer import InvalidPeerIdException, PrivatePeer, PublicPeer, UnverifiedPeer from hathor.p2p.peer_id import PeerId -from hathor.p2p.peer_storage import PeerStorage -from hathor.util import not_none +from hathor.p2p.peer_storage import VerifiedPeerStorage from tests import unittest from tests.unittest import TestBuilder class PeerIdTest(unittest.TestCase): def test_invalid_id(self) -> None: - p1 = Peer() - p1.id = PeerId(str(not_none(p1.id))[::-1]) + p1 = PrivatePeer.auto_generated() + 
p1._public_peer._peer.id = PeerId(str(p1.id)[::-1]) self.assertRaises(InvalidPeerIdException, p1.validate) def test_invalid_public_key(self) -> None: - p1 = Peer() - p2 = Peer() - p1.public_key = p2.public_key + p1 = PrivatePeer.auto_generated() + p2 = PrivatePeer.auto_generated() + p1._public_peer.public_key = p2.public_key self.assertRaises(InvalidPeerIdException, p1.validate) def test_invalid_private_key(self) -> None: - p1 = Peer() - p2 = Peer() + p1 = PrivatePeer.auto_generated() + p2 = PrivatePeer.auto_generated() p1.private_key = p2.private_key self.assertRaises(InvalidPeerIdException, p1.validate) def test_no_private_key(self) -> None: - p1 = Peer() - p1.private_key = None + p1 = PrivatePeer.auto_generated().to_public_peer() p1.validate() def test_create_from_json(self) -> None: - p1 = Peer() - data1 = p1.to_json(include_private_key=True) - p2 = Peer.create_from_json(data1) - data2 = p2.to_json(include_private_key=True) + p1 = PrivatePeer.auto_generated() + data1 = p1.to_json_private() + p2 = PrivatePeer.create_from_json(data1) + data2 = p2.to_json_private() self.assertEqual(data1, data2) p2.validate() def test_create_from_json_without_private_key(self) -> None: - p1 = Peer() + p1 = PrivatePeer.auto_generated() data1 = p1.to_json() # Just to test a part of the code del data1['entrypoints'] - p2 = Peer.create_from_json(data1) + p2 = PublicPeer.create_from_json(data1) data2 = p2.to_json() self.assertEqual(data2['entrypoints'], []) data1['entrypoints'] = [] @@ -60,53 +58,46 @@ def test_create_from_json_without_private_key(self) -> None: def test_sign_verify(self) -> None: data = b'abacate' - p1 = Peer() + p1 = PrivatePeer.auto_generated() signature = p1.sign(data) - self.assertTrue(p1.verify_signature(signature, data)) + self.assertTrue(p1.to_public_peer().verify_signature(signature, data)) def test_sign_verify_fail(self) -> None: data = b'abacate' - p1 = Peer() + p1 = PrivatePeer.auto_generated() signature = p1.sign(data) signature = signature[::-1] - self.assertFalse(p1.verify_signature(signature, data)) + self.assertFalse(p1.to_public_peer().verify_signature(signature, data)) def test_merge_peer(self) -> None: # Testing peer storage with merge of peers - peer_storage = PeerStorage() + peer_storage = VerifiedPeerStorage() - p1 = Peer() - p2 = Peer() - p2.id = p1.id - p2.public_key = p1.public_key - p1.public_key = None + p1 = PrivatePeer.auto_generated() + p2 = PrivatePeer.auto_generated() + p2._public_peer._peer.id = p1.id + p2._public_peer.public_key = p1.public_key - peer_storage.add_or_merge(p1) + peer_storage.add_or_merge(p1.to_public_peer()) self.assertEqual(len(peer_storage), 1) - peer_storage.add_or_merge(p2) - - peer = peer_storage[not_none(p1.id)] + peer_storage.add_or_merge(p2.to_public_peer()) + peer = peer_storage[p1.id] self.assertEqual(peer.id, p1.id) - self.assertEqual(peer.private_key, p1.private_key) self.assertEqual(peer.public_key, p1.public_key) - self.assertEqual(peer.entrypoints, []) + self.assertEqual(peer.info.entrypoints, []) ep1 = Entrypoint.parse('tcp://127.0.0.1:1001') ep2 = Entrypoint.parse('tcp://127.0.0.1:1002') ep3 = Entrypoint.parse('tcp://127.0.0.1:1003') - p3 = Peer() - p3.entrypoints.append(ep1) - p3.entrypoints.append(ep2) - p3.public_key = None - - p4 = Peer() - p4.public_key = None - p4.private_key = None - p4.id = p3.id - p4.entrypoints.append(ep2) - p4.entrypoints.append(ep3) + p3 = PrivatePeer.auto_generated().to_public_peer() + p3.info.entrypoints.append(ep1) + p3.info.entrypoints.append(ep2) + + p4 = PublicPeer(UnverifiedPeer(id=p3.id), 
public_key=p3.public_key) + p4.info.entrypoints.append(ep2) + p4.info.entrypoints.append(ep3) peer_storage.add_or_merge(p4) self.assertEqual(len(peer_storage), 2) @@ -114,18 +105,17 @@ def test_merge_peer(self) -> None: peer_storage.add_or_merge(p3) self.assertEqual(len(peer_storage), 2) - peer = peer_storage[not_none(p3.id)] + peer = peer_storage[p3.id] self.assertEqual(peer.id, p3.id) - self.assertEqual(peer.private_key, p3.private_key) - self.assertEqual(set(peer.entrypoints), {ep1, ep2, ep3}) + self.assertEqual(set(peer.info.entrypoints), {ep1, ep2, ep3}) with self.assertRaises(ValueError): - peer_storage.add(p1) + peer_storage.add(p1.to_public_peer()) def test_save_peer_file(self) -> None: import json - p = Peer() + p = PrivatePeer.auto_generated() tmpdir = tempfile.mkdtemp() path = os.path.join(tmpdir, 'peer.json') p.save_to_file(path) @@ -133,87 +123,86 @@ def test_save_peer_file(self) -> None: with open(path, 'r') as f: peer_from_file = json.load(f) - self.assertEqual(p.to_json(include_private_key=True), peer_from_file) + self.assertEqual(p.to_json_private(), peer_from_file) # Removing tmpdir shutil.rmtree(tmpdir) def test_retry_connection(self) -> None: - p = Peer() - interval = p.retry_interval - p.increment_retry_attempt(0) - self.assertEqual(self._settings.PEER_CONNECTION_RETRY_INTERVAL_MULTIPLIER*interval, p.retry_interval) - self.assertEqual(interval, p.retry_timestamp) + p = PrivatePeer.auto_generated() + interval = p.info.retry_interval + p.info.increment_retry_attempt(0) + self.assertEqual(self._settings.PEER_CONNECTION_RETRY_INTERVAL_MULTIPLIER*interval, p.info.retry_interval) + self.assertEqual(interval, p.info.retry_timestamp) # when retry_interval is already 180 - p.retry_interval = self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL + 10 - p.increment_retry_attempt(0) - self.assertEqual(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL, p.retry_interval) + p.info.retry_interval = self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL + 10 + p.info.increment_retry_attempt(0) + self.assertEqual(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL, p.info.retry_interval) # reset - p.reset_retry_timestamp() - self.assertEqual(p.retry_interval, 5) - self.assertEqual(p.retry_timestamp, 0) + p.info.reset_retry_timestamp() + self.assertEqual(p.info.retry_interval, 5) + self.assertEqual(p.info.retry_timestamp, 0) def test_validate_certificate(self) -> None: builder = TestBuilder() artifacts = builder.build() protocol = artifacts.p2p_manager.server_factory.buildProtocol(Mock()) - peer = Peer() + peer = PrivatePeer.auto_generated() from OpenSSL import crypto class FakeTransport: def getPeerCertificate(self) -> crypto.X509: - # we use a new peer here just to save the trouble of manually creating a certificate - random_peer = Peer() - return crypto.X509.from_cryptography(random_peer.get_certificate()) + random_peer = PrivatePeer.auto_generated() + return crypto.X509.from_cryptography(random_peer.certificate) protocol.transport = cast(ITransport, FakeTransport()) - result = peer.validate_certificate(protocol) + result = peer.to_public_peer().validate_certificate(protocol) self.assertFalse(result) def test_retry_logic(self) -> None: - peer = Peer() - self.assertTrue(peer.can_retry(0)) + peer = PrivatePeer.auto_generated() + self.assertTrue(peer.info.can_retry(0)) - retry_interval = peer.retry_interval + retry_interval = peer.info.retry_interval - peer.increment_retry_attempt(0) - self.assertFalse(peer.can_retry(0)) - self.assertFalse(peer.can_retry(retry_interval - 1)) 
- self.assertTrue(peer.can_retry(retry_interval)) - self.assertTrue(peer.can_retry(retry_interval + 1)) + peer.info.increment_retry_attempt(0) + self.assertFalse(peer.info.can_retry(0)) + self.assertFalse(peer.info.can_retry(retry_interval - 1)) + self.assertTrue(peer.info.can_retry(retry_interval)) + self.assertTrue(peer.info.can_retry(retry_interval + 1)) - peer.increment_retry_attempt(0) - self.assertFalse(peer.can_retry(retry_interval)) + peer.info.increment_retry_attempt(0) + self.assertFalse(peer.info.can_retry(retry_interval)) retry_interval *= self._settings.PEER_CONNECTION_RETRY_INTERVAL_MULTIPLIER - self.assertFalse(peer.can_retry(retry_interval - 1)) - self.assertTrue(peer.can_retry(retry_interval)) - self.assertTrue(peer.can_retry(retry_interval)) + self.assertFalse(peer.info.can_retry(retry_interval - 1)) + self.assertTrue(peer.info.can_retry(retry_interval)) + self.assertTrue(peer.info.can_retry(retry_interval)) # Retry until we reach max retry interval. - while peer.retry_interval < self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL: - peer.increment_retry_attempt(0) + while peer.info.retry_interval < self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL: + peer.info.increment_retry_attempt(0) # We need to call it once more because peer.retry_interval is always one step behind. - peer.increment_retry_attempt(0) + peer.info.increment_retry_attempt(0) # Confirm we are at the max retry interval. - self.assertFalse(peer.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL - 1)) - self.assertTrue(peer.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL)) - self.assertTrue(peer.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL + 1)) + self.assertFalse(peer.info.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL - 1)) + self.assertTrue(peer.info.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL)) + self.assertTrue(peer.info.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL + 1)) # It shouldn't change with another retry. - peer.increment_retry_attempt(0) - self.assertFalse(peer.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL - 1)) - self.assertTrue(peer.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL)) - self.assertTrue(peer.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL + 1)) + peer.info.increment_retry_attempt(0) + self.assertFalse(peer.info.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL - 1)) + self.assertTrue(peer.info.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL)) + self.assertTrue(peer.info.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL + 1)) # Finally, reset it. 
- peer.reset_retry_timestamp() - self.assertTrue(peer.can_retry(0)) + peer.info.reset_retry_timestamp() + self.assertTrue(peer.info.can_retry(0)) class BasePeerIdTest(unittest.TestCase): @@ -222,38 +211,38 @@ class BasePeerIdTest(unittest.TestCase): async def test_validate_entrypoint(self) -> None: manager = self.create_peer('testnet', unlock_wallet=False) peer = manager.my_peer - peer.entrypoints = [Entrypoint.parse('tcp://127.0.0.1:40403')] + peer.info.entrypoints = [Entrypoint.parse('tcp://127.0.0.1:40403')] # we consider that we are starting the connection to the peer protocol = manager.connections.client_factory.buildProtocol('127.0.0.1') protocol.entrypoint = Entrypoint.parse('tcp://127.0.0.1:40403') - result = await peer.validate_entrypoint(protocol) + result = await peer.info.validate_entrypoint(protocol) self.assertTrue(result) # if entrypoint is an URI - peer.entrypoints = [Entrypoint.parse('tcp://uri_name:40403')] - result = await peer.validate_entrypoint(protocol) + peer.info.entrypoints = [Entrypoint.parse('tcp://uri_name:40403')] + result = await peer.info.validate_entrypoint(protocol) self.assertTrue(result) # test invalid. DNS in test mode will resolve to '127.0.0.1:40403' protocol.entrypoint = Entrypoint.parse('tcp://45.45.45.45:40403') - result = await peer.validate_entrypoint(protocol) + result = await peer.info.validate_entrypoint(protocol) self.assertFalse(result) # now test when receiving the connection - i.e. the peer starts it protocol.entrypoint = None - peer.entrypoints = [Entrypoint.parse('tcp://127.0.0.1:40403')] + peer.info.entrypoints = [Entrypoint.parse('tcp://127.0.0.1:40403')] from collections import namedtuple - Peer = namedtuple('Peer', 'host') + DummyPeer = namedtuple('DummyPeer', 'host') class FakeTransport: - def getPeer(self) -> Peer: - return Peer(host='127.0.0.1') + def getPeer(self) -> DummyPeer: + return DummyPeer(host='127.0.0.1') protocol.transport = FakeTransport() - result = await peer.validate_entrypoint(protocol) + result = await peer.info.validate_entrypoint(protocol) self.assertTrue(result) # if entrypoint is an URI - peer.entrypoints = [Entrypoint.parse('tcp://uri_name:40403')] - result = await peer.validate_entrypoint(protocol) + peer.info.entrypoints = [Entrypoint.parse('tcp://uri_name:40403')] + result = await peer.info.validate_entrypoint(protocol) self.assertTrue(result) diff --git a/tests/p2p/test_protocol.py b/tests/p2p/test_protocol.py index 7187c7149..34ec291d3 100644 --- a/tests/p2p/test_protocol.py +++ b/tests/p2p/test_protocol.py @@ -6,7 +6,7 @@ from twisted.python.failure import Failure from hathor.p2p.entrypoint import Entrypoint -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PrivatePeer from hathor.p2p.protocol import HathorLineReceiver, HathorProtocol from hathor.simulator import FakeConnection from hathor.util import json_dumps, json_loadb @@ -19,8 +19,8 @@ class BaseHathorProtocolTestCase(unittest.TestCase): def setUp(self) -> None: super().setUp() self.network = 'testnet' - self.peer1 = Peer() - self.peer2 = Peer() + self.peer1 = PrivatePeer.auto_generated() + self.peer2 = PrivatePeer.auto_generated() self.manager1 = self.create_peer(self.network, peer=self.peer1) self.manager2 = self.create_peer(self.network, peer=self.peer2) self.conn = FakeConnection(self.manager1, self.manager2) @@ -73,8 +73,8 @@ def test_on_connect(self) -> None: def test_peer_with_entrypoint(self) -> None: entrypoint_str = 'tcp://192.168.1.1:54321' entrypoint = Entrypoint.parse(entrypoint_str) - 
self.peer1.entrypoints.append(entrypoint) - self.peer2.entrypoints.append(entrypoint) + self.peer1.info.entrypoints.append(entrypoint) + self.peer2.info.entrypoints.append(entrypoint) self.conn.run_one_step() # HELLO msg1 = self.conn.peek_tr1_value() @@ -291,22 +291,22 @@ def test_on_disconnect_after_peer(self) -> None: self.conn.run_one_step() # HELLO self.assertIn(self.conn.proto1, self.manager1.connections.handshaking_peers) # No peer id in the peer_storage (known_peers) - self.assertNotIn(self.peer2.id, self.manager1.connections.peer_storage) + self.assertNotIn(self.peer2.id, self.manager1.connections.verified_peer_storage) # The peer READY now depends on a message exchange from both peers, so we need one more step self.conn.run_one_step() # PEER-ID self.conn.run_one_step() # READY self.assertIn(self.conn.proto1, self.manager1.connections.connected_peers.values()) # Peer id 2 in the peer_storage (known_peers) after connection - self.assertIn(self.peer2.id, self.manager1.connections.peer_storage) + self.assertIn(self.peer2.id, self.manager1.connections.verified_peer_storage) self.assertNotIn(self.conn.proto1, self.manager1.connections.handshaking_peers) self.conn.disconnect(Failure(Exception('testing'))) # Peer id 2 in the peer_storage (known_peers) after disconnection but before looping call - self.assertIn(self.peer2.id, self.manager1.connections.peer_storage) + self.assertIn(self.peer2.id, self.manager1.connections.verified_peer_storage) self.assertNotIn(self.conn.proto1, self.manager1.connections.connected_peers.values()) self.clock.advance(10) # Peer id 2 removed from peer_storage (known_peers) after disconnection and after looping call - self.assertNotIn(self.peer2.id, self.manager1.connections.peer_storage) + self.assertNotIn(self.peer2.id, self.manager1.connections.verified_peer_storage) def test_idle_connection(self) -> None: self.clock.advance(self._settings.PEER_IDLE_TIMEOUT - 10) diff --git a/tests/p2p/test_sync_mempool.py b/tests/p2p/test_sync_mempool.py index 27c518552..87bf3edde 100644 --- a/tests/p2p/test_sync_mempool.py +++ b/tests/p2p/test_sync_mempool.py @@ -1,8 +1,11 @@ +import base64 + from hathor.crypto.util import decode_address from hathor.graphviz import GraphvizVisualizer +from hathor.mining.cpu_mining_service import CpuMiningService from hathor.simulator import FakeConnection from hathor.transaction import Block, Transaction -from hathor.util import not_none +from hathor.util import json_loadb, not_none from tests import unittest from tests.utils import add_blocks_unlock_reward @@ -112,6 +115,74 @@ def test_mempool_basic(self) -> None: self.assertEqual(len(self.manager2.tx_storage.indexes.mempool_tips.get()), 1) self.assertEqual(len(self.manager1.tx_storage.indexes.mempool_tips.get()), 1) + def test_mempool_invalid_new_tx(self) -> None: + # 10 blocks + self._add_new_blocks(2) + # N blocks to unlock the reward + add_blocks_unlock_reward(self.manager1) + + # 5 transactions to be confirmed by the next blocks + self._add_new_transactions(5) + # 2 more blocks + self._add_new_blocks(2) + # 30 transactions in the mempool + txs = self._add_new_transactions(30) + + self.manager2 = self.create_peer(self.network, enable_sync_v1=True) + self.assertEqual(self.manager2.state, self.manager2.NodeState.READY) + + conn = FakeConnection(self.manager1, self.manager2) + + # inject invalid tx in manager1 to be sent to manager2 through mempool-sync + invalid_tx = txs[0].clone() + invalid_tx.parents[1] = invalid_tx.parents[0] # duplicate parents + cpu_mining = CpuMiningService() + 
cpu_mining.resolve(invalid_tx) + self.manager1.tx_storage.save_transaction(invalid_tx) + self.manager1.tx_storage.indexes.mempool_tips.update(invalid_tx) + self.log.debug('YYY invalid tx injected', tx=invalid_tx.hash_hex) + + # advance until the invalid transaction is requested + for _ in range(1000): + if conn.is_empty(): + break + conn.run_one_step(debug=True) + self.clock.advance(1) + msg = conn.peek_tr2_value() + if not msg.startswith(b'GET-DATA'): + continue + request = json_loadb(msg.partition(b' ')[2]) + if request.get('origin') == 'mempool' and request['txid'] == invalid_tx.hash_hex: + break + else: + self.fail('took too many iterations') + + request_txid = request['txid'] + + # keep going until the response is sent + for _ in range(10): + if conn.is_empty(): + break + conn.run_one_step(debug=True) + self.clock.advance(1) + msg = conn.peek_tr1_value() + if not msg.startswith(b'DATA'): + continue + _, _, payload = msg.partition(b' ') + origin, _, tx_encoded = payload.partition(b' ') + self.assertEqual(origin, b'mempool') + tx_data = base64.b64decode(tx_encoded) + tx = self.manager2.vertex_parser.deserialize(tx_data) + self.assertEqual(tx.hash_hex, request_txid) + break + else: + self.fail('took too many iterations') + + # manager2 will fail to add the transaction and will start to disconnect + self.assertFalse(conn.tr2.disconnecting) + conn.run_one_step(debug=True) + self.assertTrue(conn.tr2.disconnecting) + # sync-bridge should behave like sync-v2 class SyncBridgeHathorSyncMempoolTestCase(unittest.SyncBridgeParams, SyncV2HathorSyncMempoolTestCase): diff --git a/tests/p2p/test_sync_v2.py b/tests/p2p/test_sync_v2.py index 3e8af4f8b..842f75bda 100644 --- a/tests/p2p/test_sync_v2.py +++ b/tests/p2p/test_sync_v2.py @@ -7,7 +7,7 @@ from twisted.python.failure import Failure from hathor.p2p.messages import ProtocolMessages -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PrivatePeer from hathor.p2p.states import ReadyState from hathor.p2p.sync_v2.agent import NodeBlockSync, _HeightInfo from hathor.simulator import FakeConnection @@ -66,7 +66,7 @@ def _run_restart_test(self, *, full_verification: bool, use_tx_storage_cache: bo # Create a new peer and run sync for a while (but stop before getting synced). path = self.mkdtemp() - peer = Peer() + peer = PrivatePeer.auto_generated() builder2 = self.simulator.get_default_builder() \ .set_peer(peer) \ .disable_sync_v1() \ @@ -220,7 +220,7 @@ def test_exceeds_streaming_and_mempool_limits(self) -> None: print() # Create a new peer and run sync for a while (but stop before getting synced). - peer = Peer() + peer = PrivatePeer.auto_generated() builder2 = self.simulator.get_default_builder() \ .set_peer(peer) \ .disable_sync_v1() \ @@ -311,7 +311,7 @@ def custom_gen_new_tx(manager: HathorManager, _address: str, value: int) -> Tran self.assertGreater(mempool_tips_count, 30) # Create a new peer and run sync for a while (but stop before getting synced). 
- peer = Peer() + peer = PrivatePeer.auto_generated() builder2 = self.simulator.get_default_builder() \ .set_peer(peer) \ .disable_sync_v1() \ diff --git a/tests/resources/p2p/test_add_peer.py b/tests/resources/p2p/test_add_peer.py index 79bb9fa5d..ca9ca99a2 100644 --- a/tests/resources/p2p/test_add_peer.py +++ b/tests/resources/p2p/test_add_peer.py @@ -1,7 +1,7 @@ from twisted.internet.defer import inlineCallbacks from hathor.p2p.entrypoint import Entrypoint -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PrivatePeer from hathor.p2p.resources import AddPeersResource from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest @@ -21,9 +21,9 @@ def test_connecting_peers(self): self.assertTrue(data['success']) # test when we send a peer we're already connected to - peer = Peer() + peer = PrivatePeer.auto_generated() peer.entrypoints = [Entrypoint.parse('tcp://localhost:8006')] - self.manager.connections.peer_storage.add(peer) + self.manager.connections.verified_peer_storage.add(peer) response = yield self.web.post('p2p/peers', ['tcp://localhost:8006', 'tcp://localhost:8007']) data = response.json_value() self.assertTrue(data['success']) diff --git a/tests/resources/p2p/test_status.py b/tests/resources/p2p/test_status.py index baa2c19eb..68d409348 100644 --- a/tests/resources/p2p/test_status.py +++ b/tests/resources/p2p/test_status.py @@ -17,12 +17,12 @@ def setUp(self): super().setUp() self.web = StubSite(StatusResource(self.manager)) self.entrypoint = Entrypoint.parse('tcp://192.168.1.1:54321') - self.manager.connections.my_peer.entrypoints.append(self.entrypoint) + self.manager.connections.my_peer.info.entrypoints.append(self.entrypoint) self.manager.peers_whitelist.append(self.get_random_peer_from_pool().id) self.manager.peers_whitelist.append(self.get_random_peer_from_pool().id) self.manager2 = self.create_peer('testnet') - self.manager2.connections.my_peer.entrypoints.append(self.entrypoint) + self.manager2.connections.my_peer.info.entrypoints.append(self.entrypoint) self.conn1 = FakeConnection(self.manager, self.manager2) @inlineCallbacks diff --git a/tests/simulation/test_simulator_itself.py b/tests/simulation/test_simulator_itself.py index 146f327ce..45b8046de 100644 --- a/tests/simulation/test_simulator_itself.py +++ b/tests/simulation/test_simulator_itself.py @@ -2,7 +2,7 @@ from hathor.builder import SyncSupportLevel from hathor.manager import HathorManager -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PrivatePeer from hathor.simulator import FakeConnection, Simulator from tests import unittest @@ -42,7 +42,7 @@ def tearDown(self) -> None: def create_simulator_peer( self, simulator: Simulator, - peer_pool: list[Peer], + peer_pool: list[PrivatePeer], enable_sync_v1: bool | None = None, enable_sync_v2: bool | None = None ) -> HathorManager: diff --git a/tests/unittest.py b/tests/unittest.py index fc5ec27f1..4b932eb52 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -17,7 +17,7 @@ from hathor.event import EventManager from hathor.event.storage import EventStorage from hathor.manager import HathorManager -from hathor.p2p.peer import Peer +from hathor.p2p.peer import PrivatePeer from hathor.p2p.sync_v1.agent import NodeSyncTimestamp from hathor.p2p.sync_v2.agent import NodeBlockSync from hathor.p2p.sync_version import SyncVersion @@ -40,7 +40,7 @@ def short_hashes(container: Collection[bytes]) -> Iterable[str]: return map(lambda hash_bytes: hash_bytes[-2:].hex(), container) -def _load_peer_pool(file_path: 
Optional[str] = None) -> Iterator[Peer]: +def _load_peer_pool(file_path: Optional[str] = None) -> Iterator[PrivatePeer]: import json if file_path is None: @@ -49,7 +49,7 @@ def _load_peer_pool(file_path: Optional[str] = None) -> Iterator[Peer]: with open(file_path) as peer_id_pool_file: peer_id_pool_dict = json.load(peer_id_pool_file) for peer_id_dict in peer_id_pool_dict: - yield Peer.create_from_json(peer_id_dict) + yield PrivatePeer.create_from_json(peer_id_dict) def _get_default_peer_id_pool_filepath() -> str: @@ -97,10 +97,10 @@ def build(self) -> BuildArtifacts: artifacts.manager.connections.disable_rate_limiter() return artifacts - def _get_peer(self) -> Peer: + def _get_peer(self) -> PrivatePeer: if self._peer is not None: return self._peer - return Peer() + return PrivatePeer.auto_generated() def _get_reactor(self) -> Reactor: if self._reactor is None: @@ -135,10 +135,14 @@ def tearDown(self) -> None: def reset_peer_pool(self) -> None: self._free_peer_pool = self.new_peer_pool() - def new_peer_pool(self) -> list[Peer]: + def new_peer_pool(self) -> list[PrivatePeer]: return PEER_ID_POOL.copy() - def get_random_peer_from_pool(self, pool: Optional[list[Peer]] = None, rng: Optional[Random] = None) -> Peer: + def get_random_peer_from_pool( + self, + pool: Optional[list[PrivatePeer]] = None, + rng: Optional[Random] = None, + ) -> PrivatePeer: if pool is None: pool = self._free_peer_pool if not pool: @@ -193,7 +197,7 @@ def create_peer_from_builder(self, builder: Builder, start_manager: bool = True) def create_peer( # type: ignore[no-untyped-def] self, network: str, - peer: Peer | None = None, + peer: PrivatePeer | None = None, wallet: BaseWallet | None = None, tx_storage: TransactionStorage | None = None, unlock_wallet: bool = True, @@ -225,7 +229,7 @@ def create_peer( # type: ignore[no-untyped-def] builder.set_pubsub(pubsub) if peer is None: - peer = Peer() + peer = PrivatePeer.auto_generated() builder.set_peer(peer) if not wallet: From 22f478c0c8f0fe6acb276980c3482f5105b4e20b Mon Sep 17 00:00:00 2001 From: Luis Helder Date: Mon, 7 Oct 2024 19:53:43 -0300 Subject: [PATCH 31/61] chore: collect garbage collector metrics (#1137) --- hathor/prometheus.py | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/hathor/prometheus.py b/hathor/prometheus.py index ac5ed653f..5418c44d4 100644 --- a/hathor/prometheus.py +++ b/hathor/prometheus.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import gc import os from typing import TYPE_CHECKING @@ -61,6 +62,14 @@ 'total_sst_files_size': 'Storage size in bytes of all SST files of a certain column-family in RocksDB' } +GC_METRICS = { + 'objects': 'Number of objects tracked by the garbage collector', + 'collections': 'Number of collections done by the garbage collector', + 'collected': 'Number of objects collected by the garbage collector', + 'uncollectable': 'Number of objects that could not be collected by the garbage collector', + 'threshold': 'Current threshold of the garbage collector', +} + class PrometheusMetricsExporter: """ Class that sends hathor metrics to a node exporter that will be read by Prometheus @@ -112,6 +121,7 @@ def _initial_setup(self) -> None: self._initialize_peer_connection_metrics() self._initialize_tx_storage_metrics() + self._initialize_garbage_collection_metrics() for name, comment in METRIC_INFO.items(): self.metric_gauges[name] = Gauge(self.metrics_prefix + name, comment, registry=self.registry) @@ -147,6 +157,22 @@ def _initialize_tx_storage_metrics(self) -> None: ) for name, description in TX_STORAGE_METRICS.items() } + def _initialize_garbage_collection_metrics(self) -> None: + """Initializes the metrics related to garbage collection + """ + gc_labels = ["generation"] + + prefix = self.metrics_prefix + "python_gc_" + + self.gc_metrics = { + name: Gauge( + prefix + name, + description, + labelnames=gc_labels, + registry=self.registry + ) for name, description in GC_METRICS.items() + } + def start(self) -> None: """ Starts exporter """ @@ -161,6 +187,7 @@ def set_new_metrics(self) -> None: self._set_rocksdb_tx_storage_metrics() self._set_new_peer_connection_metrics() + self._set_garbage_collection_metrics() write_to_textfile(self.filepath, self.registry) @@ -179,6 +206,18 @@ def _set_new_peer_connection_metrics(self) -> None: connection_string=connection_metric.connection_string ).set(getattr(connection_metric, name)) + def _set_garbage_collection_metrics(self) -> None: + counts = gc.get_count() + stats = gc.get_stats() + threshold = gc.get_threshold() + + for i in range(3): + self.gc_metrics['objects'].labels(generation=i).set(counts[i]) + self.gc_metrics['collections'].labels(generation=i).set(stats[i]['collections']) + self.gc_metrics['collected'].labels(generation=i).set(stats[i]['collected']) + self.gc_metrics['uncollectable'].labels(generation=i).set(stats[i]['uncollectable']) + self.gc_metrics['threshold'].labels(generation=i).set(threshold[i]) + def _write_data(self) -> None: """ Update all metric data with new values Write new data to file From d85f88cf5da409e4585714f0d235c8e917956dc5 Mon Sep 17 00:00:00 2001 From: Luis Helder Date: Tue, 8 Oct 2024 13:03:23 -0300 Subject: [PATCH 32/61] chore[websocket]: improve websocket factory start (#1145) --- hathor/builder/resources_builder.py | 5 ++--- hathor/builder/sysctl_builder.py | 2 +- hathor/cli/run_node.py | 3 ++- hathor/manager.py | 17 ++++++++++++++++- hathor/metrics.py | 2 ++ tests/cli/test_run_node.py | 16 ++++++++++++++++ tests/cli/test_sysctl_init.py | 6 +++--- 7 files changed, 42 insertions(+), 9 deletions(-) diff --git a/hathor/builder/resources_builder.py b/hathor/builder/resources_builder.py index ce453aef6..f067cc3d9 100644 --- a/hathor/builder/resources_builder.py +++ b/hathor/builder/resources_builder.py @@ -297,7 +297,6 @@ def create_resources(self) -> server.Site: address_index=self.manager.tx_storage.indexes.addresses) if self._args.disable_ws_history_streaming: ws_factory.disable_history_streaming() - ws_factory.start() 
root.putChild(b'ws', WebSocketResource(ws_factory)) if settings.CONSENSUS_ALGORITHM.is_pow(): @@ -322,8 +321,8 @@ def create_resources(self) -> server.Site: status_server = SiteProfiler(real_root) self.log.info('with status', listen=self._args.status, with_wallet_api=with_wallet_api) - # Set websocket factory in metrics - self.manager.metrics.websocket_factory = ws_factory + # Set websocket factory in metrics. It'll be started when the manager is started. + self.manager.websocket_factory = ws_factory self._built_status = True return status_server diff --git a/hathor/builder/sysctl_builder.py b/hathor/builder/sysctl_builder.py index 0b2131ad8..206547458 100644 --- a/hathor/builder/sysctl_builder.py +++ b/hathor/builder/sysctl_builder.py @@ -38,7 +38,7 @@ def build(self) -> Sysctl: root.put_child('core', core) root.put_child('p2p', ConnectionsManagerSysctl(self.artifacts.p2p_manager)) - ws_factory = self.artifacts.manager.metrics.websocket_factory + ws_factory = self.artifacts.manager.websocket_factory if ws_factory is not None: root.put_child('ws', WebsocketManagerSysctl(ws_factory)) diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 88489fa8a..9dbc3005c 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -198,7 +198,6 @@ def prepare(self, *, register_resources: bool = True) -> None: self.tx_storage = self.manager.tx_storage self.wallet = self.manager.wallet - self.start_manager() if self._args.stratum: assert self.manager.stratum_factory is not None @@ -219,6 +218,8 @@ def prepare(self, *, register_resources: bool = True) -> None: assert status_server is not None self.reactor.listenTCP(self._args.status, status_server) + self.start_manager() + from hathor.builder.builder import BuildArtifacts self.artifacts = BuildArtifacts( peer=self.manager.my_peer, diff --git a/hathor/manager.py b/hathor/manager.py index 7e702c73c..1edcf7592 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -17,7 +17,7 @@ import time from cProfile import Profile from enum import Enum -from typing import Iterator, NamedTuple, Optional, Union +from typing import TYPE_CHECKING, Iterator, NamedTuple, Optional, Union from hathorlib.base_transaction import tx_or_block_from_bytes as lib_tx_or_block_from_bytes from structlog import get_logger @@ -65,6 +65,9 @@ from hathor.vertex_handler import VertexHandler from hathor.wallet import BaseWallet +if TYPE_CHECKING: + from hathor.websocket.factory import HathorAdminWebsocketFactory + logger = get_logger() cpu = get_cpu_profiler() @@ -117,6 +120,8 @@ def __init__( full_verification: bool = False, enable_event_queue: bool = False, poa_block_producer: PoaBlockProducer | None = None, + # Websocket factory + websocket_factory: Optional['HathorAdminWebsocketFactory'] = None ) -> None: """ :param reactor: Twisted reactor which handles the mainloop and the events. 
@@ -199,12 +204,15 @@ def __init__( self.vertex_handler = vertex_handler self.vertex_parser = vertex_parser + self.websocket_factory = websocket_factory + self.metrics = Metrics( pubsub=self.pubsub, avg_time_between_blocks=settings.AVG_TIME_BETWEEN_BLOCKS, connections=self.connections, tx_storage=self.tx_storage, reactor=self.reactor, + websocket_factory=self.websocket_factory, ) self.wallet = wallet @@ -322,6 +330,10 @@ def start(self) -> None: self.tx_storage.set_allow_scope(TxAllowScope.VALID) self.tx_storage.enable_lock() + # Preferably start before self.metrics + if self.websocket_factory: + self.websocket_factory.start() + # Metric starts to capture data self.metrics.start() @@ -360,6 +372,9 @@ def stop(self) -> Deferred: # Metric stops to capture data self.metrics.stop() + if self.websocket_factory: + self.websocket_factory.stop() + if self.lc_check_sync_state.running: self.lc_check_sync_state.stop() diff --git a/hathor/metrics.py b/hathor/metrics.py index cc72ce9e0..0f3296857 100644 --- a/hathor/metrics.py +++ b/hathor/metrics.py @@ -217,6 +217,8 @@ def set_websocket_data(self) -> None: """ Set websocket metrics data. Connections and addresses subscribed. """ if self.websocket_factory: + assert self.websocket_factory.is_running, 'Websocket factory has not been started' + self.websocket_connections = len(self.websocket_factory.connections) self.subscribed_addresses = len(self.websocket_factory.address_connections) diff --git a/tests/cli/test_run_node.py b/tests/cli/test_run_node.py index d613f8163..3b72a2592 100644 --- a/tests/cli/test_run_node.py +++ b/tests/cli/test_run_node.py @@ -1,3 +1,5 @@ +from unittest.mock import ANY, patch + from hathor.cli.run_node import RunNode from tests import unittest @@ -15,3 +17,17 @@ def register_signal_handlers(self) -> None: run_node = CustomRunNode(argv=['--memory-storage']) self.assertTrue(run_node is not None) + + @patch('twisted.internet.reactor.listenTCP') + def test_listen_tcp_ipv4(self, mock_listenTCP): + class CustomRunNode(RunNode): + def start_manager(self) -> None: + pass + + def register_signal_handlers(self) -> None: + pass + + run_node = CustomRunNode(argv=['--memory-storage', '--status', '1234']) + self.assertTrue(run_node is not None) + + mock_listenTCP.assert_called_with(1234, ANY) diff --git a/tests/cli/test_sysctl_init.py b/tests/cli/test_sysctl_init.py index b71da9d1e..2063d7f76 100644 --- a/tests/cli/test_sysctl_init.py +++ b/tests/cli/test_sysctl_init.py @@ -39,7 +39,7 @@ def test_sysctl_builder_fail_with_invalid_property(self): # prepare to register only p2p commands artifacts = Mock(**{ 'p2p_manager': Mock(), - 'manager.metrics.websocket_factory.return_value': None + 'manager.websocket_factory.return_value': None }) with self.assertRaises(SysctlEntryNotFound) as context: @@ -68,7 +68,7 @@ def test_sysctl_builder_fail_with_invalid_value(self): # prepare to register only p2p commands artifacts = Mock(**{ 'p2p_manager': Mock(), - 'manager.metrics.websocket_factory.return_value': None + 'manager.websocket_factory.return_value': None }) with self.assertRaises(SysctlRunnerException) as context: @@ -85,7 +85,7 @@ def test_syctl_init_file_fail_with_empty_or_invalid_file(self): # prepare to register only p2p commands artifacts = Mock(**{ 'p2p_manager': Mock(), - 'manager.metrics.websocket_factory.return_value': None + 'manager.websocket_factory.return_value': None }) with self.assertRaises(AssertionError): From 4c13fdd7e27ac5ee8c82a82642b2fd8a7c668422 Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Tue, 8 Oct 2024 18:27:33 +0200 
Subject: [PATCH 33/61] fix(cli): subcommand quick_test not stopping after #1115 --- hathor/cli/quick_test.py | 80 +++++++++++++++++++++++++--------------- 1 file changed, 50 insertions(+), 30 deletions(-) diff --git a/hathor/cli/quick_test.py b/hathor/cli/quick_test.py index 1ba6fd0ff..2bf6f16fe 100644 --- a/hathor/cli/quick_test.py +++ b/hathor/cli/quick_test.py @@ -16,11 +16,49 @@ from argparse import ArgumentParser from typing import Any +from structlog import get_logger + from hathor.cli.run_node import RunNode +logger = get_logger() -class QuickTest(RunNode): +class VertexHandlerWrapper: + def __init__(self, vertex_handler, manager, n_blocks): + self.log = logger.new() + self._vertex_handler = vertex_handler + self._manager = manager + self._n_blocks = n_blocks + + def on_new_vertex(self, *args: Any, **kwargs: Any) -> bool: + from hathor.transaction import Block + from hathor.transaction.base_transaction import GenericVertex + + msg: str | None = None + res = self._vertex_handler.on_new_vertex(*args, **kwargs) + + if self._n_blocks is None: + should_quit = res + msg = 'added a tx' + else: + vertex = args[0] + should_quit = False + assert isinstance(vertex, GenericVertex) + + if isinstance(vertex, Block): + should_quit = vertex.get_height() >= self._n_blocks + msg = f'reached height {vertex.get_height()}' + + if should_quit: + assert msg is not None + self.log.info(f'successfully {msg}, exit now') + self._manager.connections.disconnect_all_peers(force=True) + self._manager.reactor.fireSystemEvent('shutdown') + os._exit(0) + return res + + +class QuickTest(RunNode): @classmethod def create_parser(cls) -> ArgumentParser: parser = super().create_parser() @@ -30,38 +68,20 @@ def create_parser(cls) -> ArgumentParser: return parser def prepare(self, *, register_resources: bool = True) -> None: - from hathor.transaction import Block - from hathor.transaction.base_transaction import GenericVertex + from hathor.p2p.sync_v2.factory import SyncV2Factory + from hathor.p2p.sync_version import SyncVersion + super().prepare(register_resources=False) self._no_wait = self._args.no_wait - self.log.info('patching on_new_tx to quit on success') - orig_on_new_tx = self.manager.on_new_tx - - def patched_on_new_tx(*args: Any, **kwargs: Any) -> bool: - res = orig_on_new_tx(*args, **kwargs) - msg: str | None = None - - if self._args.quit_after_n_blocks is None: - should_quit = res - msg = 'added a tx' - else: - vertex = args[0] - should_quit = False - assert isinstance(vertex, GenericVertex) - - if isinstance(vertex, Block): - should_quit = vertex.get_height() >= self._args.quit_after_n_blocks - msg = f'reached height {vertex.get_height()}' - - if should_quit: - assert msg is not None - self.log.info(f'successfully {msg}, exit now') - self.manager.connections.disconnect_all_peers(force=True) - self.reactor.fireSystemEvent('shutdown') - os._exit(0) - return res - self.manager.on_new_tx = patched_on_new_tx + self.log.info('patching vertex_handler.on_new_vertex to quit on success') + p2p_factory = self.manager.connections.get_sync_factory(SyncVersion.V2) + assert isinstance(p2p_factory, SyncV2Factory) + p2p_factory.vertex_handler = VertexHandlerWrapper( + self.manager.vertex_handler, + self.manager, + self._args.quit_after_n_blocks, + ) # type: ignore timeout = 300 self.log.info('exit with error code if it take too long', timeout=timeout) From 6a4c56228360ea6d9157b8b4b2ec6271c3dcaab8 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Mon, 14 Oct 2024 00:37:05 -0300 Subject: [PATCH 34/61] 
refactor(static-metadata): undo migration of feature_states to static metadata (#1146) --- hathor/transaction/static_metadata.py | 2 +- hathor/transaction/storage/rocksdb_storage.py | 2 +- hathor/transaction/transaction_metadata.py | 2 +- .../test_feature_service.py | 34 ++ .../test_feature_simulation.py | 577 ++++++++++-------- 5 files changed, 364 insertions(+), 253 deletions(-) diff --git a/hathor/transaction/static_metadata.py b/hathor/transaction/static_metadata.py index d52c435e8..09cdf98dd 100644 --- a/hathor/transaction/static_metadata.py +++ b/hathor/transaction/static_metadata.py @@ -100,7 +100,7 @@ def create( height=height, min_height=min_height, feature_activation_bit_counts=feature_activation_bit_counts, - feature_states={}, # This will be populated in the next PR + feature_states={}, # This will be populated in a future PR ) @staticmethod diff --git a/hathor/transaction/storage/rocksdb_storage.py b/hathor/transaction/storage/rocksdb_storage.py index efe8f607e..38df6ef30 100644 --- a/hathor/transaction/storage/rocksdb_storage.py +++ b/hathor/transaction/storage/rocksdb_storage.py @@ -263,7 +263,7 @@ def migrate_static_metadata(self, log: BoundLogger) -> None: height=height, min_height=min_height, feature_activation_bit_counts=bit_counts, - feature_states={}, # This will be populated in the next PR + feature_states={}, # This will be populated in a future PR ) else: assert bit_counts is None or bit_counts == [] diff --git a/hathor/transaction/transaction_metadata.py b/hathor/transaction/transaction_metadata.py index db4279e96..e083d4d86 100644 --- a/hathor/transaction/transaction_metadata.py +++ b/hathor/transaction/transaction_metadata.py @@ -304,7 +304,7 @@ def to_bytes(self) -> bytes: del json_dict['min_height'] if 'feature_activation_bit_counts' in json_dict: del json_dict['feature_activation_bit_counts'] - # TODO: This one has not been migrated yet, but will be in the next PR + # TODO: This one has not been migrated yet, but will be in a future PR # if 'feature_states' in json_dict: # del json_dict['feature_states'] diff --git a/tests/feature_activation/test_feature_service.py b/tests/feature_activation/test_feature_service.py index a1c2f1549..cb8546bb1 100644 --- a/tests/feature_activation/test_feature_service.py +++ b/tests/feature_activation/test_feature_service.py @@ -393,6 +393,40 @@ def test_get_state_from_active(block_height: int) -> None: assert result == FeatureState.ACTIVE +@pytest.mark.parametrize('block_height', [16, 17, 18, 19]) +def test_caching_mechanism(block_height: int) -> None: + features = { + Feature.NOP_FEATURE_1: Criteria.construct( + bit=3, + start_height=0, + timeout_height=8, + lock_in_on_timeout=True, + version=Mock() + ) + } + settings = get_settings(features=features) + storage = get_storage(settings, up_to_height=block_height) + service = FeatureService( + settings=settings, + tx_storage=storage + ) + service.bit_signaling_service = Mock() + block = not_none(storage.get_block_by_height(block_height)) + calculate_new_state_mock = Mock(wraps=service._calculate_new_state) + + with patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock): + result1 = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) + + assert result1 == FeatureState.ACTIVE + assert calculate_new_state_mock.call_count == 4 + + calculate_new_state_mock.reset_mock() + result2 = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) + + assert result2 == FeatureState.ACTIVE + assert calculate_new_state_mock.call_count == 0 + + 
@pytest.mark.parametrize('block_height', [16, 17, 18, 19]) def test_is_feature_active(block_height: int) -> None: features = { diff --git a/tests/feature_activation/test_feature_simulation.py b/tests/feature_activation/test_feature_simulation.py index 5aac3a4de..6bbeb9e35 100644 --- a/tests/feature_activation/test_feature_simulation.py +++ b/tests/feature_activation/test_feature_simulation.py @@ -13,12 +13,14 @@ # limitations under the License. from typing import Any +from unittest.mock import Mock, patch import pytest from hathor.builder import Builder from hathor.conf.get_settings import get_global_settings from hathor.feature_activation.feature import Feature +from hathor.feature_activation.feature_service import FeatureService from hathor.feature_activation.model.criteria import Criteria from hathor.feature_activation.resources.feature import FeatureResource from hathor.feature_activation.settings import Settings as FeatureSettings @@ -47,9 +49,15 @@ def _get_result(web_client: StubSite) -> dict[str, Any]: return result + @staticmethod + def _calculate_new_state_mock_block_height_calls(calculate_new_state_mock: Mock) -> list[int]: + """Return the heights of blocks that calculate_new_state_mock was called with.""" + return [call.kwargs['boundary_block'].get_height() for call in calculate_new_state_mock.call_args_list] + def test_feature(self) -> None: """ - Tests that a feature goes through all possible states in the correct block heights. + Tests that a feature goes through all possible states in the correct block heights, and also assert internal + method calls to make sure we're executing it in the intended, most performatic way. """ feature_settings = FeatureSettings( evaluation_interval=4, @@ -80,207 +88,252 @@ def test_feature(self) -> None: ) web_client = StubSite(feature_resource) - # at the beginning, the feature is DEFINED: - add_new_blocks(manager, 10) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=10, - features=[ - dict( - name='NOP_FEATURE_1', - state='DEFINED', - acceptance=None, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - - # at block 19, the feature is DEFINED, just before becoming STARTED: - add_new_blocks(manager, 9) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=19, - features=[ - dict( - name='NOP_FEATURE_1', - state='DEFINED', - acceptance=None, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - - # at block 20, the feature becomes STARTED: - add_new_blocks(manager, 1) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=20, - features=[ - dict( - name='NOP_FEATURE_1', - state='STARTED', - acceptance=0, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - - # at block 55, the feature is STARTED, just before becoming MUST_SIGNAL: - add_new_blocks(manager, 35) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=55, - features=[ - dict( - name='NOP_FEATURE_1', - state='STARTED', - acceptance=0, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - - # at block 56, 
the feature becomes MUST_SIGNAL: - add_new_blocks(manager, 1) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=56, - features=[ - dict( - name='NOP_FEATURE_1', - state='MUST_SIGNAL', - acceptance=0, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - - add_new_blocks(manager, 1, signal_bits=0b1) - - # if we try to propagate a non-signaling block, it is not accepted - non_signaling_block = manager.generate_mining_block() - manager.cpu_mining_service.resolve(non_signaling_block) - non_signaling_block.signal_bits = 0b10 - non_signaling_block.init_static_metadata_from_storage(settings, manager.tx_storage) - - with pytest.raises(BlockMustSignalError): - manager.verification_service.verify(non_signaling_block) - - assert not manager.propagate_tx(non_signaling_block) - - # at block 59, the feature is MUST_SIGNAL, just before becoming LOCKED_IN: - add_new_blocks(manager, num_blocks=2, signal_bits=0b1) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=59, - features=[ - dict( - name='NOP_FEATURE_1', - state='MUST_SIGNAL', - acceptance=0.75, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - - # at block 60, the feature becomes LOCKED_IN: - add_new_blocks(manager, 1) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=60, - features=[ - dict( - name='NOP_FEATURE_1', - state='LOCKED_IN', - acceptance=None, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - - # at block 71, the feature is LOCKED_IN, just before becoming ACTIVE: - add_new_blocks(manager, 11) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=71, - features=[ - dict( - name='NOP_FEATURE_1', - state='LOCKED_IN', - acceptance=None, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - - # at block 72, the feature becomes ACTIVE, forever: - add_new_blocks(manager, 1) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=72, - features=[ - dict( - name='NOP_FEATURE_1', - state='ACTIVE', - acceptance=None, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=72, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) + calculate_new_state_mock = Mock(wraps=feature_service._calculate_new_state) + get_ancestor_iteratively_mock = Mock(wraps=feature_service._get_ancestor_iteratively) + + with ( + patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock), + patch.object(FeatureService, '_get_ancestor_iteratively', get_ancestor_iteratively_mock), + ): + # at the beginning, the feature is DEFINED: + add_new_blocks(manager, 10) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=10, + features=[ + dict( + name='NOP_FEATURE_1', + state='DEFINED', + acceptance=None, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + # so we calculate states all the way down to the first evaluation boundary (after 
genesis): + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 4 + # no blocks are voided, so we only use the height index, and not get_ancestor_iteratively: + assert get_ancestor_iteratively_mock.call_count == 0 + calculate_new_state_mock.reset_mock() + + # at block 19, the feature is DEFINED, just before becoming STARTED: + add_new_blocks(manager, 9) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=19, + features=[ + dict( + name='NOP_FEATURE_1', + state='DEFINED', + acceptance=None, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + # so we calculate states down to block 12, as block 8's state is saved: + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 12 + assert get_ancestor_iteratively_mock.call_count == 0 + calculate_new_state_mock.reset_mock() + + # at block 20, the feature becomes STARTED: + add_new_blocks(manager, 1) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=20, + features=[ + dict( + name='NOP_FEATURE_1', + state='STARTED', + acceptance=0, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 20 + assert get_ancestor_iteratively_mock.call_count == 0 + + # we add one block before resetting the mock, just to make sure block 20 gets a chance to be saved + add_new_blocks(manager, 1) + calculate_new_state_mock.reset_mock() + + # at block 55, the feature is STARTED, just before becoming MUST_SIGNAL: + add_new_blocks(manager, 34) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=55, + features=[ + dict( + name='NOP_FEATURE_1', + state='STARTED', + acceptance=0, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 24 + assert get_ancestor_iteratively_mock.call_count == 0 + calculate_new_state_mock.reset_mock() + + # at block 56, the feature becomes MUST_SIGNAL: + add_new_blocks(manager, 1) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=56, + features=[ + dict( + name='NOP_FEATURE_1', + state='MUST_SIGNAL', + acceptance=0, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 56 + assert get_ancestor_iteratively_mock.call_count == 0 + + # we add one block before resetting the mock, just to make sure block 56 gets a chance to be saved + add_new_blocks(manager, 1, signal_bits=0b1) + calculate_new_state_mock.reset_mock() + + # if we try to propagate a non-signaling block, it is not accepted + non_signaling_block = manager.generate_mining_block() + manager.cpu_mining_service.resolve(non_signaling_block) + non_signaling_block.signal_bits = 0b10 + non_signaling_block.init_static_metadata_from_storage(settings, manager.tx_storage) + + with pytest.raises(BlockMustSignalError): + manager.verification_service.verify(non_signaling_block) + + 
assert not manager.propagate_tx(non_signaling_block) + + # at block 59, the feature is MUST_SIGNAL, just before becoming LOCKED_IN: + add_new_blocks(manager, num_blocks=2, signal_bits=0b1) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=59, + features=[ + dict( + name='NOP_FEATURE_1', + state='MUST_SIGNAL', + acceptance=0.75, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + # we don't need to calculate any new state, as block 56's state is saved: + assert len(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 0 + assert get_ancestor_iteratively_mock.call_count == 0 + calculate_new_state_mock.reset_mock() + + # at block 60, the feature becomes LOCKED_IN: + add_new_blocks(manager, 1) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=60, + features=[ + dict( + name='NOP_FEATURE_1', + state='LOCKED_IN', + acceptance=None, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 60 + assert get_ancestor_iteratively_mock.call_count == 0 + + # we add one block before resetting the mock, just to make sure block 60 gets a chance to be saved + add_new_blocks(manager, 1) + calculate_new_state_mock.reset_mock() + + # at block 71, the feature is LOCKED_IN, just before becoming ACTIVE: + add_new_blocks(manager, 10) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=71, + features=[ + dict( + name='NOP_FEATURE_1', + state='LOCKED_IN', + acceptance=None, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 64 + assert get_ancestor_iteratively_mock.call_count == 0 + calculate_new_state_mock.reset_mock() + + # at block 72, the feature becomes ACTIVE, forever: + add_new_blocks(manager, 1) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=72, + features=[ + dict( + name='NOP_FEATURE_1', + state='ACTIVE', + acceptance=None, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=72, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 72 + assert get_ancestor_iteratively_mock.call_count == 0 + calculate_new_state_mock.reset_mock() def test_reorg(self) -> None: feature_settings = FeatureSettings( @@ -526,30 +579,43 @@ def test_feature_from_existing_storage(self) -> None: ) web_client = StubSite(feature_resource) - assert artifacts1.tx_storage.get_vertices_count() == 3 # genesis vertices in the storage - - # we add 64 blocks so the feature becomes active. It would be active by timeout anyway, - # we just set signal bits to conform with the MUST_SIGNAL phase. 
- add_new_blocks(manager1, 64, signal_bits=0b1) - self.simulator.run(60) - result = self._get_result(web_client) - assert result == dict( - block_height=64, - features=[ - dict( - name='NOP_FEATURE_1', - state='ACTIVE', - acceptance=None, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=0, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - assert artifacts1.tx_storage.get_vertices_count() == 67 + calculate_new_state_mock = Mock(wraps=feature_service1._calculate_new_state) + get_ancestor_iteratively_mock = Mock(wraps=feature_service1._get_ancestor_iteratively) + + with ( + patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock), + patch.object(FeatureService, '_get_ancestor_iteratively', get_ancestor_iteratively_mock), + ): + assert artifacts1.tx_storage.get_vertices_count() == 3 # genesis vertices in the storage + + # we add 64 blocks so the feature becomes active. It would be active by timeout anyway, + # we just set signal bits to conform with the MUST_SIGNAL phase. + add_new_blocks(manager1, 64, signal_bits=0b1) + self.simulator.run(60) + result = self._get_result(web_client) + assert result == dict( + block_height=64, + features=[ + dict( + name='NOP_FEATURE_1', + state='ACTIVE', + acceptance=None, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=0, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + # feature states have to be calculated for all blocks in evaluation interval boundaries, + # down to the first one (after genesis), as this is the first run: + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 4 + # no blocks are voided, so we only use the height index: + assert get_ancestor_iteratively_mock.call_count == 0 + assert artifacts1.tx_storage.get_vertices_count() == 67 + calculate_new_state_mock.reset_mock() manager1.stop() not_none(artifacts1.rocksdb_storage).close() @@ -566,30 +632,41 @@ def test_feature_from_existing_storage(self) -> None: ) web_client = StubSite(feature_resource) - # the new storage starts populated - assert artifacts2.tx_storage.get_vertices_count() == 67 - self.simulator.run(60) - - result = self._get_result(web_client) - - # the result should be the same as before - assert result == dict( - block_height=64, - features=[ - dict( - name='NOP_FEATURE_1', - state='ACTIVE', - acceptance=None, - threshold=0.75, - start_height=20, - timeout_height=60, - minimum_activation_height=0, - lock_in_on_timeout=True, - version='0.0.0' - ) - ] - ) - assert artifacts2.tx_storage.get_vertices_count() == 67 + calculate_new_state_mock = Mock(wraps=feature_service._calculate_new_state) + get_ancestor_iteratively_mock = Mock(wraps=feature_service._get_ancestor_iteratively) + + with ( + patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock), + patch.object(FeatureService, '_get_ancestor_iteratively', get_ancestor_iteratively_mock), + ): + # the new storage starts populated + assert artifacts2.tx_storage.get_vertices_count() == 67 + self.simulator.run(60) + + result = self._get_result(web_client) + + # the result should be the same as before + assert result == dict( + block_height=64, + features=[ + dict( + name='NOP_FEATURE_1', + state='ACTIVE', + acceptance=None, + threshold=0.75, + start_height=20, + timeout_height=60, + minimum_activation_height=0, + lock_in_on_timeout=True, + version='0.0.0' + ) + ] + ) + # features states are not calculate for any block, as they're all saved: + assert 
len(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 0 + assert get_ancestor_iteratively_mock.call_count == 0 + assert artifacts2.tx_storage.get_vertices_count() == 67 + calculate_new_state_mock.reset_mock() class SyncV1MemoryStorageFeatureSimulationTest(unittest.SyncV1Params, BaseMemoryStorageFeatureSimulationTest): From 9c1fe88a7877cc4a9b7518ca86b1ab14a98269b7 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Mon, 14 Oct 2024 16:10:53 -0300 Subject: [PATCH 35/61] refactor(p2p): simplify factory (#1151) --- hathor/builder/builder.py | 15 +---- hathor/builder/cli_builder.py | 5 +- .../cli/events_simulator/events_simulator.py | 3 +- hathor/event/websocket/factory.py | 5 +- hathor/manager.py | 5 +- hathor/metrics.py | 2 +- hathor/p2p/factory.py | 64 ++++--------------- hathor/p2p/manager.py | 9 +-- hathor/p2p/protocol.py | 3 - hathor/p2p/states/hello.py | 4 +- hathor/simulator/simulator.py | 2 - hathor/transaction/block.py | 1 + hathor/transaction/storage/__init__.py | 2 + hathor/transaction/storage/traversal.py | 63 +++++++++++++++--- tests/cli/test_events_simulator.py | 4 +- tests/event/websocket/test_factory.py | 7 +- tests/others/test_metrics.py | 5 +- tests/p2p/test_bootstrap.py | 4 +- tests/poa/test_poa_verification.py | 2 +- tests/tx/test_cache_storage.py | 2 +- tests/unittest.py | 11 ++-- 21 files changed, 101 insertions(+), 117 deletions(-) diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index e6e97dbd5..46aaad22f 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -143,7 +143,6 @@ def __init__(self) -> None: self._capabilities: Optional[list[str]] = None self._peer: Optional[PrivatePeer] = None - self._network: Optional[str] = None self._cmdline: str = '' self._storage_type: StorageType = StorageType.MEMORY @@ -207,9 +206,6 @@ def build(self) -> BuildArtifacts: if self.artifacts is not None: raise ValueError('cannot call build twice') - if self._network is None: - raise TypeError('you must set a network') - if SyncSupportLevel.ENABLED not in {self._sync_v1_support, self._sync_v2_support}: raise TypeError('you must enable at least one sync version') @@ -257,7 +253,6 @@ def build(self) -> BuildArtifacts: manager = HathorManager( reactor, settings=settings, - network=self._network, pubsub=pubsub, consensus_algorithm=consensus_algorithm, daa=daa, @@ -423,12 +418,9 @@ def _get_or_create_p2p_manager(self) -> ConnectionsManager: reactor = self._get_reactor() my_peer = self._get_peer() - assert self._network is not None - self._p2p_manager = ConnectionsManager( settings=self._get_or_create_settings(), reactor=reactor, - network=self._network, my_peer=my_peer, pubsub=self._get_or_create_pubsub(), ssl=enable_ssl, @@ -522,7 +514,7 @@ def _get_or_create_event_manager(self) -> EventManager: storage = self._get_or_create_event_storage() factory = EventWebsocketFactory( peer_id=str(peer.id), - network=settings.NETWORK_NAME, + settings=settings, reactor=reactor, event_storage=storage, ) @@ -776,11 +768,6 @@ def set_pubsub(self, pubsub: PubSubManager) -> 'Builder': self._pubsub = pubsub return self - def set_network(self, network: str) -> 'Builder': - self.check_if_can_modify() - self._network = network - return self - def set_sync_v1_support(self, support_level: SyncSupportLevel) -> 'Builder': self.check_if_can_modify() self._sync_v1_support = support_level diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index 1b4cf2e80..493d31b6d 100644 --- a/hathor/builder/cli_builder.py +++ 
b/hathor/builder/cli_builder.py @@ -197,7 +197,6 @@ def create_manager(self, reactor: Reactor) -> HathorManager: self.log.info('with wallet', wallet=self.wallet, path=self._args.data) hostname = self.get_hostname() - network = settings.NETWORK_NAME sync_choice: SyncChoice if self._args.sync_bridge: @@ -245,7 +244,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: if self._args.x_enable_event_queue: self.event_ws_factory = EventWebsocketFactory( peer_id=str(peer.id), - network=network, + settings=settings, reactor=reactor, event_storage=event_storage ) @@ -322,7 +321,6 @@ def create_manager(self, reactor: Reactor) -> HathorManager: p2p_manager = ConnectionsManager( settings=settings, reactor=reactor, - network=network, my_peer=peer, pubsub=pubsub, ssl=True, @@ -367,7 +365,6 @@ def create_manager(self, reactor: Reactor) -> HathorManager: self.manager = HathorManager( reactor, settings=settings, - network=network, hostname=hostname, pubsub=pubsub, consensus_algorithm=consensus_algorithm, diff --git a/hathor/cli/events_simulator/events_simulator.py b/hathor/cli/events_simulator/events_simulator.py index 135a95296..897c57bf3 100644 --- a/hathor/cli/events_simulator/events_simulator.py +++ b/hathor/cli/events_simulator/events_simulator.py @@ -48,6 +48,7 @@ def execute(args: Namespace, reactor: 'ReactorProtocol') -> None: os.environ['HATHOR_CONFIG_YAML'] = UNITTESTS_SETTINGS_FILEPATH from hathor.cli.events_simulator.event_forwarding_websocket_factory import EventForwardingWebsocketFactory from hathor.cli.events_simulator.scenario import Scenario + from hathor.conf.get_settings import get_global_settings from hathor.simulator import Simulator try: @@ -70,7 +71,7 @@ def execute(args: Namespace, reactor: 'ReactorProtocol') -> None: forwarding_ws_factory = EventForwardingWebsocketFactory( simulator=simulator, peer_id='simulator_peer_id', - network='simulator_network', + settings=get_global_settings(), reactor=reactor, event_storage=event_ws_factory._event_storage ) diff --git a/hathor/event/websocket/factory.py b/hathor/event/websocket/factory.py index 2bc2724e7..99913319d 100644 --- a/hathor/event/websocket/factory.py +++ b/hathor/event/websocket/factory.py @@ -17,6 +17,7 @@ from autobahn.twisted.websocket import WebSocketServerFactory from structlog import get_logger +from hathor.conf.settings import HathorSettings from hathor.event.model.base_event import BaseEvent from hathor.event.storage import EventStorage from hathor.event.websocket.protocol import EventWebsocketProtocol @@ -45,14 +46,14 @@ def __init__( self, *, peer_id: str, - network: str, + settings: HathorSettings, reactor: Reactor, event_storage: EventStorage ) -> None: super().__init__() self.log = logger.new() self._peer_id = peer_id - self._network = network + self._network = settings.NETWORK_NAME self._reactor = reactor self._event_storage = event_storage self._connections: set[EventWebsocketProtocol] = set() diff --git a/hathor/manager.py b/hathor/manager.py index 1edcf7592..bf1e8fba9 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -107,7 +107,6 @@ def __init__( bit_signaling_service: BitSignalingService, verification_service: VerificationService, cpu_mining_service: CpuMiningService, - network: str, execution_manager: ExecutionManager, vertex_handler: VertexHandler, vertex_parser: VertexParser, @@ -126,8 +125,6 @@ def __init__( """ :param reactor: Twisted reactor which handles the mainloop and the events. :param peer: Peer object, with peer-id of this node. 
- :param network: Name of the network this node participates. Usually it is either testnet or mainnet. - :type network: string :param tx_storage: Required storage backend. :type tx_storage: :py:class:`hathor.transaction.storage.transaction_storage.TransactionStorage` @@ -170,7 +167,7 @@ def __init__( self.remote_address = None self.my_peer = peer - self.network = network + self.network = settings.NETWORK_NAME self.is_started: bool = False diff --git a/hathor/metrics.py b/hathor/metrics.py index 0f3296857..b53752342 100644 --- a/hathor/metrics.py +++ b/hathor/metrics.py @@ -256,7 +256,7 @@ def collect_peer_connection_metrics(self) -> None: metric = PeerConnectionMetrics( connection_string=str(connection.entrypoint) if connection.entrypoint else "", peer_id=str(connection.peer.id), - network=connection.network, + network=settings.NETWORK_NAME, received_messages=connection.metrics.received_messages, sent_messages=connection.metrics.sent_messages, received_bytes=connection.metrics.received_bytes, diff --git a/hathor/p2p/factory.py b/hathor/p2p/factory.py index af1eb270a..832f2e501 100644 --- a/hathor/p2p/factory.py +++ b/hathor/p2p/factory.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import TYPE_CHECKING, Optional +from abc import ABC from twisted.internet import protocol from twisted.internet.interfaces import IAddress @@ -22,23 +22,12 @@ from hathor.p2p.peer import PrivatePeer from hathor.p2p.protocol import HathorLineReceiver -if TYPE_CHECKING: - from hathor.manager import HathorManager # noqa: F401 -MyServerProtocol = HathorLineReceiver -MyClientProtocol = HathorLineReceiver - - -class HathorServerFactory(protocol.ServerFactory): - """ HathorServerFactory is used to generate HathorProtocol objects when a new connection arrives. - """ - - manager: Optional[ConnectionsManager] - protocol: type[MyServerProtocol] = MyServerProtocol +class _HathorLineReceiverFactory(ABC, protocol.Factory): + inbound: bool def __init__( self, - network: str, my_peer: PrivatePeer, p2p_manager: ConnectionsManager, *, @@ -47,56 +36,29 @@ def __init__( ): super().__init__() self._settings = settings - self.network = network self.my_peer = my_peer self.p2p_manager = p2p_manager self.use_ssl = use_ssl - def buildProtocol(self, addr: IAddress) -> MyServerProtocol: - assert self.protocol is not None - p = self.protocol( - network=self.network, + def buildProtocol(self, addr: IAddress) -> HathorLineReceiver: + p = HathorLineReceiver( my_peer=self.my_peer, p2p_manager=self.p2p_manager, use_ssl=self.use_ssl, - inbound=True, + inbound=self.inbound, settings=self._settings ) p.factory = self return p -class HathorClientFactory(protocol.ClientFactory): - """ HathorClientFactory is used to generate HathorProtocol objects when we connected to another peer. +class HathorServerFactory(_HathorLineReceiverFactory, protocol.ServerFactory): + """ HathorServerFactory is used to generate HathorProtocol objects when a new connection arrives. 
""" + inbound = True - protocol: type[MyClientProtocol] = MyClientProtocol - - def __init__( - self, - network: str, - my_peer: PrivatePeer, - p2p_manager: ConnectionsManager, - *, - settings: HathorSettings, - use_ssl: bool, - ): - super().__init__() - self._settings = settings - self.network = network - self.my_peer = my_peer - self.p2p_manager = p2p_manager - self.use_ssl = use_ssl - def buildProtocol(self, addr: IAddress) -> MyClientProtocol: - assert self.protocol is not None - p = self.protocol( - network=self.network, - my_peer=self.my_peer, - p2p_manager=self.p2p_manager, - use_ssl=self.use_ssl, - inbound=False, - settings=self._settings - ) - p.factory = self - return p +class HathorClientFactory(_HathorLineReceiverFactory, protocol.ClientFactory): + """ HathorClientFactory is used to generate HathorProtocol objects when we connected to another peer. + """ + inbound = False diff --git a/hathor/p2p/manager.py b/hathor/p2p/manager.py index 7fb3d35b9..1b94f92d8 100644 --- a/hathor/p2p/manager.py +++ b/hathor/p2p/manager.py @@ -95,7 +95,6 @@ def __init__( self, settings: HathorSettings, reactor: Reactor, - network: str, my_peer: PrivatePeer, pubsub: PubSubManager, ssl: bool, @@ -114,8 +113,6 @@ def __init__( self.reactor = reactor self.my_peer = my_peer - self.network = network - # List of address descriptions to listen for new connections (eg: [tcp:8000]) self.listen_address_descriptions: list[str] = [] @@ -132,10 +129,10 @@ def __init__( from hathor.p2p.factory import HathorClientFactory, HathorServerFactory self.use_ssl = ssl self.server_factory = HathorServerFactory( - self.network, self.my_peer, p2p_manager=self, use_ssl=self.use_ssl, settings=self._settings + self.my_peer, p2p_manager=self, use_ssl=self.use_ssl, settings=self._settings ) self.client_factory = HathorClientFactory( - self.network, self.my_peer, p2p_manager=self, use_ssl=self.use_ssl, settings=self._settings + self.my_peer, p2p_manager=self, use_ssl=self.use_ssl, settings=self._settings ) # Global maximum number of connections. 
@@ -407,7 +404,7 @@ def on_peer_ready(self, protocol: HathorProtocol) -> None: self.unverified_peer_storage.pop(protocol.peer.id, None) # we emit the event even if it's a duplicate peer as a matching - # NETWORK_PEER_DISCONNECTED will be emmited regardless + # NETWORK_PEER_DISCONNECTED will be emitted regardless self.pubsub.publish( HathorEvents.NETWORK_PEER_READY, protocol=protocol, diff --git a/hathor/p2p/protocol.py b/hathor/p2p/protocol.py index fd3306440..f2bda9cc4 100644 --- a/hathor/p2p/protocol.py +++ b/hathor/p2p/protocol.py @@ -73,7 +73,6 @@ class WarningFlags(str, Enum): NO_PEER_ID_URL = 'no_peer_id_url' NO_ENTRYPOINTS = 'no_entrypoints' - network: str my_peer: PrivatePeer connections: 'ConnectionsManager' node: 'HathorManager' @@ -99,7 +98,6 @@ def peer(self) -> PublicPeer: def __init__( self, - network: str, my_peer: PrivatePeer, p2p_manager: 'ConnectionsManager', *, @@ -108,7 +106,6 @@ def __init__( inbound: bool, ) -> None: self._settings = settings - self.network = network self.my_peer = my_peer self.connections = p2p_manager diff --git a/hathor/p2p/states/hello.py b/hathor/p2p/states/hello.py index b7cb42dce..47c9cf4e5 100644 --- a/hathor/p2p/states/hello.py +++ b/hathor/p2p/states/hello.py @@ -52,7 +52,7 @@ def _get_hello_data(self) -> dict[str, Any]: remote = protocol.transport.getPeer() data = { 'app': self._app(), - 'network': protocol.network, + 'network': self._settings.NETWORK_NAME, 'remote_address': format_address(remote), 'genesis_short_hash': get_genesis_short_hash(), 'timestamp': protocol.node.reactor.seconds(), @@ -135,7 +135,7 @@ def handle_hello(self, payload: str) -> None: # XXX: this used to be a warning, but it shouldn't be since it's perfectly normal self.log.debug('different versions', theirs=remote_app, ours=our_app) - if data['network'] != protocol.network: + if data['network'] != self._settings.NETWORK_NAME: protocol.send_error_and_close_connection('Wrong network.') return diff --git a/hathor/simulator/simulator.py b/hathor/simulator/simulator.py index d5c6bbeed..913d88ea9 100644 --- a/hathor/simulator/simulator.py +++ b/hathor/simulator/simulator.py @@ -55,7 +55,6 @@ def __init__(self, seed: Optional[int] = None): self.seed = seed self.rng = Random(self.seed) self.settings = get_global_settings()._replace(AVG_TIME_BETWEEN_BLOCKS=SIMULATOR_AVG_TIME_BETWEEN_BLOCKS) - self._network = 'testnet' self._clock = MemoryReactorHeapClock() self._peers: OrderedDict[str, HathorManager] = OrderedDict() self._connections: list['FakeConnection'] = [] @@ -80,7 +79,6 @@ def get_default_builder(self) -> Builder: Returns a builder with default configuration, for convenience when using create_peer() or create_artifacts() """ return Builder() \ - .set_network(self._network) \ .set_peer(PrivatePeer.auto_generated()) \ .set_soft_voided_tx_ids(set()) \ .enable_full_verification() \ diff --git a/hathor/transaction/block.py b/hathor/transaction/block.py index 9bdeb6ac9..9f5f5a06d 100644 --- a/hathor/transaction/block.py +++ b/hathor/transaction/block.py @@ -359,6 +359,7 @@ def get_feature_activation_bit_value(self, bit: int) -> int: def iter_transactions_in_this_block(self) -> Iterator[BaseTransaction]: """Return an iterator of the transactions that have this block as meta.first_block.""" from hathor.transaction.storage.traversal import BFSOrderWalk + assert self.storage is not None bfs = BFSOrderWalk(self.storage, is_dag_verifications=True, is_dag_funds=True, is_left_to_right=False) for tx in bfs.run(self, skip_root=True): tx_meta = tx.get_metadata() diff --git 
a/hathor/transaction/storage/__init__.py b/hathor/transaction/storage/__init__.py index e46ff6035..4fbdd6ae7 100644 --- a/hathor/transaction/storage/__init__.py +++ b/hathor/transaction/storage/__init__.py @@ -15,6 +15,7 @@ from hathor.transaction.storage.cache_storage import TransactionCacheStorage from hathor.transaction.storage.memory_storage import TransactionMemoryStorage from hathor.transaction.storage.transaction_storage import TransactionStorage +from hathor.transaction.storage.vertex_storage_protocol import VertexStorageProtocol try: from hathor.transaction.storage.rocksdb_storage import TransactionRocksDBStorage @@ -26,4 +27,5 @@ 'TransactionMemoryStorage', 'TransactionCacheStorage', 'TransactionRocksDBStorage', + 'VertexStorageProtocol' ] diff --git a/hathor/transaction/storage/traversal.py b/hathor/transaction/storage/traversal.py index d88b47b9d..7900cb8d6 100644 --- a/hathor/transaction/storage/traversal.py +++ b/hathor/transaction/storage/traversal.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations import heapq from abc import ABC, abstractmethod @@ -21,7 +22,7 @@ if TYPE_CHECKING: from hathor.transaction import BaseTransaction # noqa: F401 - from hathor.transaction.storage import TransactionStorage # noqa: F401 + from hathor.transaction.storage import VertexStorageProtocol from hathor.types import VertexId @@ -47,8 +48,14 @@ class GenericWalk(ABC): """ seen: set['VertexId'] - def __init__(self, storage: 'TransactionStorage', *, is_dag_funds: bool = False, - is_dag_verifications: bool = False, is_left_to_right: bool = True): + def __init__( + self, + storage: VertexStorageProtocol, + *, + is_dag_funds: bool = False, + is_dag_verifications: bool = False, + is_left_to_right: bool = True, + ) -> None: """ If `is_left_to_right` is `True`, we walk in the direction of the unverified transactions. Otherwise, we walk in the direction of the genesis. 
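A short usage sketch of the new explicit, keyword-only constructor, assuming `storage` satisfies `VertexStorageProtocol` and `block` is a vertex already stored in it; it mirrors the `iter_transactions_in_this_block` call site patched above:

from hathor.transaction.storage.traversal import BFSOrderWalk

# Walk from `block` towards the genesis, over both the funds and verification DAGs.
walk = BFSOrderWalk(storage, is_dag_verifications=True, is_dag_funds=True, is_left_to_right=False)
for vertex in walk.run(block, skip_root=True):
    print(vertex.hash.hex())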
@@ -112,7 +119,7 @@ def add_neighbors(self, tx: 'BaseTransaction') -> None: for _hash in it: if _hash not in self.seen: self.seen.add(_hash) - neighbor = self.storage.get_transaction(_hash) + neighbor = self.storage.get_vertex(_hash) self._push_visit(neighbor) def skip_neighbors(self, tx: 'BaseTransaction') -> None: @@ -155,8 +162,20 @@ class BFSTimestampWalk(GenericWalk): """ _to_visit: list[HeapItem] - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__( + self, + storage: VertexStorageProtocol, + *, + is_dag_funds: bool = False, + is_dag_verifications: bool = False, + is_left_to_right: bool = True, + ) -> None: + super().__init__( + storage, + is_dag_funds=is_dag_funds, + is_dag_verifications=is_dag_verifications, + is_left_to_right=is_left_to_right + ) self._to_visit = [] def _is_empty(self) -> bool: @@ -179,8 +198,20 @@ class BFSOrderWalk(GenericWalk): """ _to_visit: deque['BaseTransaction'] - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__( + self, + storage: VertexStorageProtocol, + *, + is_dag_funds: bool = False, + is_dag_verifications: bool = False, + is_left_to_right: bool = True, + ) -> None: + super().__init__( + storage, + is_dag_funds=is_dag_funds, + is_dag_verifications=is_dag_verifications, + is_left_to_right=is_left_to_right + ) self._to_visit = deque() def _is_empty(self) -> bool: @@ -198,8 +229,20 @@ class DFSWalk(GenericWalk): """ _to_visit: list['BaseTransaction'] - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__( + self, + storage: VertexStorageProtocol, + *, + is_dag_funds: bool = False, + is_dag_verifications: bool = False, + is_left_to_right: bool = True, + ) -> None: + super().__init__( + storage, + is_dag_funds=is_dag_funds, + is_dag_verifications=is_dag_verifications, + is_left_to_right=is_left_to_right + ) self._to_visit = [] def _is_empty(self) -> bool: diff --git a/tests/cli/test_events_simulator.py b/tests/cli/test_events_simulator.py index 83f6049e9..2a4ee941f 100644 --- a/tests/cli/test_events_simulator.py +++ b/tests/cli/test_events_simulator.py @@ -11,10 +11,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ from unittest.mock import Mock from hathor.cli.events_simulator.event_forwarding_websocket_factory import EventForwardingWebsocketFactory from hathor.cli.events_simulator.events_simulator import create_parser, execute +from hathor.conf.get_settings import get_global_settings from tests.test_memory_reactor_clock import TestMemoryReactorClock @@ -29,7 +31,7 @@ def test_events_simulator() -> None: factory = EventForwardingWebsocketFactory( simulator=Mock(), peer_id='test_peer_id', - network='test_network', + settings=get_global_settings(), reactor=reactor, event_storage=Mock() ) diff --git a/tests/event/websocket/test_factory.py b/tests/event/websocket/test_factory.py index 6c703a609..24feeab98 100644 --- a/tests/event/websocket/test_factory.py +++ b/tests/event/websocket/test_factory.py @@ -16,6 +16,7 @@ import pytest +from hathor.conf.get_settings import get_global_settings from hathor.event.storage import EventMemoryStorage from hathor.event.websocket.factory import EventWebsocketFactory from hathor.event.websocket.protocol import EventWebsocketProtocol @@ -76,7 +77,7 @@ def test_broadcast_event(can_receive_event: bool) -> None: response = EventResponse( peer_id='my_peer_id', - network='my_network', + network='unittests', event=event, latest_event_id=n_starting_events - 1, stream_id=stream_id @@ -141,7 +142,7 @@ def test_send_next_event_to_connection(next_expected_event_id: int, can_receive_ event = EventMocker.create_event(_id) response = EventResponse( peer_id='my_peer_id', - network='my_network', + network='unittests', event=event, latest_event_id=n_starting_events - 1, stream_id=stream_id @@ -164,7 +165,7 @@ def _get_factory( return EventWebsocketFactory( peer_id='my_peer_id', - network='my_network', + settings=get_global_settings(), reactor=clock, event_storage=event_storage ) diff --git a/tests/others/test_metrics.py b/tests/others/test_metrics.py index 6996f1ef5..bbdede763 100644 --- a/tests/others/test_metrics.py +++ b/tests/others/test_metrics.py @@ -96,7 +96,7 @@ def test_tx_storage_data_collection_with_rocksdb_storage_and_no_cache(self): self.tmpdirs.append(path) def _init_manager(): - builder = self.get_builder('testnet') \ + builder = self.get_builder() \ .use_rocksdb(path, cache_capacity=100) \ .force_memory_index() \ .set_wallet(self._create_test_wallet(unlocked=True)) @@ -148,7 +148,7 @@ def test_tx_storage_data_collection_with_rocksdb_storage_and_cache(self): self.tmpdirs.append(path) def _init_manager(): - builder = self.get_builder('testnet') \ + builder = self.get_builder() \ .use_rocksdb(path, cache_capacity=100) \ .force_memory_index() \ .set_wallet(self._create_test_wallet(unlocked=True)) \ @@ -220,7 +220,6 @@ def test_peer_connections_data_collection(self): def build_hathor_protocol(): protocol = HathorProtocol( - network="testnet", my_peer=my_peer, p2p_manager=manager.connections, use_ssl=False, diff --git a/tests/p2p/test_bootstrap.py b/tests/p2p/test_bootstrap.py index 344194549..3c3d9fa8c 100644 --- a/tests/p2p/test_bootstrap.py +++ b/tests/p2p/test_bootstrap.py @@ -49,7 +49,7 @@ class BootstrapTestCase(unittest.TestCase): def test_mock_discovery(self) -> None: pubsub = PubSubManager(self.clock) peer = PrivatePeer.auto_generated() - connections = ConnectionsManager(self._settings, self.clock, 'testnet', peer, pubsub, True, self.rng, True) + connections = ConnectionsManager(self._settings, self.clock, peer, pubsub, True, self.rng, True) host_ports1 = [ ('foobar', 1234), ('127.0.0.99', 9999), @@ -73,7 +73,7 @@ def test_mock_discovery(self) -> None: def 
test_dns_discovery(self) -> None: pubsub = PubSubManager(self.clock) peer = PrivatePeer.auto_generated() - connections = ConnectionsManager(self._settings, self.clock, 'testnet', peer, pubsub, True, self.rng, True) + connections = ConnectionsManager(self._settings, self.clock, peer, pubsub, True, self.rng, True) bootstrap_a = [ '127.0.0.99', '127.0.0.88', diff --git a/tests/poa/test_poa_verification.py b/tests/poa/test_poa_verification.py index 062a0c599..e82512878 100644 --- a/tests/poa/test_poa_verification.py +++ b/tests/poa/test_poa_verification.py @@ -47,7 +47,7 @@ def setUp(self) -> None: ), ) - builder = self.get_builder('network').set_settings(settings) + builder = self.get_builder().set_settings(settings) self.manager = self.create_peer_from_builder(builder) self.verifiers = self.manager.verification_service.verifiers diff --git a/tests/tx/test_cache_storage.py b/tests/tx/test_cache_storage.py index d2698d84e..bf6e9670a 100644 --- a/tests/tx/test_cache_storage.py +++ b/tests/tx/test_cache_storage.py @@ -14,7 +14,7 @@ class BaseCacheStorageTest(unittest.TestCase): def setUp(self): super().setUp() - builder = self.get_builder('testnet') \ + builder = self.get_builder() \ .use_memory() \ .use_tx_storage_cache(capacity=5) \ .set_wallet(self._create_test_wallet(unlocked=True)) diff --git a/tests/unittest.py b/tests/unittest.py index 4b932eb52..afb11c1b0 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -85,7 +85,6 @@ class TestBuilder(Builder): def __init__(self, settings: HathorSettings | None = None) -> None: super().__init__() - self.set_network('testnet') # default builder has sync-v2 enabled for tests self.enable_sync_v2() self.set_settings(settings or get_global_settings()) @@ -171,11 +170,10 @@ def _create_test_wallet(self, unlocked: bool = False) -> Wallet: wallet.lock() return wallet - def get_builder(self, network: str) -> TestBuilder: + def get_builder(self) -> TestBuilder: builder = TestBuilder() builder.set_rng(self.rng) \ - .set_reactor(self.clock) \ - .set_network(network) + .set_reactor(self.clock) return builder def create_peer_from_builder(self, builder: Builder, start_manager: bool = True) -> HathorManager: @@ -218,9 +216,10 @@ def create_peer( # type: ignore[no-untyped-def] ): # TODO: Add -> HathorManager here. It breaks the lint in a lot of places. 
enable_sync_v1, enable_sync_v2 = self._syncVersionFlags(enable_sync_v1, enable_sync_v2) - builder = self.get_builder(network) \ + settings = self._settings._replace(NETWORK_NAME=network) + builder = self.get_builder() \ .set_full_verification(full_verification) \ - .set_settings(self._settings) + .set_settings(settings) if checkpoints is not None: builder.set_checkpoints(checkpoints) From 349555f6386c690fb4ea81e21c2587a6ae672039 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Wed, 16 Oct 2024 13:39:05 -0300 Subject: [PATCH 36/61] fix(benchmark): fix benchmark CI after bencher update (#1157) --- .github/workflows/pr_benchmarks.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pr_benchmarks.yml b/.github/workflows/pr_benchmarks.yml index 3e9c3cb6d..a8d9e9dc0 100644 --- a/.github/workflows/pr_benchmarks.yml +++ b/.github/workflows/pr_benchmarks.yml @@ -58,8 +58,9 @@ jobs: --project hathor-core \ --token '${{ secrets.BENCHER_API_TOKEN }}' \ --branch '${{ github.head_ref }}' \ - --branch-start-point '${{ github.base_ref }}' \ - --branch-start-point-hash '${{ github.event.pull_request.base.sha }}' \ + --start-point '${{ github.base_ref }}' \ + --start-point-hash '${{ github.event.pull_request.base.sha }}' \ + --start-point-clone-thresholds \ --testbed ubuntu-22.04 \ --adapter shell_hyperfine \ --err \ From 026cb7ba44d2081db7c6fae28c220a9f0f6bf58b Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Thu, 17 Oct 2024 14:56:17 -0300 Subject: [PATCH 37/61] refactor(settings): change wallets and reward lock to use injected settings (#1148) --- hathor/builder/builder.py | 2 +- hathor/conf/settings.py | 4 +++- hathor/manager.py | 2 +- hathor/reward_lock/reward_lock.py | 17 +++++++------ hathor/simulator/simulator.py | 2 +- .../storage/transaction_storage.py | 2 +- hathor/verification/transaction_verifier.py | 5 ++-- hathor/wallet/base_wallet.py | 24 ++++++++++++------- hathor/wallet/hd_wallet.py | 6 +++-- hathor/wallet/wallet.py | 6 +++-- 10 files changed, 43 insertions(+), 27 deletions(-) diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index 46aaad22f..228791355 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -676,7 +676,7 @@ def _get_or_create_wallet(self) -> Optional[BaseWallet]: if self._wallet_directory is None: return None - self._wallet = Wallet(directory=self._wallet_directory) + self._wallet = Wallet(directory=self._wallet_directory, settings=self._get_or_create_settings()) if self._wallet_unlock is not None: self._wallet.unlock(self._wallet_unlock) return self._wallet diff --git a/hathor/conf/settings.py b/hathor/conf/settings.py index d6509b4b6..db235f2b7 100644 --- a/hathor/conf/settings.py +++ b/hathor/conf/settings.py @@ -30,6 +30,8 @@ GENESIS_TOKEN_UNITS = 1 * (10**9) # 1B GENESIS_TOKENS = GENESIS_TOKEN_UNITS * (10**DECIMAL_PLACES) # 100B +HATHOR_TOKEN_UID = b'\x00' + class HathorSettings(NamedTuple): # Version byte of the address in P2PKH @@ -125,7 +127,7 @@ def GENESIS_TX2_TIMESTAMP(self) -> int: MIN_TX_WEIGHT: int = 14 MIN_SHARE_WEIGHT: int = 21 - HATHOR_TOKEN_UID: bytes = b'\x00' + HATHOR_TOKEN_UID: bytes = HATHOR_TOKEN_UID # Maximum distance between two consecutive blocks (in seconds), except for genesis. # This prevent some DoS attacks exploiting the calculation of the score of a side chain. 
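Presumably the token uid is hoisted to a module-level constant so it can keep being used as a default argument value, which Python evaluates at import time and therefore cannot come from an injected HathorSettings instance. A hedged illustration of that pattern (the function name below is made up for the example):

from hathor.conf.settings import HATHOR_TOKEN_UID

def make_output_info(value: int, token_uid: bytes = HATHOR_TOKEN_UID) -> tuple[int, bytes]:
    # Defaults to the native HTR token uid (b'\x00') without reading any settings object.
    return value, token_uid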
diff --git a/hathor/manager.py b/hathor/manager.py index bf1e8fba9..a385fb3d8 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -934,7 +934,7 @@ def push_tx(self, tx: Transaction, allow_non_standard_script: bool = False, if is_spending_voided_tx: raise SpendingVoidedError('Invalid transaction. At least one input is voided.') - if is_spent_reward_locked(tx): + if is_spent_reward_locked(self._settings, tx): raise RewardLockedError('Spent reward is locked.') # We are using here the method from lib because the property diff --git a/hathor/reward_lock/reward_lock.py b/hathor/reward_lock/reward_lock.py index 446afb855..85b6871e8 100644 --- a/hathor/reward_lock/reward_lock.py +++ b/hathor/reward_lock/reward_lock.py @@ -14,7 +14,7 @@ from typing import TYPE_CHECKING, Iterator, Optional -from hathor.conf.get_settings import get_global_settings +from hathor.conf.settings import HathorSettings from hathor.transaction import Block from hathor.util import not_none @@ -32,19 +32,23 @@ def iter_spent_rewards(tx: 'Transaction', storage: 'VertexStorageProtocol') -> I yield spent_tx -def is_spent_reward_locked(tx: 'Transaction') -> bool: +def is_spent_reward_locked(settings: HathorSettings, tx: 'Transaction') -> bool: """ Check whether any spent reward is currently locked, considering only the block rewards spent by this tx itself, and not the inherited `min_height`""" - return get_spent_reward_locked_info(tx, not_none(tx.storage)) is not None + return get_spent_reward_locked_info(settings, tx, not_none(tx.storage)) is not None -def get_spent_reward_locked_info(tx: 'Transaction', storage: 'VertexStorageProtocol') -> Optional['RewardLockedInfo']: +def get_spent_reward_locked_info( + settings: HathorSettings, + tx: 'Transaction', + storage: 'VertexStorageProtocol', +) -> Optional['RewardLockedInfo']: """Check if any input block reward is locked, returning the locked information if any, or None if they are all unlocked.""" from hathor.transaction.transaction import RewardLockedInfo best_height = get_minimum_best_height(storage) for blk in iter_spent_rewards(tx, storage): - needed_height = _spent_reward_needed_height(blk, best_height) + needed_height = _spent_reward_needed_height(settings, blk, best_height) if needed_height > 0: return RewardLockedInfo(blk.hash, needed_height) return None @@ -65,10 +69,9 @@ def get_minimum_best_height(storage: 'VertexStorageProtocol') -> int: return best_height -def _spent_reward_needed_height(block: Block, best_height: int) -> int: +def _spent_reward_needed_height(settings: HathorSettings, block: Block, best_height: int) -> int: """ Returns height still needed to unlock this `block` reward: 0 means it's unlocked.""" spent_height = block.get_height() spend_blocks = best_height - spent_height - settings = get_global_settings() needed_height = settings.REWARD_SPEND_MIN_BLOCKS - spend_blocks return max(needed_height, 0) diff --git a/hathor/simulator/simulator.py b/hathor/simulator/simulator.py index 913d88ea9..a31862909 100644 --- a/hathor/simulator/simulator.py +++ b/hathor/simulator/simulator.py @@ -103,7 +103,7 @@ def create_artifacts(self, builder: Optional[Builder] = None) -> BuildArtifacts: assert self._started, 'Simulator is not started.' 
builder = builder or self.get_default_builder() - wallet = HDWallet(gap_limit=2) + wallet = HDWallet(gap_limit=2, settings=self.settings) wallet._manually_initialize() cpu_mining_service = SimulatorCpuMiningService() diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index ec1492939..aa367fa8c 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -1063,7 +1063,7 @@ def compute_transactions_that_became_invalid(self, new_best_height: int) -> list for tx in self.iter_mempool_from_best_index(): try: TransactionVerifier.verify_reward_locked_for_height( - tx, new_best_height, assert_min_height_verification=False + self._settings, tx, new_best_height, assert_min_height_verification=False ) except RewardLocked: tx.set_validation(ValidationState.INVALID) diff --git a/hathor/verification/transaction_verifier.py b/hathor/verification/transaction_verifier.py index 6efd28fc3..906df38c2 100644 --- a/hathor/verification/transaction_verifier.py +++ b/hathor/verification/transaction_verifier.py @@ -147,10 +147,11 @@ def verify_reward_locked(self, tx: Transaction) -> None: the block rewards spent by this tx itself, and the inherited `min_height`.""" assert tx.storage is not None best_height = get_minimum_best_height(tx.storage) - self.verify_reward_locked_for_height(tx, best_height) + self.verify_reward_locked_for_height(self._settings, tx, best_height) @staticmethod def verify_reward_locked_for_height( + settings: HathorSettings, tx: Transaction, best_height: int, *, @@ -174,7 +175,7 @@ def verify_reward_locked_for_height( and a normal `RewardLocked` exception is raised instead. """ assert tx.storage is not None - info = get_spent_reward_locked_info(tx, tx.storage) + info = get_spent_reward_locked_info(settings, tx, tx.storage) if info is not None: raise RewardLocked(f'Reward {info.block_hash.hex()} still needs {info.blocks_needed} to be unlocked.') diff --git a/hathor/wallet/base_wallet.py b/hathor/wallet/base_wallet.py index 000c8a100..f83ab4fbc 100644 --- a/hathor/wallet/base_wallet.py +++ b/hathor/wallet/base_wallet.py @@ -24,7 +24,8 @@ from structlog import get_logger from twisted.internet.interfaces import IDelayedCall -from hathor.conf import HathorSettings +from hathor.conf.get_settings import get_global_settings +from hathor.conf.settings import HATHOR_TOKEN_UID, HathorSettings from hathor.crypto.util import decode_address from hathor.pubsub import EventArguments, HathorEvents, PubSubManager from hathor.reactor import ReactorProtocol as Reactor, get_global_reactor @@ -36,7 +37,6 @@ from hathor.types import AddressB58, Amount, TokenUid from hathor.wallet.exceptions import InputDuplicated, InsufficientFunds, PrivateKeyNotFound -settings = HathorSettings() logger = get_logger() # check interval for maybe_spent_txs @@ -55,7 +55,7 @@ class WalletOutputInfo(NamedTuple): address: bytes value: int timelock: Optional[int] - token_uid: str = settings.HATHOR_TOKEN_UID.hex() + token_uid: str = HATHOR_TOKEN_UID.hex() class WalletBalance(NamedTuple): @@ -79,8 +79,13 @@ class WalletType(Enum): # Normal key pair wallet KEY_PAIR = 'keypair' - def __init__(self, directory: str = './', pubsub: Optional[PubSubManager] = None, - reactor: Optional[Reactor] = None) -> None: + def __init__( + self, + directory: str = './', + pubsub: Optional[PubSubManager] = None, + reactor: Optional[Reactor] = None, + settings: HathorSettings | None = None, + ) -> None: """ A wallet will hold the unspent 
and spent transactions All files will be stored in the same directory, and it should @@ -96,6 +101,7 @@ def __init__(self, directory: str = './', pubsub: Optional[PubSubManager] = None :type reactor: :py:class:`twisted.internet.Reactor` """ self.log = logger.new() + self.settings = settings or get_global_settings() # dict[token_id, dict[tuple[tx_id, index], UnspentTx]] self.unspent_txs: defaultdict[bytes, dict[tuple[bytes, int], UnspentTx]] = defaultdict(dict) @@ -228,7 +234,7 @@ def prepare_transaction(self, cls: ABCMeta, inputs: list[WalletInputInfo], tokens = [] # list[bytes] = list[token_uid] for txout in outputs: token_uid = bytes.fromhex(txout.token_uid) - if token_uid == settings.HATHOR_TOKEN_UID: + if token_uid == HATHOR_TOKEN_UID: token_index = 0 elif token_uid in token_dict: token_index = token_dict[token_uid] @@ -436,7 +442,7 @@ def sign_transaction(self, tx: Transaction, tx_storage: 'TransactionStorage') -> _input.data = P2PKH.create_input_data(public_key_bytes, signature) def handle_change_tx(self, sum_inputs: int, sum_outputs: int, - token_uid: bytes = settings.HATHOR_TOKEN_UID) -> Optional[WalletOutputInfo]: + token_uid: bytes = HATHOR_TOKEN_UID) -> Optional[WalletOutputInfo]: """Creates an output transaction with the change value :param sum_inputs: Sum of the input amounts @@ -462,7 +468,7 @@ def handle_change_tx(self, sum_inputs: int, sum_outputs: int, def get_inputs_from_amount( self, amount: int, tx_storage: 'TransactionStorage', - token_uid: bytes = settings.HATHOR_TOKEN_UID, max_ts: Optional[int] = None + token_uid: bytes = HATHOR_TOKEN_UID, max_ts: Optional[int] = None ) -> tuple[list[WalletInputInfo], int]: """Creates inputs from our pool of unspent tx given a value @@ -514,7 +520,7 @@ def can_spend_block(self, tx_storage: 'TransactionStorage', tx_id: bytes) -> boo tx = tx_storage.get_transaction(tx_id) if tx.is_block: assert isinstance(tx, Block) - if tx_storage.get_height_best_block() - tx.get_height() < settings.REWARD_SPEND_MIN_BLOCKS: + if tx_storage.get_height_best_block() - tx.get_height() < self.settings.REWARD_SPEND_MIN_BLOCKS: return False return True diff --git a/hathor/wallet/hd_wallet.py b/hathor/wallet/hd_wallet.py index 3c64d0c1f..c93545a2e 100644 --- a/hathor/wallet/hd_wallet.py +++ b/hathor/wallet/hd_wallet.py @@ -17,6 +17,7 @@ from mnemonic import Mnemonic +from hathor.conf.settings import HathorSettings from hathor.pubsub import HathorEvents from hathor.wallet import BaseWallet from hathor.wallet.exceptions import InvalidWords @@ -58,7 +59,8 @@ class HDWallet(BaseWallet): def __init__(self, *, words: Optional[Any] = None, language: str = 'english', passphrase: bytes = b'', gap_limit: int = 20, word_count: int = 24, directory: str = './', pubsub: Optional[Any] = None, - reactor: Optional[Any] = None, initial_key_generation: Optional[Any] = None) -> None: + reactor: Optional[Any] = None, initial_key_generation: Optional[Any] = None, + settings: HathorSettings | None = None) -> None: """ :param words: words to generate the seed. It's a string with the words separated by a single space. 
If None we generate new words when starting the wallet @@ -84,7 +86,7 @@ def __init__(self, *, words: Optional[Any] = None, language: str = 'english', pa :raises ValueError: Raised on invalid word_count """ - super().__init__(directory=directory, pubsub=pubsub, reactor=reactor) + super().__init__(directory=directory, pubsub=pubsub, reactor=reactor, settings=settings) # dict[string(base58), BIP32Key] self.keys: dict[str, Any] = {} diff --git a/hathor/wallet/wallet.py b/hathor/wallet/wallet.py index acc87cd17..2d52d3907 100644 --- a/hathor/wallet/wallet.py +++ b/hathor/wallet/wallet.py @@ -21,6 +21,7 @@ from cryptography.hazmat.primitives.asymmetric import ec from twisted.internet.interfaces import IDelayedCall +from hathor.conf.settings import HathorSettings from hathor.crypto.util import get_public_key_bytes_compressed from hathor.pubsub import HathorEvents from hathor.wallet import BaseWallet @@ -30,7 +31,8 @@ class Wallet(BaseWallet): def __init__(self, keys: Optional[Any] = None, directory: str = './', filename: str = 'keys.json', - pubsub: Optional[Any] = None, reactor: Optional[Any] = None) -> None: + pubsub: Optional[Any] = None, reactor: Optional[Any] = None, settings: HathorSettings | None = None + ) -> None: """ A wallet will hold key pair objects and the unspent and spent transactions associated with the keys. @@ -49,7 +51,7 @@ def __init__(self, keys: Optional[Any] = None, directory: str = './', filename: :param pubsub: If not given, a new one is created. :type pubsub: :py:class:`hathor.pubsub.PubSubManager` """ - super().__init__(directory=directory, pubsub=pubsub, reactor=reactor) + super().__init__(directory=directory, pubsub=pubsub, reactor=reactor, settings=settings) self.filepath = os.path.join(directory, filename) self.keys: dict[str, Any] = keys or {} # dict[string(b58_address), KeyPair] From d569b9a2faa2c4055b160903c7e048b9ce4b1b60 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Fri, 18 Oct 2024 16:14:25 -0300 Subject: [PATCH 38/61] refactor: move can_validate_full method (#1153) --- hathor/manager.py | 2 +- hathor/p2p/sync_v2/agent.py | 2 +- .../sync_v2/blockchain_streaming_client.py | 2 +- hathor/transaction/base_transaction.py | 34 ------------------- .../storage/transaction_storage.py | 23 ++++++++++++- 5 files changed, 25 insertions(+), 38 deletions(-) diff --git a/hathor/manager.py b/hathor/manager.py index a385fb3d8..0b6bd94b1 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -483,7 +483,7 @@ def _initialize_components_full_verification(self) -> None: # TODO: deal with invalid tx tx._update_parents_children_metadata() - if tx.can_validate_full(): + if self.tx_storage.can_validate_full(tx): tx.update_initial_metadata() if tx.is_genesis: assert tx.validate_checkpoint(self.checkpoints) diff --git a/hathor/p2p/sync_v2/agent.py b/hathor/p2p/sync_v2/agent.py index 7046f755c..b27ced303 100644 --- a/hathor/p2p/sync_v2/agent.py +++ b/hathor/p2p/sync_v2/agent.py @@ -1171,7 +1171,7 @@ def handle_data(self, payload: str) -> None: else: # If we have not requested the data, it is a new transaction being propagated # in the network, thus, we propagate it as well. 
- if tx.can_validate_full(): + if self.tx_storage.can_validate_full(tx): self.log.debug('tx received in real time from peer', tx=tx.hash_hex, peer=self.protocol.get_peer_id()) try: self.vertex_handler.on_new_vertex(tx, propagate_to_peers=True, fails_silently=False) diff --git a/hathor/p2p/sync_v2/blockchain_streaming_client.py b/hathor/p2p/sync_v2/blockchain_streaming_client.py index f00395d79..6f0a3f236 100644 --- a/hathor/p2p/sync_v2/blockchain_streaming_client.py +++ b/hathor/p2p/sync_v2/blockchain_streaming_client.py @@ -130,7 +130,7 @@ def handle_blocks(self, blk: Block) -> None: else: self.log.debug('block received', blk_id=blk.hash.hex()) - if blk.can_validate_full(): + if self.tx_storage.can_validate_full(blk): try: self.vertex_handler.on_new_vertex(blk, propagate_to_peers=False, fails_silently=False) except HathorError: diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index db6fce85b..c85e40618 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -461,29 +461,6 @@ def add_address_from_output(output: 'TxOutput') -> None: return addresses - def can_validate_full(self) -> bool: - """ Check if this transaction is ready to be fully validated, either all deps are full-valid or one is invalid. - """ - assert self.storage is not None - assert self._hash is not None - if self.is_genesis: - return True - deps = self.get_all_dependencies() - all_exist = True - all_valid = True - # either they all exist and are fully valid - for dep in deps: - meta = self.storage.get_metadata(dep) - if meta is None: - all_exist = False - continue - if not meta.validation.is_fully_connected(): - all_valid = False - if meta.validation.is_invalid(): - # or any of them is invalid (which would make this one invalid too) - return True - return all_exist and all_valid - def set_validation(self, validation: ValidationState) -> None: """ This method will set the internal validation state AND the appropriate voided_by marker. @@ -850,17 +827,6 @@ def clone(self, *, include_metadata: bool = True, include_storage: bool = True) def get_token_uid(self, index: int) -> TokenUid: raise NotImplementedError - def is_ready_for_validation(self) -> bool: - """Check whether the transaction is ready to be validated: all dependencies exist and are fully connected.""" - assert self.storage is not None - for dep_hash in self.get_all_dependencies(): - dep_meta = self.storage.get_metadata(dep_hash) - if dep_meta is None: - return False - if not dep_meta.validation.is_fully_connected(): - return False - return True - @property def static_metadata(self) -> StaticMetadataT: """Get this vertex's static metadata. 
Assumes it has been initialized.""" diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index aa367fa8c..f91e3b795 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -30,7 +30,7 @@ from hathor.indexes.height_index import HeightInfo from hathor.profiler import get_cpu_profiler from hathor.pubsub import PubSubManager -from hathor.transaction.base_transaction import BaseTransaction, TxOutput +from hathor.transaction.base_transaction import BaseTransaction, TxOutput, Vertex from hathor.transaction.block import Block from hathor.transaction.exceptions import RewardLocked from hathor.transaction.storage.exceptions import ( @@ -1133,6 +1133,27 @@ def migrate_static_metadata(self, log: BoundLogger) -> None: """ raise NotImplementedError + def can_validate_full(self, vertex: Vertex) -> bool: + """ Check if a vertex is ready to be fully validated, either all deps are full-valid or one is invalid. + """ + if vertex.is_genesis: + return True + deps = vertex.get_all_dependencies() + all_exist = True + all_valid = True + # either they all exist and are fully valid + for dep in deps: + meta = self.get_metadata(dep) + if meta is None: + all_exist = False + continue + if not meta.validation.is_fully_connected(): + all_valid = False + if meta.validation.is_invalid(): + # or any of them is invalid (which would make this one invalid too) + return True + return all_exist and all_valid + class BaseTransactionStorage(TransactionStorage): indexes: Optional[IndexesManager] From 26a36c0e687aefb0424c0c39118a9270ce57c58e Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Wed, 26 Apr 2023 17:31:59 -0500 Subject: [PATCH 39/61] feat(consensus): Change meta.score and meta.accumulated_weight to int type Co-authored-by: Jan Segre --- hathor/consensus/block_consensus.py | 45 +- hathor/consensus/transaction_consensus.py | 9 +- hathor/event/model/event_data.py | 2 + hathor/manager.py | 12 +- hathor/mining/block_template.py | 2 +- hathor/transaction/base_transaction.py | 17 +- hathor/transaction/resources/decode_tx.py | 4 +- hathor/transaction/resources/transaction.py | 4 +- .../resources/transaction_confirmation.py | 2 +- .../change_score_acc_weight_metadata.py | 33 ++ .../storage/transaction_storage.py | 6 +- hathor/transaction/transaction_metadata.py | 29 +- hathor/util.py | 6 - hathor/utils/weight.py | 37 ++ hathor/wallet/resources/send_tokens.py | 3 +- .../resources/thin_wallet/send_tokens.py | 3 +- tests/event/test_base_event.py | 2 + .../event/test_event_simulation_scenarios.py | 426 +++++++++--------- tests/event/test_tx_metadata.py | 18 +- tests/event/websocket/test_protocol.py | 3 +- tests/resources/transaction/test_mining.py | 8 +- .../test_transaction_confirmation.py | 5 +- tests/tx/test_accumulated_weight.py | 6 +- tests/tx/test_blockchain.py | 50 +- tests/tx/test_indexes.py | 2 +- tests/tx/test_mining.py | 7 +- tests/tx/test_tx_storage.py | 4 + tests/utils.py | 6 +- 28 files changed, 429 insertions(+), 322 deletions(-) create mode 100644 hathor/transaction/storage/migrations/change_score_acc_weight_metadata.py create mode 100644 hathor/utils/weight.py diff --git a/hathor/consensus/block_consensus.py b/hathor/consensus/block_consensus.py index e7a0186cb..419a66268 100644 --- a/hathor/consensus/block_consensus.py +++ b/hathor/consensus/block_consensus.py @@ -18,8 +18,9 @@ from structlog import get_logger from hathor.conf.get_settings import get_global_settings -from 
hathor.transaction import BaseTransaction, Block, Transaction, sum_weights +from hathor.transaction import BaseTransaction, Block, Transaction from hathor.util import classproperty +from hathor.utils.weight import weight_to_work if TYPE_CHECKING: from hathor.consensus.context import ConsensusAlgorithmContext @@ -117,7 +118,7 @@ def update_voided_info(self, block: Block) -> None: for h in voided_by: tx = storage.get_transaction(h) tx_meta = tx.get_metadata() - tx_meta.accumulated_weight = sum_weights(tx_meta.accumulated_weight, block.weight) + tx_meta.accumulated_weight += weight_to_work(block.weight) self.context.save(tx) # Check conflicts of the transactions voiding us. @@ -156,27 +157,27 @@ def update_voided_info(self, block: Block) -> None: is_connected_to_the_best_chain=is_connected_to_the_best_chain) # First, void this block. + # We need to void this block first, because otherwise it would always be one of the heads. self.mark_as_voided(block, skip_remove_first_block_markers=True) # Get the score of the best chains. - # We need to void this block first, because otherwise it would always be one of the heads. heads = [cast(Block, storage.get_transaction(h)) for h in storage.get_best_block_tips()] - best_score = None + best_score: int | None = None for head in heads: head_meta = head.get_metadata(force_reload=True) if best_score is None: best_score = head_meta.score else: # All heads must have the same score. - assert abs(best_score - head_meta.score) < 1e-10 - assert isinstance(best_score, (int, float)) + assert best_score == head_meta.score + assert best_score is not None # Calculate the score. # We cannot calculate score before getting the heads. score = self.calculate_score(block) # Finally, check who the winner is. - if score <= best_score - self._settings.WEIGHT_TOL: + if score < best_score: # Just update voided_by from parents. self.update_voided_by_from_parents(block) @@ -197,7 +198,7 @@ def update_voided_info(self, block: Block) -> None: common_block = self._find_first_parent_in_best_chain(block) self.add_voided_by_to_multiple_chains(block, heads, common_block) - if score >= best_score + self._settings.WEIGHT_TOL: + if score > best_score: # We have a new winner candidate. 
self.update_score_and_mark_as_the_best_chain_if_possible(block) # As `update_score_and_mark_as_the_best_chain_if_possible` may affect `voided_by`, @@ -285,28 +286,30 @@ def update_score_and_mark_as_the_best_chain_if_possible(self, block: Block) -> N self.update_score_and_mark_as_the_best_chain(block) self.remove_voided_by_from_chain(block) + best_score: int if self.update_voided_by_from_parents(block): storage = block.storage heads = [cast(Block, storage.get_transaction(h)) for h in storage.get_best_block_tips()] - best_score = 0.0 + best_score = 0 best_heads: list[Block] for head in heads: head_meta = head.get_metadata(force_reload=True) - if head_meta.score <= best_score - self._settings.WEIGHT_TOL: + if head_meta.score < best_score: continue - if head_meta.score >= best_score + self._settings.WEIGHT_TOL: + if head_meta.score > best_score: best_heads = [head] best_score = head_meta.score else: - assert abs(best_score - head_meta.score) < 1e-10 + assert best_score == head_meta.score best_heads.append(head) - assert isinstance(best_score, (int, float)) and best_score > 0 + assert isinstance(best_score, int) and best_score > 0 assert len(best_heads) > 0 first_block = self._find_first_parent_in_best_chain(best_heads[0]) self.add_voided_by_to_multiple_chains(best_heads[0], [block], first_block) if len(best_heads) == 1: + assert best_heads[0].hash != block.hash self.update_score_and_mark_as_the_best_chain_if_possible(best_heads[0]) def update_score_and_mark_as_the_best_chain(self, block: Block) -> None: @@ -444,7 +447,7 @@ def remove_first_block_markers(self, block: Block) -> None: self.context.save(tx) def _score_block_dfs(self, block: BaseTransaction, used: set[bytes], - mark_as_best_chain: bool, newest_timestamp: int) -> float: + mark_as_best_chain: bool, newest_timestamp: int) -> int: """ Internal method to run a DFS. It is used by `calculate_score()`. """ assert block.storage is not None @@ -453,7 +456,7 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes], storage = block.storage from hathor.transaction import Block - score = block.weight + score = weight_to_work(block.weight) for parent in block.get_parents(): if parent.is_block: assert isinstance(parent, Block) @@ -462,7 +465,7 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes], x = meta.score else: x = self._score_block_dfs(parent, used, mark_as_best_chain, newest_timestamp) - score = sum_weights(score, x) + score += x else: from hathor.transaction.storage.traversal import BFSTimestampWalk @@ -487,7 +490,7 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes], meta.first_block = block.hash self.context.save(tx) - score = sum_weights(score, tx.weight) + score += weight_to_work(tx.weight) # Always save the score when it is calculated. meta = block.get_metadata() @@ -499,12 +502,12 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes], # Thus, if we have already calculated it, we just check the consistency of the calculation. # Unfortunately we may have to calculate it more than once when a new block arrives in a side # side because the `first_block` points only to the best chain. 
- assert abs(meta.score - score) < 1e-10, \ + assert meta.score == score, \ 'hash={} meta.score={} score={}'.format(block.hash.hex(), meta.score, score) return score - def calculate_score(self, block: Block, *, mark_as_best_chain: bool = False) -> float: + def calculate_score(self, block: Block, *, mark_as_best_chain: bool = False) -> int: """ Calculate block's score, which is the accumulated work of the verified transactions and blocks. :param: mark_as_best_chain: If `True`, the transactions' will point `meta.first_block` to @@ -514,9 +517,9 @@ def calculate_score(self, block: Block, *, mark_as_best_chain: bool = False) -> if block.is_genesis: if mark_as_best_chain: meta = block.get_metadata() - meta.score = block.weight + meta.score = weight_to_work(block.weight) self.context.save(block) - return block.weight + return weight_to_work(block.weight) parent = self._find_first_parent_in_best_chain(block) newest_timestamp = parent.timestamp diff --git a/hathor/consensus/transaction_consensus.py b/hathor/consensus/transaction_consensus.py index 311ebd9d7..12d55b270 100644 --- a/hathor/consensus/transaction_consensus.py +++ b/hathor/consensus/transaction_consensus.py @@ -17,8 +17,9 @@ from structlog import get_logger from hathor.conf.get_settings import get_global_settings -from hathor.transaction import BaseTransaction, Block, Transaction, TxInput, sum_weights +from hathor.transaction import BaseTransaction, Block, Transaction, TxInput from hathor.util import classproperty +from hathor.utils.weight import weight_to_work if TYPE_CHECKING: from hathor.consensus.context import ConsensusAlgorithmContext @@ -193,13 +194,13 @@ def update_voided_info(self, tx: Transaction) -> None: continue tx2 = tx.storage.get_transaction(h) tx2_meta = tx2.get_metadata() - tx2_meta.accumulated_weight = sum_weights(tx2_meta.accumulated_weight, tx.weight) + tx2_meta.accumulated_weight += weight_to_work(tx.weight) self.context.save(tx2) # Then, we add ourselves. 
meta = tx.get_metadata() assert not meta.voided_by or meta.voided_by == {tx.hash} - assert meta.accumulated_weight == tx.weight + assert meta.accumulated_weight == weight_to_work(tx.weight) if tx.hash in self.context.consensus.soft_voided_tx_ids: voided_by.add(self._settings.SOFT_VOIDED_ID) voided_by.add(tx.hash) @@ -298,7 +299,7 @@ def check_conflicts(self, tx: Transaction) -> None: candidate.update_accumulated_weight(stop_value=meta.accumulated_weight) tx_meta = candidate.get_metadata() d = tx_meta.accumulated_weight - meta.accumulated_weight - if abs(d) < self._settings.WEIGHT_TOL: + if d == 0: tie_list.append(candidate) elif d > 0: is_highest = False diff --git a/hathor/event/model/event_data.py b/hathor/event/model/event_data.py index a24ceca1c..1903ef74e 100644 --- a/hathor/event/model/event_data.py +++ b/hathor/event/model/event_data.py @@ -55,6 +55,8 @@ class TxMetadata(BaseModel, extra=Extra.ignore): twins: list[str] accumulated_weight: float score: float + accumulated_weight_raw: str + score_raw: str first_block: Optional[str] height: int validation: str diff --git a/hathor/manager.py b/hathor/manager.py index 0b6bd94b1..e7ee57592 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -53,14 +53,15 @@ from hathor.reactor import ReactorProtocol as Reactor from hathor.reward_lock import is_spent_reward_locked from hathor.stratum import StratumFactory -from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion, sum_weights +from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion from hathor.transaction.exceptions import TxValidationError from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.transaction.storage.transaction_storage import TransactionStorage from hathor.transaction.storage.tx_allow_scope import TxAllowScope from hathor.transaction.vertex_parser import VertexParser from hathor.types import Address, VertexId -from hathor.util import EnvironmentInfo, LogDuration, Random, calculate_min_significant_weight +from hathor.util import EnvironmentInfo, LogDuration, Random +from hathor.utils.weight import calculate_min_significant_weight, weight_to_work from hathor.verification.verification_service import VerificationService from hathor.vertex_handler import VertexHandler from hathor.wallet import BaseWallet @@ -835,8 +836,8 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur timestamp = min(max(current_timestamp, timestamp_min), timestamp_max) parent_block_metadata = parent_block.get_metadata() # this is the min weight to cause an increase of twice the WEIGHT_TOL, we make sure to generate a template with - # at least this weight (note that the user of the API can set its own weight, the block sumit API will also - # protect agains a weight that is too small but using WEIGHT_TOL instead of 2*WEIGHT_TOL) + # at least this weight (note that the user of the API can set its own weight, the block submit API will also + # protect against a weight that is too small but using WEIGHT_TOL instead of 2*WEIGHT_TOL) min_significant_weight = calculate_min_significant_weight( parent_block_metadata.score, 2 * self._settings.WEIGHT_TOL @@ -856,6 +857,7 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur assert 1 <= len(parents) <= 3, 'Impossible number of parents' if __debug__ and len(parents) == 3: assert len(parents_any) == 0, 'Extra parents to choose from that cannot be chosen' + score = parent_block_metadata.score + 
weight_to_work(weight) return BlockTemplate( versions={TxVersion.REGULAR_BLOCK.value, TxVersion.MERGE_MINED_BLOCK.value}, reward=self.daa.get_tokens_issued_per_block(height), @@ -866,7 +868,7 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur parents=parents, parents_any=parents_any, height=height, - score=sum_weights(parent_block_metadata.score, weight), + score=score, signal_bits=self._bit_signaling_service.generate_signal_bits(block=parent_block) ) diff --git a/hathor/mining/block_template.py b/hathor/mining/block_template.py index 7884ba8e4..6ab4ba776 100644 --- a/hathor/mining/block_template.py +++ b/hathor/mining/block_template.py @@ -36,7 +36,7 @@ class BlockTemplate(NamedTuple): parents: list[bytes] # required parents, will always have a block and at most 2 txs parents_any: list[bytes] # list of extra parents to choose from when there are more options height: int # metadata - score: float # metadata + score: int # metadata signal_bits: int # signal bits for blocks generated from this template def generate_minimally_valid_block(self) -> BaseTransaction: diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index c85e40618..41cbab100 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -37,6 +37,7 @@ from hathor.transaction.validation_state import ValidationState from hathor.types import TokenUid, TxOutputScript, VertexId from hathor.util import classproperty +from hathor.utils.weight import weight_to_work if TYPE_CHECKING: from _hashlib import HASH @@ -602,11 +603,12 @@ def get_metadata(self, *, force_reload: bool = False, use_storage: bool = True) metadata = self.storage.get_metadata(self.hash) self._metadata = metadata if not metadata: - score = self.weight if self.is_genesis else 0 + score = weight_to_work(self.weight) if self.is_genesis else 0 + accumulated_weight = weight_to_work(self.weight) metadata = TransactionMetadata( settings=self._settings, hash=self._hash, - accumulated_weight=self.weight, + accumulated_weight=accumulated_weight, score=score, ) self._metadata = metadata @@ -621,10 +623,11 @@ def reset_metadata(self) -> None: """ from hathor.transaction.transaction_metadata import ValidationState assert self.storage is not None - score = self.weight if self.is_genesis else 0 + score = weight_to_work(self.weight) if self.is_genesis else 0 + accumulated_weight = weight_to_work(self.weight) self._metadata = TransactionMetadata(hash=self._hash, score=score, - accumulated_weight=self.weight) + accumulated_weight=accumulated_weight) if self.is_genesis: self._metadata.validation = ValidationState.CHECKPOINT_FULL self._metadata.voided_by = set() @@ -656,7 +659,7 @@ def update_accumulated_weight(self, *, stop_value: float = inf, save_file: bool if metadata.accumulated_weight > stop_value: return metadata - accumulated_weight = self.weight + accumulated_weight = weight_to_work(self.weight) # TODO Another optimization is that, when we calculate the acc weight of a transaction, we # also partially calculate the acc weight of its descendants. 
If it were a DFS, when returning @@ -671,7 +674,7 @@ def update_accumulated_weight(self, *, stop_value: float = inf, save_file: bool from hathor.transaction.storage.traversal import BFSTimestampWalk bfs_walk = BFSTimestampWalk(self.storage, is_dag_funds=True, is_dag_verifications=True, is_left_to_right=True) for tx in bfs_walk.run(self, skip_root=True): - accumulated_weight = sum_weights(accumulated_weight, tx.weight) + accumulated_weight += weight_to_work(tx.weight) if accumulated_weight > stop_value: break @@ -706,7 +709,7 @@ def _update_parents_children_metadata(self) -> None: def _update_initial_accumulated_weight(self) -> None: """Update the vertex initial accumulated_weight.""" metadata = self.get_metadata() - metadata.accumulated_weight = self.weight + metadata.accumulated_weight = weight_to_work(self.weight) def update_timestamp(self, now: int) -> None: """Update this tx's timestamp diff --git a/hathor/transaction/resources/decode_tx.py b/hathor/transaction/resources/decode_tx.py index bdd409a92..c97847d50 100644 --- a/hathor/transaction/resources/decode_tx.py +++ b/hathor/transaction/resources/decode_tx.py @@ -149,8 +149,8 @@ def render_GET(self, request): 'conflict_with': [], 'voided_by': [], 'twins': [], - 'accumulated_weight': 10, - 'score': 12, + 'accumulated_weight': '1024', + 'score': '4096', 'first_block': None }, 'spent_outputs': { diff --git a/hathor/transaction/resources/transaction.py b/hathor/transaction/resources/transaction.py index 0adc27639..c2c11ee65 100644 --- a/hathor/transaction/resources/transaction.py +++ b/hathor/transaction/resources/transaction.py @@ -427,8 +427,8 @@ def get_list_tx(self, request): 'conflict_with': [], 'voided_by': [], 'twins': [], - 'accumulated_weight': 10, - 'score': 12, + 'accumulated_weight': '1024', + 'score': '4096', 'first_block': None }, 'spent_outputs': { diff --git a/hathor/transaction/resources/transaction_confirmation.py b/hathor/transaction/resources/transaction_confirmation.py index d60526491..153d2b8f8 100644 --- a/hathor/transaction/resources/transaction_confirmation.py +++ b/hathor/transaction/resources/transaction_confirmation.py @@ -125,7 +125,7 @@ def render_GET(self, request): 'success': { 'summary': 'Success', 'value': { - 'accumulated_weight': 15.4, + 'accumulated_weight': 43237, 'confirmation_level': 0.88, 'stop_value': 14.5, 'accumulated_bigger': True, diff --git a/hathor/transaction/storage/migrations/change_score_acc_weight_metadata.py b/hathor/transaction/storage/migrations/change_score_acc_weight_metadata.py new file mode 100644 index 000000000..5a1fdfaea --- /dev/null +++ b/hathor/transaction/storage/migrations/change_score_acc_weight_metadata.py @@ -0,0 +1,33 @@ +# Copyright 2021 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import TYPE_CHECKING + +from hathor.transaction.storage.migrations import BaseMigration + +if TYPE_CHECKING: + from hathor.transaction.storage import TransactionStorage + + +class Migration(BaseMigration): + def skip_empty_db(self) -> bool: + return True + + def get_db_name(self) -> str: + return 'change_score_acc_weight_metadata' + + def run(self, storage: 'TransactionStorage') -> None: + raise Exception('Cannot migrate your database due to an incompatible change in the metadata. ' + 'Please delete your data folder and use the latest available snapshot or sync ' + 'from the beginning.') diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index f91e3b795..d4def4867 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -44,6 +44,7 @@ add_feature_activation_bit_counts_metadata, add_feature_activation_bit_counts_metadata2, add_min_height_metadata, + change_score_acc_weight_metadata, migrate_static_metadata, remove_first_nop_features, remove_second_nop_features, @@ -103,6 +104,7 @@ class TransactionStorage(ABC): add_feature_activation_bit_counts_metadata2.Migration, remove_second_nop_features.Migration, migrate_static_metadata.Migration, + change_score_acc_weight_metadata.Migration, ] _migrations: list[BaseMigration] @@ -611,7 +613,7 @@ def get_best_block_tips(self, timestamp: Optional[float] = None, *, skip_cache: if timestamp is None and not skip_cache and self._best_block_tips_cache is not None: return self._best_block_tips_cache[:] - best_score = 0.0 + best_score: int = 0 best_tip_blocks: list[bytes] = [] for block_hash in (x.data for x in self.get_block_tips(timestamp)): @@ -620,7 +622,7 @@ def get_best_block_tips(self, timestamp: Optional[float] = None, *, skip_cache: if meta.voided_by and meta.voided_by != set([block_hash]): # If anyone but the block itself is voiding this block, then it must be skipped.
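Since `best_score` is now an exact integer amount of work, the tie check in the next hunk can use plain equality instead of the old `abs(meta.score - best_score) < 1e-10` float tolerance. A small illustration with hypothetical values:

    # Float scores could differ by rounding noise, so ties needed a tolerance.
    old_a, old_b = 4.321928094888, 4.321928094887
    assert old_a != old_b and abs(old_a - old_b) < 1e-10

    # Integer work values compare exactly, so equal scores are simply equal.
    new_a, new_b = 43238, 43238
    assert new_a == new_b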
continue - if abs(meta.score - best_score) < 1e-10: + if meta.score == best_score: best_tip_blocks.append(block_hash) elif meta.score > best_score: best_score = meta.score diff --git a/hathor/transaction/transaction_metadata.py b/hathor/transaction/transaction_metadata.py index e083d4d86..fa9d2c977 100644 --- a/hathor/transaction/transaction_metadata.py +++ b/hathor/transaction/transaction_metadata.py @@ -22,6 +22,7 @@ from hathor.feature_activation.model.feature_state import FeatureState from hathor.transaction.validation_state import ValidationState from hathor.util import json_dumpb, json_loadb, practically_equal +from hathor.utils.weight import work_to_weight if TYPE_CHECKING: from weakref import ReferenceType # noqa: F401 @@ -40,8 +41,8 @@ class TransactionMetadata: received_by: list[int] children: list[bytes] twins: list[bytes] - accumulated_weight: float - score: float + accumulated_weight: int + score: int first_block: Optional[bytes] validation: ValidationState @@ -60,8 +61,8 @@ def __init__( self, spent_outputs: Optional[dict[int, list[bytes]]] = None, hash: Optional[bytes] = None, - accumulated_weight: float = 0, - score: float = 0, + accumulated_weight: int = 0, + score: int = 0, settings: HathorSettings | None = None, ) -> None: from hathor.transaction.genesis import is_genesis @@ -195,7 +196,7 @@ def __eq__(self, other: Any) -> bool: return True - def to_json(self) -> dict[str, Any]: + def to_storage_json(self) -> dict[str, Any]: data: dict[str, Any] = {} data['hash'] = self.hash and self.hash.hex() data['spent_outputs'] = [] @@ -206,8 +207,8 @@ def to_json(self) -> dict[str, Any]: data['conflict_with'] = [x.hex() for x in set(self.conflict_with)] if self.conflict_with else [] data['voided_by'] = [x.hex() for x in self.voided_by] if self.voided_by else [] data['twins'] = [x.hex() for x in self.twins] - data['accumulated_weight'] = self.accumulated_weight - data['score'] = self.score + data['accumulated_weight_raw'] = str(self.accumulated_weight) + data['score_raw'] = str(self.score) vertex = self.get_tx() data['min_height'] = vertex.static_metadata.min_height @@ -232,6 +233,12 @@ def to_json(self) -> dict[str, Any]: data['validation'] = self.validation.name.lower() return data + def to_json(self) -> dict[str, Any]: + data = self.to_storage_json() + data['accumulated_weight'] = work_to_weight(self.accumulated_weight) + data['score'] = work_to_weight(self.score) + return data + def to_json_extended(self, tx_storage: 'TransactionStorage') -> dict[str, Any]: data = self.to_json() first_block_height: Optional[int] @@ -268,8 +275,8 @@ def create_from_json(cls, data: dict[str, Any]) -> 'TransactionMetadata': else: meta.twins = [] - meta.accumulated_weight = data['accumulated_weight'] - meta.score = data.get('score', 0) + meta.accumulated_weight = int(data['accumulated_weight_raw']) + meta.score = int(data.get('score_raw', 0)) feature_states_raw = data.get('feature_states') if feature_states_raw: @@ -294,7 +301,7 @@ def from_bytes(cls, data: bytes) -> 'TransactionMetadata': def to_bytes(self) -> bytes: """Serialize a TransactionMetadata instance to bytes. This should be used for storage.""" - json_dict = self.to_json() + json_dict = self.to_storage_json() # The `to_json()` method includes these fields for backwards compatibility with APIs, but since they're not # part of metadata, they should not be serialized. 
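The `to_storage_json()` / `to_json()` split above serializes the integer work values as strings (`accumulated_weight_raw`, `score_raw`) while keeping float weights in the API output; the string encoding presumably avoids precision loss for very large integers in JSON. A hedged sketch of how a consumer could read both representations, using made-up values consistent with the examples in this patch:

    import json
    import math

    payload = json.loads('{"accumulated_weight": 10.0, "score": 12.0, '
                         '"accumulated_weight_raw": "1024", "score_raw": "4096"}')

    # The raw fields carry the exact integer work.
    accumulated_work = int(payload['accumulated_weight_raw'])
    score_work = int(payload['score_raw'])

    # The float fields are log2 of the raw work (work_to_weight), apparently kept
    # so existing API consumers still see weights.
    assert math.isclose(payload['accumulated_weight'], math.log2(accumulated_work))
    assert math.isclose(payload['score'], math.log2(score_work))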
@@ -317,7 +324,7 @@ def clone(self) -> 'TransactionMetadata': :rtype: :py:class:`hathor.transaction.TransactionMetadata` """ # XXX: using json serialization for simplicity, should it use pickle? manual fields? other alternative? - return self.create_from_json(self.to_json()) + return self.create_from_json(self.to_storage_json()) def add_voided_by(self, item: bytes) -> None: """Add `item` to `self.voided_by`. Note that this method does not save the change.""" diff --git a/hathor/util.py b/hathor/util.py index 13a14c20f..755a1d381 100644 --- a/hathor/util.py +++ b/hathor/util.py @@ -805,12 +805,6 @@ def get_hathor_core_version(): return hathor.__version__ -def calculate_min_significant_weight(score: float, tol: float) -> float: - """ This function will return the min significant weight to increase score by tol. - """ - return score + math.log2(2 ** tol - 1) - - def bytes_to_vertexid(data: bytes) -> VertexId: # XXX: using raw string for the docstring so we can more easily write byte literals r""" Function to validate bytes and return a VertexId, raises ValueError if not valid. diff --git a/hathor/utils/weight.py b/hathor/utils/weight.py new file mode 100644 index 000000000..214e5e6ff --- /dev/null +++ b/hathor/utils/weight.py @@ -0,0 +1,37 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import math + + +def weight_to_work(weight: float) -> int: + """Convert weight to work, rounding to the nearest integer.""" + return math.floor(0.5 + 2**weight) + + +def work_to_weight(work: int) -> float: + """Convert work to weight.""" + if work <= 1: + return 0.0 + return math.log2(work) + + +def calculate_min_significant_weight(score: int, tol: float) -> float: + """ This function will return the min significant weight to increase score by tol. + + When most peers are updated to store work as integers for their internal score and accumulated weight metadata, + this function will not be needed anymore. Its only use currently is to make sure miner nodes will produce blocks + with weights that are high enough for outdated nodes to be able to observe the score increasing.
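Worked examples for the conversion helpers defined above, assuming `hathor.utils.weight` is importable as added by this patch:

    from hathor.utils.weight import weight_to_work, work_to_weight

    # Genesis vertices carry weight 2.0, so their work is floor(0.5 + 2**2.0) = 4;
    # this is the value that later appears as accumulated_weight_raw="4" in the tests.
    assert weight_to_work(2.0) == 4
    assert work_to_weight(4) == 2.0

    # Exact powers of two round-trip cleanly; other weights round to the nearest integer amount of work.
    assert weight_to_work(10.0) == 1024
    assert work_to_weight(1024) == 10.0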
+ """ + return work_to_weight(score) + math.log2(2 ** tol - 1) diff --git a/hathor/wallet/resources/send_tokens.py b/hathor/wallet/resources/send_tokens.py index 5c6ddacc4..2d3cc7492 100644 --- a/hathor/wallet/resources/send_tokens.py +++ b/hathor/wallet/resources/send_tokens.py @@ -309,7 +309,8 @@ def render_OPTIONS(self, request): 'inputs': [], 'outputs': [], 'tokens': [], - 'accumulated_weight': 14 + 'accumulated_weight': 14.0, + 'accumulated_weight_raw': '16384' } } } diff --git a/hathor/wallet/resources/thin_wallet/send_tokens.py b/hathor/wallet/resources/thin_wallet/send_tokens.py index 7dd141829..0ab18f5b9 100644 --- a/hathor/wallet/resources/thin_wallet/send_tokens.py +++ b/hathor/wallet/resources/thin_wallet/send_tokens.py @@ -464,7 +464,8 @@ def render_OPTIONS(self, request): 'inputs': [], 'outputs': [], 'tokens': [], - 'accumulated_weight': 14 + 'accumulated_weight': 14.0, + 'accumulated_weight_raw': '16384' } } }, diff --git a/tests/event/test_base_event.py b/tests/event/test_base_event.py index 300157944..fe842764e 100644 --- a/tests/event/test_base_event.py +++ b/tests/event/test_base_event.py @@ -61,6 +61,8 @@ def test_create_base_event(event_id: int, group_id: int | None) -> None: twins=[], accumulated_weight=10.0, score=20.0, + accumulated_weight_raw="1024", + score_raw="1048576", height=100, validation='validation' ) diff --git a/tests/event/test_event_simulation_scenarios.py b/tests/event/test_event_simulation_scenarios.py index 3c1ef99ec..2b7e413b1 100644 --- a/tests/event/test_event_simulation_scenarios.py +++ b/tests/event/test_event_simulation_scenarios.py @@ -55,9 +55,9 @@ def test_only_load(self) -> None: # LOAD_STATED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=4, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), 
aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), 
group_id=None), latest_event_id=4, stream_id=stream_id) # noqa: E501 ] @@ -78,17 +78,17 @@ def test_single_chain_one_block(self) -> None: # LOAD_STATED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], 
conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", 
first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id) # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id) # noqa: E501 ] responses = _remove_timestamp(responses) @@ -107,54 +107,54 @@ def 
test_single_chain_blocks_and_transactions(self) -> None: # LOAD_STATED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], 
accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 
'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], 
parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, 
script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, 
version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', 
event=BaseEvent(id=27, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', 
event=BaseEvent(id=10, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", 
first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], 
voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], 
tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], 
parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, 
inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, 
timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=1578878970.5, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=1578878970.5, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a tx, adding the new tx as spending their output and children # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=1578879030.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.4904519466213, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=1578879030.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[SpentOutput(index=0, tx_ids=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'])], conflict_with=[], voided_by=[], received_by=[], children=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=1578879030.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.49045136082641, score=0.0, accumulated_weight_raw="368282", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=1578879030.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], 
token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[SpentOutput(index=0, tx_ids=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'])], conflict_with=[], voided_by=[], received_by=[], children=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=1578879030.75, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.4904519466213, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=1578879030.75, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=[], twins=[], accumulated_weight=18.49045136082641, score=0.0, accumulated_weight_raw="368282", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each confirmed transaction (first block changed) # noqa E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.576585413276128, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9'], twins=[], accumulated_weight=18.4904519466213, score=0.0, first_block='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, 
weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[SpentOutput(index=0, tx_ids=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'])], conflict_with=[], voided_by=[], received_by=[], children=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9'], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.576585834390443, accumulated_weight_raw="256", score_raw="781879", first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', 
timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9'], twins=[], accumulated_weight=18.49045136082641, score=0.0, accumulated_weight_raw="368282", score_raw="0", first_block='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[SpentOutput(index=0, tx_ids=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'])], conflict_with=[], voided_by=[], received_by=[], children=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9'], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=1578879091.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', 
'5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.576585413276128, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id) # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=1578879091.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.576585834390443, accumulated_weight_raw="256", score_raw="781879", first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id) # noqa: E501 ] responses = _remove_timestamp(responses) @@ -173,37 +173,37 @@ def test_reorg(self) -> None: # LOAD_STATED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], 
received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED EventResponse(type='EVENT', 
peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], twins=[], accumulated_weight=2.0, score=2.0, first_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], twins=[], accumulated_weight=2.0, score=2.0, first_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, signal_bits=0, version=0, 
weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block from manager1 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878940.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878940.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 # Also one VERTEX_METADATA_CHANGED for the previous block, voiding it - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), 
latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block from manager2 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578879064.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, 
token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578879064.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 # REORG_STARTED caused by a new block from manager2 (below) EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=1578879064.25, type=EventType.REORG_STARTED, data=ReorgData(reorg_size=1, previous_best_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', new_best_block='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', common_block='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792'), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 # Also one VERTEX_METADATA_CHANGED for the previous block, un-voiding it as it's now part of the best blockchain # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=2.0, first_block='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, 
score=2.0, first_block='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", 
first_block='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 # REORG_FINISHED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=1578879064.25, type=EventType.REORG_FINISHED, data=EmptyData(), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block from manager2 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=1578879064.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id) # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=1578879064.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id) # noqa: E501 ] responses = _remove_timestamp(responses) @@ -222,56 +222,56 @@ def test_unvoided_transaction(self) -> None: # LOAD_STATED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, type=EventType.LOAD_STARTED, timestamp=0, data=EmptyData(), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, 
token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, type=EventType.LOAD_FINISHED, timestamp=0, data=EmptyData(), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, 
data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', 
'0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, 
signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, 
data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, 
type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', 
event=BaseEvent(id=18, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', 
peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # 
noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), 
latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # 
noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, 
accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], 
conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], 
token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], 
parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], 
outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, type=EventType.VERTEX_METADATA_CHANGED, 
timestamp=0, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.0005, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.00050072657387, score=0.0, accumulated_weight_raw="524470", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], 
outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.0005, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], 
parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.00050072657387, score=0.0, accumulated_weight_raw="524470", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output, and one VERTEX_METADATA_CHANGED adding the new tx as twin/conflict of the previous tx # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], received_by=[], children=[], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.0, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], 
parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], voided_by=[], received_by=[], children=[], twins=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], accumulated_weight=19.0005, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], received_by=[], children=[], 
twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.0, score=0.0, accumulated_weight_raw="524288", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], voided_by=[], received_by=[], children=[], twins=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], accumulated_weight=19.00050072657387, score=0.0, accumulated_weight_raw="524470", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx that is a twin of the previous one. It's voided. 
- EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], received_by=[], children=[], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.0, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], received_by=[], children=[], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.0, score=0.0, accumulated_weight_raw="524288", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one 
VERTEX_METADATA_CHANGED for each twin tx, inverting the voided state of them. # noqa E501 # The order of events is important, we receive the voided txs first, then reverse topological ordering. - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], voided_by=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], received_by=[], children=[], twins=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], accumulated_weight=19.0005, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=[], received_by=[], children=['24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a'], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.000704269011248, score=0.0, first_block='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], voided_by=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], received_by=[], children=[], twins=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], accumulated_weight=19.00050072657387, score=0.0, accumulated_weight_raw="524470", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, accumulated_weight_raw="256", score_raw="524600", first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=[], received_by=[], children=['24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a'], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.000704269011248, score=0.0, accumulated_weight_raw="524544", score_raw="0", first_block='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, accumulated_weight_raw="256", score_raw="524600", first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 ] responses = _remove_timestamp(responses) @@ -290,61 +290,61 @@ def test_invalid_mempool(self) -> None: # LOAD_STATED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', 
event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED 
for each genesis tx (2), adding the new block as their child # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', 
'32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", 
first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], 
parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], 
outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, 
timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', 
event=BaseEvent(id=20, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), 
aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, 
stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], 
tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], 
parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, 
inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, 
timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, 
height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, 
score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, 
inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=1578878970.5, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=1578878970.5, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 # REORG_STARTED caused by a block with lower height but higher weight (below) EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=0, type=EventType.REORG_STARTED, data=ReorgData(reorg_size=2, previous_best_block='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', new_best_block='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', common_block='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for each block that was voided by the reorg - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for the new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', nonce=0, timestamp=1578879030, signal_bits=0, version=0, weight=10.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='', decoded=None)], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=10.0, score=10.066089190457772, first_block=None, height=10, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', nonce=0, timestamp=1578879030, signal_bits=0, version=0, weight=10.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='', decoded=None)], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=10.0, score=10.066089190457772, accumulated_weight_raw="1024", score_raw="1072", first_block=None, height=10, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for the block that had its output unspent, since the previous tx was removed - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=[])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=[])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for each parent of the tx that was removed - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94'], twins=[], accumulated_weight=2.0, score=2.0, 
first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, 
timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 # One VERTEX_REMOVED for the tx above EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, timestamp=0, type=EventType.VERTEX_REMOVED, data=TxDataWithoutMeta(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 # REORG_FINISHED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=40, timestamp=0, type=EventType.REORG_FINISHED, data=EmptyData(), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for the block that caused the reorg - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=41, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', nonce=0, timestamp=1578879030, signal_bits=0, version=0, weight=10.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='', decoded=None)], 
parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=10.0, score=10.066089190457772, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id) # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=41, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', nonce=0, timestamp=1578879030, signal_bits=0, version=0, weight=10.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='', decoded=None)], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=10.0, score=10.066089190457772, accumulated_weight_raw="1024", score_raw="1072", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id) # noqa: E501 ] responses = _remove_timestamp(responses) @@ -363,54 +363,54 @@ def test_empty_script(self) -> None: # LOAD_STATED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], 
received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, 
timestamp=0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their first block # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], 
voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 
'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, 
timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=0, 
type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, 
network='unittests', event=BaseEvent(id=20, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full')), group_id=None), 
latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', 
peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, 
accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], 
voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
aux_pow=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 
nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa: E501 - EventResponse(type='EVENT', 
peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.449427506558003, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded=None)], 
parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.44942676691887, score=0.0, accumulated_weight_raw="357957", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.449427506558003, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.44942676691887, score=0.0, accumulated_weight_raw="357957", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a tx, adding the new tx as spending their output and children # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=15.990494828748208, inputs=[TxInput(tx_id='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', index=1, spent_output=TxOutput(value=1000, token_data=0, script='', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=15.990494828748208, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), 
TxOutput(value=1000, token_data=0, script='', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[SpentOutput(index=1, tx_ids=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11'])], conflict_with=[], voided_by=[], received_by=[], children=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11'], twins=[], accumulated_weight=18.449427506558003, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=15.990494828748208, inputs=[TxInput(tx_id='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', index=1, spent_output=TxOutput(value=1000, token_data=0, script='', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=15.990502884098087, score=0.0, accumulated_weight_raw="65106", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[SpentOutput(index=1, tx_ids=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11'])], conflict_with=[], voided_by=[], received_by=[], children=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11'], twins=[], accumulated_weight=18.44942676691887, score=0.0, 
accumulated_weight_raw="357957", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=15.990494828748208, inputs=[TxInput(tx_id='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', index=1, spent_output=TxOutput(value=1000, token_data=0, script='', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=15.990494828748208, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=15.990494828748208, inputs=[TxInput(tx_id='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', index=1, spent_output=TxOutput(value=1000, token_data=0, script='', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=15.990502884098087, score=0.0, accumulated_weight_raw="65106", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each confirmed transaction (first block changed) # noqa E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', 
'ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.691575942773007, first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=15.990494828748208, inputs=[TxInput(tx_id='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', index=1, spent_output=TxOutput(value=1000, token_data=0, script='', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d'], twins=[], accumulated_weight=15.990494828748208, score=0.0, first_block='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[SpentOutput(index=1, tx_ids=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11'])], conflict_with=[], voided_by=[], received_by=[], children=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', 'da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d'], twins=[], accumulated_weight=18.449427506558003, score=0.0, first_block='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', height=0, validation='full')), group_id=None), latest_event_id=38, 
stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', 'ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.691576556156242, accumulated_weight_raw="256", score_raw="423375", first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=15.990494828748208, inputs=[TxInput(tx_id='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', index=1, spent_output=TxOutput(value=1000, token_data=0, script='', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d'], twins=[], accumulated_weight=15.990502884098087, score=0.0, accumulated_weight_raw="65106", score_raw="0", first_block='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded=None)], 
parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[SpentOutput(index=1, tx_ids=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11'])], conflict_with=[], voided_by=[], received_by=[], children=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', 'da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d'], twins=[], accumulated_weight=18.44942676691887, score=0.0, accumulated_weight_raw="357957", score_raw="0", first_block='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', 'ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.691575942773007, first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id)] # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', 'ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.691576556156242, accumulated_weight_raw="256", score_raw="423375", first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id)] # noqa: E501 responses = _remove_timestamp(responses) expected = _remove_timestamp(expected) @@ -428,54 +428,54 @@ def test_custom_script(self) -> None: # LOAD_STATED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=0, type=EventType.LOAD_STARTED, 
data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, 
network='unittests', event=BaseEvent(id=2, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their first block # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, 
metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', 'ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', 'c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', 'db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=2.0, first_block='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', 'ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', 'c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', 'db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=2.0, first_block='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, 
metadata=TxMetadata(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', 'ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', 'c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', 'db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', 'ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', 'c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', 'db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', height=0, validation='full')), 
group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', spent_outputs=[], 
conflict_with=[], voided_by=[], received_by=[], children=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, 
metadata=TxMetadata(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
aux_pow=None, metadata=TxMetadata(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], 
tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H7A1HBirZ4EhWtCWLcAy4yw6ybWcKnjdfG', timelock=None))], parents=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H7A1HBirZ4EhWtCWLcAy4yw6ybWcKnjdfG', timelock=None))], parents=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', nonce=0, 
timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', 
event=BaseEvent(id=19, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, 
height=8, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H7A1HBirZ4EhWtCWLcAy4yw6ybWcKnjdfG', timelock=None))], parents=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
aux_pow=None, metadata=TxMetadata(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H7A1HBirZ4EhWtCWLcAy4yw6ybWcKnjdfG', timelock=None))], parents=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.55128132611371, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, 
script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', spent_outputs=[SpentOutput(index=0, tx_ids=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec'])], conflict_with=[], voided_by=[], received_by=[], children=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.55128018336992, score=0.0, accumulated_weight_raw="384142", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', spent_outputs=[SpentOutput(index=0, tx_ids=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec'])], conflict_with=[], voided_by=[], received_by=[], children=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19'], twins=[], 
accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.55128132611371, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.55128018336992, score=0.0, accumulated_weight_raw="384142", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a tx, adding the new tx as spending their output and children # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=16.12160141040609, inputs=[TxInput(tx_id='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', index=1, spent_output=TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=16.12160141040609, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[SpentOutput(index=1, tx_ids=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3'])], conflict_with=[], voided_by=[], received_by=[], children=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3'], twins=[], accumulated_weight=18.55128132611371, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=16.12160141040609, inputs=[TxInput(tx_id='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', index=1, spent_output=TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=16.12159422192467, score=0.0, accumulated_weight_raw="71299", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[SpentOutput(index=1, tx_ids=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3'])], conflict_with=[], voided_by=[], received_by=[], children=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3'], twins=[], accumulated_weight=18.55128018336992, score=0.0, accumulated_weight_raw="384142", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=16.12160141040609, inputs=[TxInput(tx_id='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', index=1, spent_output=TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=16.12160141040609, score=0.0, first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', 
event=BaseEvent(id=34, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=16.12160141040609, inputs=[TxInput(tx_id='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', index=1, spent_output=TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=16.12159422192467, score=0.0, accumulated_weight_raw="71299", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each confirmed transaction (first block changed) # noqa E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', '3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', 'cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.79789471506282, first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=16.12160141040609, inputs=[TxInput(tx_id='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', index=1, spent_output=TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', spent_outputs=[], conflict_with=[], 
voided_by=[], received_by=[], children=['99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302'], twins=[], accumulated_weight=16.12160141040609, score=0.0, first_block='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[SpentOutput(index=1, tx_ids=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3'])], conflict_with=[], voided_by=[], received_by=[], children=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', '99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302'], twins=[], accumulated_weight=18.55128132611371, score=0.0, first_block='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', '3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', 'cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.79789262729119, accumulated_weight_raw="256", score_raw="455753", first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', nonce=0, timestamp=1578879030, signal_bits=0, 
version=1, weight=16.12160141040609, inputs=[TxInput(tx_id='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', index=1, spent_output=TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302'], twins=[], accumulated_weight=16.12159422192467, score=0.0, accumulated_weight_raw="71299", score_raw="0", first_block='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[SpentOutput(index=1, tx_ids=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3'])], conflict_with=[], voided_by=[], received_by=[], children=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', '99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302'], twins=[], accumulated_weight=18.55128018336992, score=0.0, accumulated_weight_raw="384142", score_raw="0", first_block='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], 
parents=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', '3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', 'cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.79789471506282, first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id) # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', '3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', 'cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.79789262729119, accumulated_weight_raw="256", score_raw="455753", first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id) # noqa: E501 ] responses = _remove_timestamp(responses) diff --git a/tests/event/test_tx_metadata.py b/tests/event/test_tx_metadata.py index cfe47c40d..fe3782617 100644 --- a/tests/event/test_tx_metadata.py +++ b/tests/event/test_tx_metadata.py @@ -28,8 +28,10 @@ def test_from_spent_output_instance() -> None: received_by=[], children=[], twins=[], - accumulated_weight=0, - score=0, + accumulated_weight=0.0, + score=0.0, + accumulated_weight_raw="0", + score_raw="0", first_block=None, height=0, validation='some_validation' @@ -55,8 +57,10 @@ def test_from_spent_output_list() -> None: received_by=[], children=[], twins=[], - accumulated_weight=0, - score=0, + accumulated_weight=0.0, + score=0.0, + accumulated_weight_raw="0", + score_raw="0", first_block=None, height=0, validation='some_validation' @@ -83,8 +87,10 @@ def test_from_spent_output_dict() -> None: received_by=[], children=[], twins=[], - accumulated_weight=0, - score=0, + accumulated_weight=0.0, + score=0.0, + accumulated_weight_raw="0", + score_raw="0", first_block=None, height=0, validation='some_validation' diff --git a/tests/event/websocket/test_protocol.py b/tests/event/websocket/test_protocol.py index 01a614e50..a13778876 100644 --- a/tests/event/websocket/test_protocol.py +++ b/tests/event/websocket/test_protocol.py @@ -103,7 +103,8 @@ def test_send_event_response() -> None: b'"parents":[],' b'"tokens":[],"token_name":null,"token_symbol":null,"aux_pow":null,"metadata":{"hash":"abc",' b'"spent_outputs":[],"conflict_with":[],"voided_by":[],"received_by":[],"children":[],' - b'"twins":[],"accumulated_weight":10.0,"score":20.0,"first_block":null,"height":100,' + b'"twins":[],"accumulated_weight":10.0,"score":20.0,"accumulated_weight_raw":"1024",' + 
b'"score_raw":"1048576","first_block":null,"height":100,' b'"validation":"validation"}},"group_id":null},"latest_event_id":10,' b'"stream_id":"stream_id"}') diff --git a/tests/resources/transaction/test_mining.py b/tests/resources/transaction/test_mining.py index 80ba0bbaa..64df41444 100644 --- a/tests/resources/transaction/test_mining.py +++ b/tests/resources/transaction/test_mining.py @@ -40,7 +40,9 @@ def test_get_block_template_with_address(self): 'height': 1, 'min_height': 0, 'first_block': None, - 'feature_activation_bit_counts': [0, 0, 0, 0] + 'feature_activation_bit_counts': [0, 0, 0, 0], + 'accumulated_weight_raw': '2', + 'score_raw': '0', }, 'tokens': [], 'data': '', @@ -73,7 +75,9 @@ def test_get_block_template_without_address(self): 'height': 1, 'min_height': 0, 'first_block': None, - 'feature_activation_bit_counts': [0, 0, 0, 0] + 'feature_activation_bit_counts': [0, 0, 0, 0], + 'accumulated_weight_raw': '2', + 'score_raw': '0', }, 'tokens': [], 'data': '', diff --git a/tests/resources/transaction/test_transaction_confirmation.py b/tests/resources/transaction/test_transaction_confirmation.py index a04eeb641..3b95f7718 100644 --- a/tests/resources/transaction/test_transaction_confirmation.py +++ b/tests/resources/transaction/test_transaction_confirmation.py @@ -2,6 +2,7 @@ from hathor.simulator.utils import add_new_blocks from hathor.transaction.resources import TransactionAccWeightResource +from hathor.utils.weight import weight_to_work from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest from tests.utils import add_blocks_unlock_reward, add_new_transactions @@ -24,7 +25,7 @@ def test_get_data(self): ) data_success = response_success.json_value() self.assertTrue(data_success['success']) - self.assertEqual(data_success['accumulated_weight'], genesis_tx.weight) + self.assertEqual(data_success['accumulated_weight'], weight_to_work(genesis_tx.weight)) self.assertEqual(data_success['confirmation_level'], 0) # Adding blocks to have funds @@ -38,7 +39,7 @@ def test_get_data(self): {b'id': bytes(tx.hash.hex(), 'utf-8')} ) data_success2 = response_success2.json_value() - self.assertGreater(data_success2['accumulated_weight'], tx.weight) + self.assertGreater(data_success2['accumulated_weight'], weight_to_work(tx.weight)) self.assertEqual(data_success2['confirmation_level'], 1) # Test sending hash that does not exist diff --git a/tests/tx/test_accumulated_weight.py b/tests/tx/test_accumulated_weight.py index 74507b754..746bcaf30 100644 --- a/tests/tx/test_accumulated_weight.py +++ b/tests/tx/test_accumulated_weight.py @@ -1,6 +1,6 @@ from hathor.simulator.utils import add_new_blocks -from hathor.transaction import sum_weights from hathor.transaction.storage import TransactionMemoryStorage +from hathor.utils.weight import weight_to_work from tests import unittest from tests.utils import add_blocks_unlock_reward, add_new_transactions @@ -39,9 +39,9 @@ def test_accumulated_weight_indirect_block(self): # indirectly. 
expected = 0 for tx in tx_list: - expected = sum_weights(expected, tx.weight) + expected += weight_to_work(tx.weight) for block in blocks: - expected = sum_weights(expected, block.weight) + expected += weight_to_work(block.weight) meta = tx0.update_accumulated_weight() self.assertAlmostEqual(meta.accumulated_weight, expected) diff --git a/tests/tx/test_blockchain.py b/tests/tx/test_blockchain.py index 923cf3f96..ae0af3c50 100644 --- a/tests/tx/test_blockchain.py +++ b/tests/tx/test_blockchain.py @@ -2,8 +2,8 @@ from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode from hathor.simulator.utils import add_new_blocks -from hathor.transaction import sum_weights from hathor.transaction.storage import TransactionMemoryStorage +from hathor.utils.weight import weight_to_work from tests import unittest from tests.utils import add_new_transactions @@ -37,27 +37,27 @@ def test_single_chain(self): manager = self.create_peer('testnet', tx_storage=self.tx_storage) # The initial score is the sum of the genesis - score = self.genesis_blocks[0].weight + score = weight_to_work(self.genesis_blocks[0].weight) for tx in self.genesis_txs: - score = sum_weights(score, tx.weight) + score += weight_to_work(tx.weight) # Mine 100 blocks in a row with no transaction but the genesis blocks = add_new_blocks(manager, 100, advance_clock=15) for i, block in enumerate(blocks): meta = block.get_metadata(force_reload=True) - score = sum_weights(score, block.weight) + score += weight_to_work(block.weight) self.assertAlmostEqual(score, meta.score) # Add some transactions between blocks txs = add_new_transactions(manager, 30, advance_clock=15) for tx in txs: - score = sum_weights(score, tx.weight) + score += weight_to_work(tx.weight) # Mine 50 more blocks in a row with no transactions between them blocks = add_new_blocks(manager, 50) for i, block in enumerate(blocks): meta = block.get_metadata() - score = sum_weights(score, block.weight) + score += weight_to_work(block.weight) self.assertAlmostEqual(score, meta.score) consensus_context = manager.consensus_algorithm.create_context() self.assertAlmostEqual(consensus_context.block_algorithm.calculate_score(block), meta.score) @@ -66,12 +66,12 @@ def test_single_chain(self): for _ in range(15): txs = add_new_transactions(manager, 10, advance_clock=15) for tx in txs: - score = sum_weights(score, tx.weight) + score += weight_to_work(tx.weight) blocks = add_new_blocks(manager, 1) for i, block in enumerate(blocks): meta = block.get_metadata() - score = sum_weights(score, block.weight) + score += weight_to_work(block.weight) self.assertAlmostEqual(score, meta.score) consensus_context = manager.consensus_algorithm.create_context() self.assertAlmostEqual(consensus_context.block_algorithm.calculate_score(block), meta.score) @@ -86,27 +86,27 @@ def test_single_fork_not_best(self): manager = self.create_peer('testnet', tx_storage=self.tx_storage) # The initial score is the sum of the genesis - score = self.genesis_blocks[0].weight + score = weight_to_work(self.genesis_blocks[0].weight) for tx in self.genesis_txs: - score = sum_weights(score, tx.weight) + score += weight_to_work(tx.weight) # Mine 30 blocks in a row with no transactions blocks = add_new_blocks(manager, 30, advance_clock=15) for i, block in enumerate(blocks): meta = block.get_metadata() - score = sum_weights(score, block.weight) + score += weight_to_work(block.weight) self.assertAlmostEqual(score, meta.score) # Add some transactions between blocks txs = add_new_transactions(manager, 5, advance_clock=15) for tx in txs: - 
score = sum_weights(score, tx.weight) + score += weight_to_work(tx.weight) # Mine 1 blocks blocks = add_new_blocks(manager, 1, advance_clock=15) for i, block in enumerate(blocks): meta = block.get_metadata() - score = sum_weights(score, block.weight) + score += weight_to_work(block.weight) self.assertAlmostEqual(score, meta.score) # Generate a block which will be a fork in the middle of the chain @@ -119,7 +119,7 @@ def test_single_fork_not_best(self): blocks = add_new_blocks(manager, 8, advance_clock=15) for i, block in enumerate(blocks): meta = block.get_metadata() - score = sum_weights(score, block.weight) + score += weight_to_work(block.weight) self.assertAlmostEqual(score, meta.score) # Fork block must have the same parents as blocks[0] as well as the same score @@ -134,20 +134,20 @@ def test_single_fork_not_best(self): # Add some transactions between blocks txs = add_new_transactions(manager, 5, advance_clock=15) for tx in txs: - score = sum_weights(score, tx.weight) + score += weight_to_work(tx.weight) # Mine 5 blocks in a row # These blocks belong to case (i). blocks = add_new_blocks(manager, 5, advance_clock=15) for i, block in enumerate(blocks): meta = block.get_metadata() - score = sum_weights(score, block.weight) + score += weight_to_work(block.weight) self.assertAlmostEqual(score, meta.score) # Add some transactions between blocks txs = add_new_transactions(manager, 2, advance_clock=15) for tx in txs: - score = sum_weights(score, tx.weight) + score += weight_to_work(tx.weight) # Propagate a block connected to the voided chain # These blocks belongs to case (iii). @@ -159,7 +159,7 @@ def test_single_fork_not_best(self): # Add some transactions between blocks txs = add_new_transactions(manager, 2, advance_clock=15) for tx in txs: - score = sum_weights(score, tx.weight) + score += weight_to_work(tx.weight) # Propagate a block connected to the voided chain # This block belongs to case (iv). @@ -176,28 +176,28 @@ def test_multiple_forks(self): manager = self.create_peer('testnet', tx_storage=self.tx_storage) # The initial score is the sum of the genesis - score = self.genesis_blocks[0].weight + score = weight_to_work(self.genesis_blocks[0].weight) for tx in self.genesis_txs: - score = sum_weights(score, tx.weight) + score += weight_to_work(tx.weight) # Mine 30 blocks in a row with no transactions, case (i). blocks = add_new_blocks(manager, 30, advance_clock=15) for i, block in enumerate(blocks): meta = block.get_metadata() - score = sum_weights(score, block.weight) + score += weight_to_work(block.weight) self.assertAlmostEqual(score, meta.score) # Add some transactions between blocks txs1 = add_new_transactions(manager, 5, advance_clock=15) for tx in txs1: - score = sum_weights(score, tx.weight) + score += weight_to_work(tx.weight) # Mine 1 blocks, case (i). blocks = add_new_blocks(manager, 1, advance_clock=15) block_before_fork = blocks[0] for i, block in enumerate(blocks): meta = block.get_metadata() - score = sum_weights(score, block.weight) + score += weight_to_work(block.weight) self.assertAlmostEqual(score, meta.score) for tx in txs1: @@ -207,13 +207,13 @@ def test_multiple_forks(self): # Add some transactions between blocks txs2 = add_new_transactions(manager, 3, advance_clock=15) for tx in txs2: - score = sum_weights(score, tx.weight) + score += weight_to_work(tx.weight) # Mine 5 blocks in a row, case (i). 
blocks = add_new_blocks(manager, 5, advance_clock=15) for i, block in enumerate(blocks): meta = block.get_metadata() - score = sum_weights(score, block.weight) + score += weight_to_work(block.weight) self.assertAlmostEqual(score, meta.score) # Mine 4 blocks, starting a fork. diff --git a/tests/tx/test_indexes.py b/tests/tx/test_indexes.py index 2dd4457d3..15f238ea4 100644 --- a/tests/tx/test_indexes.py +++ b/tests/tx/test_indexes.py @@ -264,7 +264,7 @@ def check_utxos(*args): address=decode_address(address)) block2.parents[1:] = [txA2.hash, txB2.hash] block2.timestamp = block1.timestamp - block2.weight = 1.2 + block2.weight = 4 self.manager.cpu_mining_service.resolve(block2) self.manager.propagate_tx(block2, fails_silently=False) self.graphviz.labels[block2.hash] = 'block2' diff --git a/tests/tx/test_mining.py b/tests/tx/test_mining.py index 2a9a0a260..49e6d1c41 100644 --- a/tests/tx/test_mining.py +++ b/tests/tx/test_mining.py @@ -2,8 +2,9 @@ from hathor.mining import BlockTemplate from hathor.simulator.utils import add_new_blocks -from hathor.transaction import Block, sum_weights +from hathor.transaction import Block from hathor.transaction.storage import TransactionMemoryStorage +from hathor.utils.weight import weight_to_work from tests import unittest @@ -51,7 +52,7 @@ def test_block_template_after_genesis(self) -> None: parents=block_templates[0].parents, parents_any=[], height=1, # genesis is 0 - score=sum_weights(self.genesis_blocks[0].weight, 1.0), + score=weight_to_work(self.genesis_blocks[0].weight) + weight_to_work(1), signal_bits=0 )) @@ -80,7 +81,7 @@ def test_regular_block_template(self) -> None: parents=block_templates[0].parents, parents_any=[], height=101, # genesis is 0 - score=sum_weights(blocks[-1].get_metadata().score, 1.0), + score=blocks[-1].get_metadata().score + weight_to_work(1), signal_bits=0 )) diff --git a/tests/tx/test_tx_storage.py b/tests/tx/test_tx_storage.py index 420412fce..8931850b9 100644 --- a/tests/tx/test_tx_storage.py +++ b/tests/tx/test_tx_storage.py @@ -488,6 +488,10 @@ def test_storage_new_blocks(self): # Block3 has the same parents as block2. 
block3 = self._add_new_block(parents=block2.parents) + self.assertEqual(block3.weight, block2.weight) + meta2 = block2.get_metadata() + meta3 = block3.get_metadata() + self.assertEqual(meta2.score, meta3.score) tip_blocks = [x.data for x in self.tx_storage.get_block_tips()] self.assertEqual(set(tip_blocks), {block2.hash, block3.hash}) diff --git a/tests/utils.py b/tests/utils.py index ad7a81a57..a2566a3ca 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -591,8 +591,10 @@ class EventMocker: received_by=[], children=[], twins=[], - accumulated_weight=10, - score=20, + accumulated_weight=10.0, + score=20.0, + accumulated_weight_raw="1024", + score_raw="1048576", height=100, validation='validation' ) From 2536592c0f0e573150c194b35750e173e1344032 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Thu, 24 Oct 2024 17:40:20 -0300 Subject: [PATCH 40/61] chore: remove all migrations (#1149) --- hathor/transaction/storage/cache_storage.py | 5 -- hathor/transaction/storage/memory_storage.py | 7 -- ..._feature_activation_bit_counts_metadata.py | 37 ---------- ...feature_activation_bit_counts_metadata2.py | 41 ------------ .../migrations/add_min_height_metadata.py | 35 ---------- .../migrations/migrate_static_metadata.py | 67 ------------------- .../migrations/remove_first_nop_features.py | 58 ---------------- .../migrations/remove_second_nop_features.py | 54 --------------- hathor/transaction/storage/rocksdb_storage.py | 44 +----------- .../storage/transaction_storage.py | 26 +------ 10 files changed, 2 insertions(+), 372 deletions(-) delete mode 100644 hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata.py delete mode 100644 hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata2.py delete mode 100644 hathor/transaction/storage/migrations/add_min_height_metadata.py delete mode 100644 hathor/transaction/storage/migrations/migrate_static_metadata.py delete mode 100644 hathor/transaction/storage/migrations/remove_first_nop_features.py delete mode 100644 hathor/transaction/storage/migrations/remove_second_nop_features.py diff --git a/hathor/transaction/storage/cache_storage.py b/hathor/transaction/storage/cache_storage.py index 1539309f9..965a61179 100644 --- a/hathor/transaction/storage/cache_storage.py +++ b/hathor/transaction/storage/cache_storage.py @@ -15,7 +15,6 @@ from collections import OrderedDict from typing import Any, Iterator, Optional -from structlog.stdlib import BoundLogger from twisted.internet import threads from typing_extensions import override @@ -251,7 +250,3 @@ def get_value(self, key: str) -> Optional[str]: def flush(self): self._flush_to_storage(self.dirty_txs.copy()) - - @override - def migrate_static_metadata(self, log: BoundLogger) -> None: - return self.store.migrate_static_metadata(log) diff --git a/hathor/transaction/storage/memory_storage.py b/hathor/transaction/storage/memory_storage.py index 85a68b491..31742d823 100644 --- a/hathor/transaction/storage/memory_storage.py +++ b/hathor/transaction/storage/memory_storage.py @@ -14,7 +14,6 @@ from typing import Any, Iterator, Optional, TypeVar -from structlog.stdlib import BoundLogger from typing_extensions import override from hathor.conf.settings import HathorSettings @@ -126,9 +125,3 @@ def remove_value(self, key: str) -> None: def get_value(self, key: str) -> Optional[str]: return self.attributes.get(key) - - @override - def migrate_static_metadata(self, log: BoundLogger) -> None: - # This method is only ever used by the `migrate_static_metadata` migration, and 
therefore must not be - # implemented for the memory storage. - raise NotImplementedError diff --git a/hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata.py b/hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata.py deleted file mode 100644 index 20c93b3b5..000000000 --- a/hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2023 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import TYPE_CHECKING - -from structlog import get_logger - -from hathor.transaction.storage.migrations import BaseMigration - -if TYPE_CHECKING: - from hathor.transaction.storage import TransactionStorage - -logger = get_logger() - - -class Migration(BaseMigration): - def skip_empty_db(self) -> bool: - return True - - def get_db_name(self) -> str: - return 'add_feature_activation_bit_counts_metadata' - - def run(self, storage: 'TransactionStorage') -> None: - # We can skip this migration as it will run again in `add_feature_activation_bit_counts_metadata2`. - log = logger.new() - log.info('Skipping unnecessary migration.') diff --git a/hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata2.py b/hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata2.py deleted file mode 100644 index eb59daa6b..000000000 --- a/hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata2.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2023 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from typing import TYPE_CHECKING - -from structlog import get_logger - -from hathor.transaction.storage.migrations import BaseMigration -from hathor.util import progress - -if TYPE_CHECKING: - from hathor.transaction.storage import TransactionStorage - -logger = get_logger() - - -class Migration(BaseMigration): - def skip_empty_db(self) -> bool: - return True - - def get_db_name(self) -> str: - return 'add_feature_activation_bit_counts_metadata2' - - def run(self, storage: 'TransactionStorage') -> None: - log = logger.new() - topological_iterator = storage.topological_iterator() - - for vertex in progress(topological_iterator, log=log, total=None): - if vertex.is_block: - vertex.update_initial_metadata() diff --git a/hathor/transaction/storage/migrations/add_min_height_metadata.py b/hathor/transaction/storage/migrations/add_min_height_metadata.py deleted file mode 100644 index 8d31dab13..000000000 --- a/hathor/transaction/storage/migrations/add_min_height_metadata.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import TYPE_CHECKING - -from hathor.transaction.storage.migrations import BaseMigration - -if TYPE_CHECKING: - from hathor.transaction.storage import TransactionStorage - - -class Migration(BaseMigration): - def skip_empty_db(self) -> bool: - return True - - def get_db_name(self) -> str: - return 'add_min_height_metadata' - - def run(self, storage: 'TransactionStorage') -> None: - # XXX: this migration assumes all existing metadata is currently complete (up to the point before the - # migration) and correct, which could not be the case if we're on a full_verification initialization, maybe - # migrations shouldn't run on full_verification? - for tx in storage.topological_iterator(): - tx.update_initial_metadata() diff --git a/hathor/transaction/storage/migrations/migrate_static_metadata.py b/hathor/transaction/storage/migrations/migrate_static_metadata.py deleted file mode 100644 index 2edb3adf4..000000000 --- a/hathor/transaction/storage/migrations/migrate_static_metadata.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright 2023 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from typing import TYPE_CHECKING - -from structlog import get_logger - -from hathor.conf.get_settings import get_global_settings -from hathor.transaction import Block, Transaction -from hathor.transaction.static_metadata import BlockStaticMetadata, TransactionStaticMetadata -from hathor.transaction.storage.migrations import BaseMigration -from hathor.util import progress - -if TYPE_CHECKING: - from hathor.transaction.storage import TransactionStorage - -logger = get_logger() - - -class Migration(BaseMigration): - def skip_empty_db(self) -> bool: - return True - - def get_db_name(self) -> str: - return 'migrate_static_metadata' - - def run(self, storage: 'TransactionStorage') -> None: - """This migration takes attributes from existing vertex metadata and saves them as static metadata.""" - log = logger.new() - settings = get_global_settings() - - # First we migrate static metadata using the storage itself since it uses internal structures. - log.info('creating static metadata...') - storage.migrate_static_metadata(log) - - # Now that static metadata is set, we can use the topological iterator normally - log.info('removing old metadata and validating...') - topological_iter = storage.topological_iterator() - - for vertex in progress(topological_iter, log=log, total=None): - # We re-save the vertex's metadata so it's serialized with the new `to_bytes()` method, excluding fields - # that were migrated. - storage.save_transaction(vertex, only_metadata=True) - - # We re-create the static metadata from scratch and compare it with the value that was created by the - # migration above, as a sanity check. - if isinstance(vertex, Block): - assert vertex.static_metadata == BlockStaticMetadata.create_from_storage( - vertex, settings, storage - ) - elif isinstance(vertex, Transaction): - assert vertex.static_metadata == TransactionStaticMetadata.create_from_storage( - vertex, settings, storage - ) - else: - raise NotImplementedError diff --git a/hathor/transaction/storage/migrations/remove_first_nop_features.py b/hathor/transaction/storage/migrations/remove_first_nop_features.py deleted file mode 100644 index c5ede9997..000000000 --- a/hathor/transaction/storage/migrations/remove_first_nop_features.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright 2023 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import TYPE_CHECKING - -from structlog import get_logger - -from hathor.conf.get_settings import get_global_settings -from hathor.transaction import Block -from hathor.transaction.storage.migrations import BaseMigration -from hathor.util import progress - -if TYPE_CHECKING: - from hathor.transaction.storage import TransactionStorage - -logger = get_logger() - - -class Migration(BaseMigration): - def skip_empty_db(self) -> bool: - return True - - def get_db_name(self) -> str: - return 'remove_first_nop_features' - - def run(self, storage: 'TransactionStorage') -> None: - """ - This migration clears the Feature Activation metadata related to the first Phased Testing on testnet. 
- """ - settings = get_global_settings() - log = logger.new() - - if settings.NETWORK_NAME != 'testnet-golf': - # If it's not testnet, we don't have to clear anything. - log.info('Skipping testnet-only migration.') - return - - topological_iterator = storage.topological_iterator() - - for vertex in progress(topological_iterator, log=log, total=None): - if isinstance(vertex, Block): - meta = vertex.get_metadata() - # This is the start_height of the **second** Phased Testing, so we clear anything before it. - if vertex.static_metadata.height < 3_386_880: - meta.feature_states = None - - storage.save_transaction(vertex, only_metadata=True) diff --git a/hathor/transaction/storage/migrations/remove_second_nop_features.py b/hathor/transaction/storage/migrations/remove_second_nop_features.py deleted file mode 100644 index dd322b1f7..000000000 --- a/hathor/transaction/storage/migrations/remove_second_nop_features.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2023 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import TYPE_CHECKING - -from structlog import get_logger - -from hathor.conf.get_settings import get_global_settings -from hathor.transaction.storage.migrations import BaseMigration -from hathor.util import progress - -if TYPE_CHECKING: - from hathor.transaction.storage import TransactionStorage - -logger = get_logger() - - -class Migration(BaseMigration): - def skip_empty_db(self) -> bool: - return True - - def get_db_name(self) -> str: - return 'remove_second_nop_features' - - def run(self, storage: 'TransactionStorage') -> None: - """ - This migration clears the Feature Activation metadata related to the second Phased Testing on testnet. - """ - settings = get_global_settings() - log = logger.new() - - if settings.NETWORK_NAME != 'testnet-golf': - # If it's not testnet, we don't have to clear anything. 
- log.info('Skipping testnet-only migration.') - return - - topological_iterator = storage.topological_iterator() - - for vertex in progress(topological_iterator, log=log, total=None): - if vertex.is_block: - meta = vertex.get_metadata() - meta.feature_states = None - storage.save_transaction(vertex, only_metadata=True) diff --git a/hathor/transaction/storage/rocksdb_storage.py b/hathor/transaction/storage/rocksdb_storage.py index 38df6ef30..5b97cf741 100644 --- a/hathor/transaction/storage/rocksdb_storage.py +++ b/hathor/transaction/storage/rocksdb_storage.py @@ -15,18 +15,16 @@ from typing import TYPE_CHECKING, Iterator, Optional from structlog import get_logger -from structlog.stdlib import BoundLogger from typing_extensions import override from hathor.conf.settings import HathorSettings from hathor.indexes import IndexesManager from hathor.storage import RocksDBStorage -from hathor.transaction.static_metadata import BlockStaticMetadata, TransactionStaticMetadata, VertexStaticMetadata +from hathor.transaction.static_metadata import VertexStaticMetadata from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.transaction.storage.migrations import MigrationState from hathor.transaction.storage.transaction_storage import BaseTransactionStorage from hathor.transaction.vertex_parser import VertexParser -from hathor.util import json_loadb, progress if TYPE_CHECKING: import rocksdb @@ -233,43 +231,3 @@ def get_value(self, key: str) -> Optional[str]: return None else: return data.decode() - - @override - def migrate_static_metadata(self, log: BoundLogger) -> None: - metadata_iter = self._db.iteritems(self._cf_meta) - metadata_iter.seek_to_first() - - # We have to iterate over metadata instead of vertices because the storage doesn't allow us to get a vertex if - # its static metadata is not set. We also use raw dict metadata because `metadata.create_from_json()` doesn't - # include attributes that should be static, which are exactly the ones we need for this migration. 
- for (_, vertex_id), metadata_bytes in progress(metadata_iter, log=log, total=None): - raw_metadata = json_loadb(metadata_bytes) - height = raw_metadata['height'] - min_height = raw_metadata['min_height'] - bit_counts = raw_metadata.get('feature_activation_bit_counts') - - assert isinstance(height, int) - assert isinstance(min_height, int) - - static_metadata: VertexStaticMetadata - is_block = (vertex_id == self._settings.GENESIS_BLOCK_HASH or height != 0) - - if is_block: - assert isinstance(bit_counts, list) - for item in bit_counts: - assert isinstance(item, int) - - static_metadata = BlockStaticMetadata( - height=height, - min_height=min_height, - feature_activation_bit_counts=bit_counts, - feature_states={}, # This will be populated in a future PR - ) - else: - assert bit_counts is None or bit_counts == [] - static_metadata = TransactionStaticMetadata( - min_height=min_height - ) - - # Save it manually to the CF - self._db.put((self._cf_static_meta, vertex_id), static_metadata.json_dumpb()) diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index d4def4867..653a0041f 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -22,7 +22,6 @@ from intervaltree.interval import Interval from structlog import get_logger -from structlog.stdlib import BoundLogger from hathor.conf.settings import HathorSettings from hathor.execution_manager import ExecutionManager @@ -38,17 +37,7 @@ TransactionIsNotABlock, TransactionNotInAllowedScopeError, ) -from hathor.transaction.storage.migrations import ( - BaseMigration, - MigrationState, - add_feature_activation_bit_counts_metadata, - add_feature_activation_bit_counts_metadata2, - add_min_height_metadata, - change_score_acc_weight_metadata, - migrate_static_metadata, - remove_first_nop_features, - remove_second_nop_features, -) +from hathor.transaction.storage.migrations import BaseMigration, MigrationState, change_score_acc_weight_metadata from hathor.transaction.storage.tx_allow_scope import TxAllowScope, tx_allow_context from hathor.transaction.transaction import Transaction from hathor.transaction.transaction_metadata import TransactionMetadata @@ -98,12 +87,6 @@ class TransactionStorage(ABC): # history of migrations that have to be applied in the order defined here _migration_factories: list[type[BaseMigration]] = [ - add_min_height_metadata.Migration, - add_feature_activation_bit_counts_metadata.Migration, - remove_first_nop_features.Migration, - add_feature_activation_bit_counts_metadata2.Migration, - remove_second_nop_features.Migration, - migrate_static_metadata.Migration, change_score_acc_weight_metadata.Migration, ] @@ -1128,13 +1111,6 @@ def get_block(self, block_id: VertexId) -> Block: assert isinstance(block, Block) return block - @abstractmethod - def migrate_static_metadata(self, log: BoundLogger) -> None: - """ - Migrate metadata attributes to static metadata. This is only used for the `migrate_static_metadata` migration. - """ - raise NotImplementedError - def can_validate_full(self, vertex: Vertex) -> bool: """ Check if a vertex is ready to be fully validated, either all deps are full-valid or one is invalid. 
""" From 123166e0933261ad5dbfc797c0034124347e83ca Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Fri, 25 Oct 2024 10:18:22 -0300 Subject: [PATCH 41/61] refactor(vertex-handler): remove p2p_manager dependency (#1154) --- hathor/builder/builder.py | 1 - hathor/builder/cli_builder.py | 1 - hathor/manager.py | 13 ++++++------ hathor/p2p/sync_v1/agent.py | 10 +++++++--- hathor/p2p/sync_v2/agent.py | 20 +++++++++---------- .../sync_v2/blockchain_streaming_client.py | 10 ++-------- hathor/p2p/sync_v2/mempool.py | 4 +++- hathor/profiler/cpu.py | 7 +++++-- .../storage/transaction_storage.py | 5 +++++ hathor/vertex_handler/vertex_handler.py | 15 +++----------- 10 files changed, 40 insertions(+), 46 deletions(-) diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index 228791355..ea3afd8f7 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -614,7 +614,6 @@ def _get_or_create_vertex_handler(self) -> VertexHandler: tx_storage=self._get_or_create_tx_storage(), verification_service=self._get_or_create_verification_service(), consensus=self._get_or_create_consensus(), - p2p_manager=self._get_or_create_p2p_manager(), feature_service=self._get_or_create_feature_service(), pubsub=self._get_or_create_pubsub(), wallet=self._get_or_create_wallet(), diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index 493d31b6d..b2ffc747a 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -334,7 +334,6 @@ def create_manager(self, reactor: Reactor) -> HathorManager: tx_storage=tx_storage, verification_service=verification_service, consensus=consensus_algorithm, - p2p_manager=p2p_manager, feature_service=self.feature_service, pubsub=pubsub, wallet=self.wallet, diff --git a/hathor/manager.py b/hathor/manager.py index e7ee57592..cc86dd9dc 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -48,7 +48,6 @@ from hathor.p2p.manager import ConnectionsManager from hathor.p2p.peer import PrivatePeer from hathor.p2p.peer_id import PeerId -from hathor.profiler import get_cpu_profiler from hathor.pubsub import HathorEvents, PubSubManager from hathor.reactor import ReactorProtocol as Reactor from hathor.reward_lock import is_spent_reward_locked @@ -70,7 +69,6 @@ from hathor.websocket.factory import HathorAdminWebsocketFactory logger = get_logger() -cpu = get_cpu_profiler() class HathorManager: @@ -172,8 +170,6 @@ def __init__( self.is_started: bool = False - self.cpu = cpu - # XXX: first checkpoint must be genesis (height=0) self.checkpoints: list[Checkpoint] = checkpoints or [] self.checkpoints_ready: list[bool] = [False] * len(self.checkpoints) @@ -962,7 +958,6 @@ def propagate_tx(self, tx: BaseTransaction, fails_silently: bool = True) -> bool return self.on_new_tx(tx, fails_silently=fails_silently, propagate_to_peers=True) - @cpu.profiler('on_new_tx') def on_new_tx( self, tx: BaseTransaction, @@ -979,14 +974,18 @@ def on_new_tx( :param fails_silently: if False will raise an exception when tx cannot be added :param propagate_to_peers: if True will relay the tx to other peers if it is accepted """ - return self.vertex_handler.on_new_vertex( + success = self.vertex_handler.on_new_vertex( tx, quiet=quiet, fails_silently=fails_silently, - propagate_to_peers=propagate_to_peers, reject_locked_reward=reject_locked_reward, ) + if propagate_to_peers and success: + self.connections.send_tx_to_peers(tx) + + return success + def has_sync_version_capability(self) -> bool: return self._settings.CAPABILITY_SYNC_VERSION in self.capabilities diff 
--git a/hathor/p2p/sync_v1/agent.py b/hathor/p2p/sync_v1/agent.py index 72ef6b6d0..68fe401ec 100644 --- a/hathor/p2p/sync_v1/agent.py +++ b/hathor/p2p/sync_v1/agent.py @@ -636,8 +636,10 @@ def handle_data(self, payload: str) -> None: self.log.info('tx received in real time from peer', tx=tx.hash_hex, peer=self.protocol.get_peer_id()) # If we have not requested the data, it is a new transaction being propagated # in the network, thus, we propagate it as well. - result = self.manager.on_new_tx(tx, propagate_to_peers=True) - self.update_received_stats(tx, result) + success = self.manager.vertex_handler.on_new_vertex(tx) + if success: + self.protocol.connections.send_tx_to_peers(tx) + self.update_received_stats(tx, success) def update_received_stats(self, tx: 'BaseTransaction', result: bool) -> None: """ Update protocol metrics when receiving a new tx @@ -685,7 +687,9 @@ def on_tx_success(self, tx: 'BaseTransaction') -> 'BaseTransaction': success = True else: # Add tx to the DAG. - success = self.manager.on_new_tx(tx) + success = self.manager.vertex_handler.on_new_vertex(tx) + if success: + self.protocol.connections.send_tx_to_peers(tx) # Updating stats data self.update_received_stats(tx, success) return tx diff --git a/hathor/p2p/sync_v2/agent.py b/hathor/p2p/sync_v2/agent.py index b27ced303..5393080b4 100644 --- a/hathor/p2p/sync_v2/agent.py +++ b/hathor/p2p/sync_v2/agent.py @@ -488,7 +488,9 @@ def handle_tips(self, payload: str) -> None: data = [bytes.fromhex(x) for x in data] # filter-out txs we already have try: - self._receiving_tips.extend(VertexId(tx_id) for tx_id in data if not self.partial_vertex_exists(tx_id)) + self._receiving_tips.extend( + VertexId(tx_id) for tx_id in data if not self.tx_storage.partial_vertex_exists(tx_id) + ) except ValueError: self.protocol.send_error_and_close_connection('Invalid trasaction ID received') # XXX: it's OK to do this *after* the extend because the payload is limited by the line protocol @@ -553,12 +555,6 @@ def send_message(self, cmd: ProtocolMessages, payload: Optional[str] = None) -> assert self.protocol.state is not None self.protocol.state.send_message(cmd, payload) - def partial_vertex_exists(self, vertex_id: VertexId) -> bool: - """ Return true if the vertex exists no matter its validation state. 
- """ - with self.tx_storage.allow_partially_validated_context(): - return self.tx_storage.transaction_exists(vertex_id) - @inlineCallbacks def find_best_common_block(self, my_best_block: _HeightInfo, @@ -621,11 +617,11 @@ def on_block_complete(self, blk: Block, vertex_list: list[BaseTransaction]) -> G try: for tx in vertex_list: if not self.tx_storage.transaction_exists(tx.hash): - self.vertex_handler.on_new_vertex(tx, propagate_to_peers=False, fails_silently=False) + self.vertex_handler.on_new_vertex(tx, fails_silently=False) yield deferLater(self.reactor, 0, lambda: None) if not self.tx_storage.transaction_exists(blk.hash): - self.vertex_handler.on_new_vertex(blk, propagate_to_peers=False, fails_silently=False) + self.vertex_handler.on_new_vertex(blk, fails_silently=False) except InvalidNewTransaction: self.protocol.send_error_and_close_connection('invalid vertex received') @@ -1163,7 +1159,7 @@ def handle_data(self, payload: str) -> None: tx.storage = self.protocol.node.tx_storage - if self.partial_vertex_exists(tx.hash): + if self.tx_storage.partial_vertex_exists(tx.hash): # transaction already added to the storage, ignore it # XXX: maybe we could add a hash blacklist and punish peers propagating known bad txs self.tx_storage.compare_bytes_with_local_tx(tx) @@ -1174,7 +1170,9 @@ def handle_data(self, payload: str) -> None: if self.tx_storage.can_validate_full(tx): self.log.debug('tx received in real time from peer', tx=tx.hash_hex, peer=self.protocol.get_peer_id()) try: - self.vertex_handler.on_new_vertex(tx, propagate_to_peers=True, fails_silently=False) + success = self.vertex_handler.on_new_vertex(tx, fails_silently=False) + if success: + self.protocol.connections.send_tx_to_peers(tx) except InvalidNewTransaction: self.protocol.send_error_and_close_connection('invalid vertex received') else: diff --git a/hathor/p2p/sync_v2/blockchain_streaming_client.py b/hathor/p2p/sync_v2/blockchain_streaming_client.py index 6f0a3f236..e78ec056b 100644 --- a/hathor/p2p/sync_v2/blockchain_streaming_client.py +++ b/hathor/p2p/sync_v2/blockchain_streaming_client.py @@ -27,7 +27,6 @@ from hathor.p2p.sync_v2.streamers import StreamEnd from hathor.transaction import Block from hathor.transaction.exceptions import HathorError -from hathor.types import VertexId if TYPE_CHECKING: from hathor.p2p.sync_v2.agent import NodeBlockSync, _HeightInfo @@ -75,11 +74,6 @@ def fails(self, reason: 'StreamingError') -> None: """Fail the execution by resolving the deferred with an error.""" self._deferred.errback(reason) - def partial_vertex_exists(self, vertex_id: VertexId) -> bool: - """Return true if the vertex exists no matter its validation state.""" - with self.tx_storage.allow_partially_validated_context(): - return self.tx_storage.transaction_exists(vertex_id) - def handle_blocks(self, blk: Block) -> None: """This method is called by the sync agent when a BLOCKS message is received.""" if self._deferred.called: @@ -105,7 +99,7 @@ def handle_blocks(self, blk: Block) -> None: # Check for repeated blocks. is_duplicated = False - if self.partial_vertex_exists(blk.hash): + if self.tx_storage.partial_vertex_exists(blk.hash): # We reached a block we already have. Skip it. 
self._blk_repeated += 1 is_duplicated = True @@ -132,7 +126,7 @@ def handle_blocks(self, blk: Block) -> None: if self.tx_storage.can_validate_full(blk): try: - self.vertex_handler.on_new_vertex(blk, propagate_to_peers=False, fails_silently=False) + self.vertex_handler.on_new_vertex(blk, fails_silently=False) except HathorError: self.fails(InvalidVertexError(blk.hash.hex())) return diff --git a/hathor/p2p/sync_v2/mempool.py b/hathor/p2p/sync_v2/mempool.py index 806c16849..03651642e 100644 --- a/hathor/p2p/sync_v2/mempool.py +++ b/hathor/p2p/sync_v2/mempool.py @@ -140,7 +140,9 @@ def _add_tx(self, tx: BaseTransaction) -> None: if self.tx_storage.transaction_exists(tx.hash): return try: - self.vertex_handler.on_new_vertex(tx, fails_silently=False) + success = self.vertex_handler.on_new_vertex(tx, fails_silently=False) + if success: + self.sync_agent.protocol.connections.send_tx_to_peers(tx) except InvalidNewTransaction: self.sync_agent.protocol.send_error_and_close_connection('invalid vertex received') raise diff --git a/hathor/profiler/cpu.py b/hathor/profiler/cpu.py index 63064df7e..34bad0512 100644 --- a/hathor/profiler/cpu.py +++ b/hathor/profiler/cpu.py @@ -15,12 +15,15 @@ import time from collections import defaultdict from functools import wraps -from typing import Any, Callable, Union +from typing import Callable, ParamSpec, TypeVar, Union from twisted.internet.task import LoopingCall Key = tuple[str, ...] +T = TypeVar('T') +P = ParamSpec('P') + class ProcItem: """Store information for each process.""" @@ -184,7 +187,7 @@ def update(self) -> None: t1 = time.process_time() self.measures[('profiler',)].add_time(t1 - t0) - def profiler(self, key: Union[str, Callable[..., str]]) -> Callable[[Callable[..., Any]], Any]: + def profiler(self, key: Union[str, Callable[..., str]]) -> Callable[[Callable[P, T]], Callable[P, T]]: """Decorator to collect data. The `key` must be the key itself or a method that returns the key. 
diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index 653a0041f..a6ee50aa9 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -1132,6 +1132,11 @@ def can_validate_full(self, vertex: Vertex) -> bool: return True return all_exist and all_valid + def partial_vertex_exists(self, vertex_id: VertexId) -> bool: + """Return true if the vertex exists no matter its validation state.""" + with self.allow_partially_validated_context(): + return self.transaction_exists(vertex_id) + class BaseTransactionStorage(TransactionStorage): indexes: Optional[IndexesManager] diff --git a/hathor/vertex_handler/vertex_handler.py b/hathor/vertex_handler/vertex_handler.py index 473516a56..59650e83e 100644 --- a/hathor/vertex_handler/vertex_handler.py +++ b/hathor/vertex_handler/vertex_handler.py @@ -20,7 +20,7 @@ from hathor.consensus import ConsensusAlgorithm from hathor.exception import HathorError, InvalidNewTransaction from hathor.feature_activation.feature_service import FeatureService -from hathor.p2p.manager import ConnectionsManager +from hathor.profiler import get_cpu_profiler from hathor.pubsub import HathorEvents, PubSubManager from hathor.reactor import ReactorProtocol from hathor.transaction import BaseTransaction, Block @@ -30,6 +30,7 @@ from hathor.wallet import BaseWallet logger = get_logger() +cpu = get_cpu_profiler() class VertexHandler: @@ -40,7 +41,6 @@ class VertexHandler: '_tx_storage', '_verification_service', '_consensus', - '_p2p_manager', '_feature_service', '_pubsub', '_wallet', @@ -55,7 +55,6 @@ def __init__( tx_storage: TransactionStorage, verification_service: VerificationService, consensus: ConsensusAlgorithm, - p2p_manager: ConnectionsManager, feature_service: FeatureService, pubsub: PubSubManager, wallet: BaseWallet | None, @@ -67,19 +66,18 @@ def __init__( self._tx_storage = tx_storage self._verification_service = verification_service self._consensus = consensus - self._p2p_manager = p2p_manager self._feature_service = feature_service self._pubsub = pubsub self._wallet = wallet self._log_vertex_bytes = log_vertex_bytes + @cpu.profiler('on_new_vertex') def on_new_vertex( self, vertex: BaseTransaction, *, quiet: bool = False, fails_silently: bool = True, - propagate_to_peers: bool = True, reject_locked_reward: bool = True, ) -> bool: """ New method for adding transactions or blocks that steps the validation state machine. @@ -87,7 +85,6 @@ def on_new_vertex( :param vertex: transaction to be added :param quiet: if True will not log when a new tx is accepted :param fails_silently: if False will raise an exception when tx cannot be added - :param propagate_to_peers: if True will relay the tx to other peers if it is accepted """ is_valid = self._validate_vertex( vertex, @@ -102,7 +99,6 @@ def on_new_vertex( self._post_consensus( vertex, quiet=quiet, - propagate_to_peers=propagate_to_peers, reject_locked_reward=reject_locked_reward ) @@ -177,7 +173,6 @@ def _post_consensus( vertex: BaseTransaction, *, quiet: bool, - propagate_to_peers: bool, reject_locked_reward: bool, ) -> None: """ Handle operations that need to happen once the tx becomes fully validated. @@ -208,10 +203,6 @@ def _post_consensus( self._log_new_object(vertex, 'new {}', quiet=quiet) - if propagate_to_peers: - # Propagate to our peers. 
- self._p2p_manager.send_tx_to_peers(vertex) - def _log_new_object(self, tx: BaseTransaction, message_fmt: str, *, quiet: bool) -> None: """ A shortcut for logging additional information for block/txs. """ From 550767bcb08abc69456e8954b690a9887eadb9b8 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Tue, 29 Oct 2024 12:39:51 -0300 Subject: [PATCH 42/61] chore: remove all unused features (#1150) --- hathor/conf/mainnet.py | 36 ------------------------ hathor/conf/mainnet.yml | 42 ---------------------------- hathor/feature_activation/feature.py | 8 +----- 3 files changed, 1 insertion(+), 85 deletions(-) diff --git a/hathor/conf/mainnet.py b/hathor/conf/mainnet.py index a0cc0372d..f580f9481 100644 --- a/hathor/conf/mainnet.py +++ b/hathor/conf/mainnet.py @@ -201,42 +201,6 @@ ])), FEATURE_ACTIVATION=FeatureActivationSettings( features={ - Feature.NOP_FEATURE_1: Criteria( - bit=0, - start_height=4_213_440, # N - timeout_height=4_253_760, # N + 2 * 20160 (2 weeks after the start) - minimum_activation_height=4_273_920, # N + 3 * 20160 (3 weeks after the start) - lock_in_on_timeout=False, - version='0.59.0', - signal_support_by_default=True, - ), - Feature.NOP_FEATURE_2: Criteria( - bit=1, - start_height=4_213_440, # N - timeout_height=4_253_760, # N + 2 * 20160 (2 weeks after the start) - minimum_activation_height=0, - lock_in_on_timeout=False, - version='0.59.0', - signal_support_by_default=False, - ), - Feature.NOP_FEATURE_3: Criteria( - bit=2, - start_height=4_273_920, # N (on 2024/02/22, the best block is 4_251_000 on mainnet) - timeout_height=4_475_520, # N + 10 * 20160 (10 weeks after the start) - minimum_activation_height=4_495_680, # N + 11 * 20160 (11 weeks after the start) - lock_in_on_timeout=False, - version='0.59.0', - signal_support_by_default=True, - ), - Feature.NOP_FEATURE_4: Criteria( - bit=3, - start_height=4_273_920, # N (on 2024/02/22, the best block is 4_251_000 on mainnet) - timeout_height=4_475_520, # N + 10 * 20160 (10 weeks after the start) - minimum_activation_height=0, - lock_in_on_timeout=False, - version='0.59.0', - signal_support_by_default=False, - ), Feature.INCREASE_MAX_MERKLE_PATH_LENGTH: Criteria( bit=0, # N = 4_475_520 diff --git a/hathor/conf/mainnet.yml b/hathor/conf/mainnet.yml index dea15cf56..d32845449 100644 --- a/hathor/conf/mainnet.yml +++ b/hathor/conf/mainnet.yml @@ -182,48 +182,6 @@ SOFT_VOIDED_TX_IDS: FEATURE_ACTIVATION: features: - #### First Phased Testing features on mainnet #### - - NOP_FEATURE_1: - bit: 0 - start_height: 4_213_440 # N - timeout_height: 4_253_760 # N + 2 * 20160 (2 weeks after the start) - minimum_activation_height: 4_273_920 # N + 3 * 20160 (3 weeks after the start) - lock_in_on_timeout: false - version: 0.59.0 - signal_support_by_default: true - - NOP_FEATURE_2: - bit: 1 - start_height: 4_213_440 # N - timeout_height: 4_253_760 # N + 2 * 20160 (2 weeks after the start) - minimum_activation_height: 0 - lock_in_on_timeout: false - version: 0.59.0 - signal_support_by_default: false - - #### Second Phased Testing features on mainnet #### - - NOP_FEATURE_3: - bit: 2 - start_height: 4_273_920 # N (on 2024/02/22, the best block is 4_251_000 on mainnet) - timeout_height: 4_475_520 # N + 10 * 20160 (10 weeks after the start) - minimum_activation_height: 4_495_680 # N + 11 * 20160 (11 weeks after the start) - lock_in_on_timeout: false - version: 0.59.0 - signal_support_by_default: true - - NOP_FEATURE_4: - bit: 3 - start_height: 4_273_920 # N (on 2024/02/22, the best block is 4_251_000 on mainnet) - timeout_height: 4_475_520 # N 
+ 10 * 20160 (10 weeks after the start) - minimum_activation_height: 0 - lock_in_on_timeout: false - version: 0.59.0 - signal_support_by_default: false - - #### Actual features #### - INCREASE_MAX_MERKLE_PATH_LENGTH: bit: 0 # N = 4_475_520 diff --git a/hathor/feature_activation/feature.py b/hathor/feature_activation/feature.py index 05b08226e..58a51a3f5 100644 --- a/hathor/feature_activation/feature.py +++ b/hathor/feature_activation/feature.py @@ -23,15 +23,9 @@ class Feature(str, Enum): should NOT be changed either, as configuration uses them for setting feature activation criteria. """ - # Mainnet Phased Testing features + # These NOP features are used in tests NOP_FEATURE_1 = 'NOP_FEATURE_1' NOP_FEATURE_2 = 'NOP_FEATURE_2' NOP_FEATURE_3 = 'NOP_FEATURE_3' - # TODO: Those can be removed in a future PR - # Testnet Phased Testing features - NOP_FEATURE_4 = 'NOP_FEATURE_4' - NOP_FEATURE_5 = 'NOP_FEATURE_5' - NOP_FEATURE_6 = 'NOP_FEATURE_6' - INCREASE_MAX_MERKLE_PATH_LENGTH = 'INCREASE_MAX_MERKLE_PATH_LENGTH' From 4200cc234529de66ed625dc49b4966eb783a7efa Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Fri, 25 Oct 2024 15:51:40 +0200 Subject: [PATCH 43/61] chore: remove the limited support there was for Windows --- .github/workflows/main.yml | 2 +- hathor/cli/main.py | 5 ++--- hathor/cli/run_node.py | 13 +++++++------ hathor/cli/top.py | 4 ++-- hathor/reactor/reactor.py | 5 ----- poetry.lock | 4 ++-- pyproject.toml | 3 +-- tests/conftest.py | 5 ----- tests/p2p/test_connections.py | 5 ----- tests/resources/test_profiler.py | 3 --- tests/tx/test_prometheus.py | 4 ---- 11 files changed, 15 insertions(+), 38 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 54293da10..fd0c39948 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -25,7 +25,7 @@ jobs: full_matrix = { 'python': ['3.10', '3.11', '3.12'], # available OS's: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idruns-on - 'os': ['ubuntu-22.04', 'macos-12', 'windows-2022'], + 'os': ['ubuntu-22.04', 'macos-12'], } # this is the fastest one: reduced_matrix = { diff --git a/hathor/cli/main.py b/hathor/cli/main.py index 6a597745b..41e77ad05 100644 --- a/hathor/cli/main.py +++ b/hathor/cli/main.py @@ -58,6 +58,7 @@ def __init__(self) -> None: shell, side_dag, stratum_mining, + top, twin_tx, tx_generator, wallet, @@ -70,9 +71,7 @@ def __init__(self) -> None: self.add_cmd('mining', 'run_stratum_miner', stratum_mining, 'Run a mining process (running node required)') self.add_cmd('hathor', 'run_node', run_node, 'Run a node') self.add_cmd('hathor', 'gen_peer_id', peer_id, 'Generate a new random peer-id') - if sys.platform != 'win32': - from . 
import top - self.add_cmd('hathor', 'top', top, 'CPU profiler viewer') + self.add_cmd('hathor', 'top', top, 'CPU profiler viewer') self.add_cmd('side-dag', 'run_node_with_side_dag', side_dag, 'Run a side-dag') self.add_cmd('side-dag', 'gen_poa_keys', generate_poa_keys, 'Generate a private/public key pair and its ' 'address to be used in Proof-of-Authority') diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 9dbc3005c..9498194ab 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -165,7 +165,10 @@ def create_parser(cls) -> ArgumentParser: return parser def prepare(self, *, register_resources: bool = True) -> None: + import resource + from setproctitle import setproctitle + setproctitle('{}hathor-core'.format(self._args.procname_prefix)) if self._args.recursion_limit: @@ -173,12 +176,10 @@ def prepare(self, *, register_resources: bool = True) -> None: else: sys.setrecursionlimit(5000) - if sys.platform != 'win32': - import resource - (nofile_soft, _) = resource.getrlimit(resource.RLIMIT_NOFILE) - if nofile_soft < 256: - print('Maximum number of open file descriptors is too low. Minimum required is 256.') - sys.exit(-2) + (nofile_soft, _) = resource.getrlimit(resource.RLIMIT_NOFILE) + if nofile_soft < 256: + print('Maximum number of open file descriptors is too low. Minimum required is 256.') + sys.exit(-2) self.check_unsafe_arguments() self.check_python_version() diff --git a/hathor/cli/top.py b/hathor/cli/top.py index 4adfac8f3..bae913709 100644 --- a/hathor/cli/top.py +++ b/hathor/cli/top.py @@ -24,8 +24,8 @@ from math import floor from typing import Any, Callable, Optional -# XXX: as annoying as it is, a simple `if: raise` is not enough, but putting the whole module inside works -if sys.platform != 'win32': +# XXX: support for Windows removed, this should be un-indented +if True: import curses import curses.ascii diff --git a/hathor/reactor/reactor.py b/hathor/reactor/reactor.py index b1ff7e4d7..b92c80062 100644 --- a/hathor/reactor/reactor.py +++ b/hathor/reactor/reactor.py @@ -55,15 +55,10 @@ def initialize_global_reactor(*, use_asyncio_reactor: bool = False) -> ReactorPr if use_asyncio_reactor: import asyncio - import sys from twisted.internet import asyncioreactor from twisted.internet.error import ReactorAlreadyInstalledError - if sys.platform == 'win32': - # See: https://docs.twistedmatrix.com/en/twisted-22.10.0/api/twisted.internet.asyncioreactor.AsyncioSelectorReactor.html # noqa: E501 - asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) - try: asyncioreactor.install(asyncio.get_event_loop()) except ReactorAlreadyInstalledError as e: diff --git a/poetry.lock b/poetry.lock index 5782686ae..2ae3ad105 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. 
[[package]] name = "aiohappyeyeballs" @@ -2486,4 +2486,4 @@ sentry = ["sentry-sdk", "structlog-sentry"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<4" -content-hash = "cbdab9a3fa79583a3fb818013dc26a7c4dbcc78a5d832f59239334128718c37a" +content-hash = "05a728b943ae8b639bbb369f400bb7ed5b6c0c5205abaf355194c7168b4798c7" diff --git a/pyproject.toml b/pyproject.toml index 70fa83eed..67415b6b4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,13 +64,12 @@ mnemonic = "~0.20" prometheus_client = "~0.15.0" pyopenssl = "=24.2.1" pycoin = "~0.92.20230326" -pywin32 = {version = "306", markers = "sys_platform == 'win32'"} requests = "=2.32.3" service_identity = "~21.1.0" pexpect = "~4.8.0" intervaltree = "~3.1.0" structlog = "~22.3.0" -rocksdb = {git = "https://github.com/hathornetwork/python-rocksdb.git", markers = "sys_platform != 'win32'"} +rocksdb = {git = "https://github.com/hathornetwork/python-rocksdb.git"} aiohttp = "~3.10.3" idna = "~3.4" setproctitle = "^1.3.3" diff --git a/tests/conftest.py b/tests/conftest.py index 33fb90950..be711c139 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,14 +1,9 @@ import os -import sys from hathor.conf import UNITTESTS_SETTINGS_FILEPATH from hathor.reactor import initialize_global_reactor os.environ['HATHOR_CONFIG_YAML'] = os.environ.get('HATHOR_TEST_CONFIG_YAML', UNITTESTS_SETTINGS_FILEPATH) -if sys.platform == 'win32': - # XXX: because rocksdb isn't available on Windows, we force using memory-storage for tests so most of them can run - os.environ['HATHOR_TEST_MEMORY_STORAGE'] = 'true' - # TODO: We should remove this call from the module level. initialize_global_reactor(use_asyncio_reactor=True) diff --git a/tests/p2p/test_connections.py b/tests/p2p/test_connections.py index a9e33b79f..570424c84 100644 --- a/tests/p2p/test_connections.py +++ b/tests/p2p/test_connections.py @@ -1,14 +1,9 @@ -import sys - -import pytest - from hathor.p2p.entrypoint import Entrypoint from tests import unittest from tests.utils import run_server class ConnectionsTest(unittest.TestCase): - @pytest.mark.skipif(sys.platform == 'win32', reason='run_server is very finicky on Windows') def test_connections(self) -> None: process = run_server() process2 = run_server(listen=8006, status=8086, bootstrap='tcp://127.0.0.1:8005') diff --git a/tests/resources/test_profiler.py b/tests/resources/test_profiler.py index 0004428ea..8dfa5c333 100644 --- a/tests/resources/test_profiler.py +++ b/tests/resources/test_profiler.py @@ -1,10 +1,8 @@ import os import re import shutil -import sys import tempfile -import pytest from twisted.internet.defer import inlineCallbacks from hathor.profiler.resources import ProfilerResource @@ -19,7 +17,6 @@ def setUp(self): super().setUp() self.web = StubSite(ProfilerResource(self.manager)) - @pytest.mark.skipif(sys.platform == 'win32', reason='shutil.rmtree fails on Windows') @inlineCallbacks def test_post(self): # Options diff --git a/tests/tx/test_prometheus.py b/tests/tx/test_prometheus.py index 529b7e48f..5783db6ca 100644 --- a/tests/tx/test_prometheus.py +++ b/tests/tx/test_prometheus.py @@ -1,10 +1,7 @@ import os import shutil -import sys import tempfile -import pytest - from hathor.prometheus import PrometheusMetricsExporter from hathor.simulator.utils import add_new_blocks from tests import unittest @@ -20,7 +17,6 @@ def setUp(self): self.network = 'testnet' self.manager = self.create_peer(self.network, unlock_wallet=True) - @pytest.mark.skipif(sys.platform == 'win32', reason='set_new_metrics fails on Windows') def 
test_wallet(self): tmpdir = tempfile.mkdtemp() tmpfile = tempfile.NamedTemporaryFile(dir=tmpdir, suffix='.prom', delete=False) From 487e7319b72620f1e7833103cb68c5841fd3ae50 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Thu, 7 Nov 2024 11:41:28 -0300 Subject: [PATCH 44/61] refactor(p2p): refactor peer address handling (#1173) --- hathor/builder/cli_builder.py | 4 +- hathor/p2p/entrypoint.py | 215 --------------- hathor/p2p/manager.py | 48 ++-- hathor/p2p/peer.py | 67 ++--- hathor/p2p/peer_discovery/bootstrap.py | 8 +- hathor/p2p/peer_discovery/dns.py | 38 +-- hathor/p2p/peer_discovery/peer_discovery.py | 4 +- hathor/p2p/peer_endpoint.py | 277 ++++++++++++++++++++ hathor/p2p/protocol.py | 61 +++-- hathor/p2p/resources/add_peers.py | 13 +- hathor/p2p/states/peer_id.py | 24 +- hathor/p2p/utils.py | 4 +- hathor/simulator/fake_connection.py | 68 ++++- tests/others/test_metrics.py | 4 +- tests/p2p/test_bootstrap.py | 7 +- tests/p2p/test_connections.py | 7 +- tests/p2p/test_peer_id.py | 74 +++++- tests/p2p/test_protocol.py | 201 +++++++++++++- tests/resources/p2p/test_add_peer.py | 4 +- tests/resources/p2p/test_status.py | 12 +- 20 files changed, 747 insertions(+), 393 deletions(-) delete mode 100644 hathor/p2p/entrypoint.py create mode 100644 hathor/p2p/peer_endpoint.py diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index b2ffc747a..464d9b319 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -34,9 +34,9 @@ from hathor.indexes import IndexesManager, MemoryIndexesManager, RocksDBIndexesManager from hathor.manager import HathorManager from hathor.mining.cpu_mining_service import CpuMiningService -from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.manager import ConnectionsManager from hathor.p2p.peer import PrivatePeer +from hathor.p2p.peer_endpoint import PeerEndpoint from hathor.p2p.utils import discover_hostname, get_genesis_short_hash from hathor.pubsub import PubSubManager from hathor.reactor import ReactorProtocol as Reactor @@ -420,7 +420,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: p2p_manager.add_peer_discovery(DNSPeerDiscovery(dns_hosts)) if self._args.bootstrap: - entrypoints = [Entrypoint.parse(desc) for desc in self._args.bootstrap] + entrypoints = [PeerEndpoint.parse(desc) for desc in self._args.bootstrap] p2p_manager.add_peer_discovery(BootstrapPeerDiscovery(entrypoints)) if self._args.x_rocksdb_indexes: diff --git a/hathor/p2p/entrypoint.py b/hathor/p2p/entrypoint.py deleted file mode 100644 index 23ead1199..000000000 --- a/hathor/p2p/entrypoint.py +++ /dev/null @@ -1,215 +0,0 @@ -# Copyright 2024 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from dataclasses import dataclass -from enum import Enum -from urllib.parse import parse_qs, urlparse - -from twisted.internet.address import IPv4Address, IPv6Address -from twisted.internet.endpoints import TCP4ClientEndpoint -from twisted.internet.interfaces import IStreamClientEndpoint -from typing_extensions import Self - -from hathor.p2p.peer_id import PeerId -from hathor.reactor import ReactorProtocol as Reactor - - -class Protocol(Enum): - TCP = 'tcp' - - -@dataclass(frozen=True, slots=True) -class Entrypoint: - """Endpoint description (returned from DNS query, or received from the p2p network) may contain a peer-id.""" - - protocol: Protocol - host: str - port: int - peer_id: PeerId | None = None - - def __str__(self): - if self.peer_id is None: - return f'{self.protocol.value}://{self.host}:{self.port}' - else: - return f'{self.protocol.value}://{self.host}:{self.port}/?id={self.peer_id}' - - @classmethod - def parse(cls, description: str) -> Self: - """Parse endpoint description into an Entrypoint object. - - Examples: - - >>> str(Entrypoint.parse('tcp://127.0.0.1:40403/')) - 'tcp://127.0.0.1:40403' - - >>> id1 = 'c0f19299c2a4dcbb6613a14011ff07b63d6cb809e4cee25e9c1ccccdd6628696' - >>> Entrypoint.parse(f'tcp://127.0.0.1:40403/?id={id1}') - Entrypoint(protocol=, host='127.0.0.1', port=40403, \ -peer_id=PeerId('c0f19299c2a4dcbb6613a14011ff07b63d6cb809e4cee25e9c1ccccdd6628696')) - - >>> str(Entrypoint.parse(f'tcp://127.0.0.1:40403/?id={id1}')) - 'tcp://127.0.0.1:40403/?id=c0f19299c2a4dcbb6613a14011ff07b63d6cb809e4cee25e9c1ccccdd6628696' - - >>> Entrypoint.parse('tcp://127.0.0.1:40403') - Entrypoint(protocol=, host='127.0.0.1', port=40403, peer_id=None) - - >>> Entrypoint.parse('tcp://127.0.0.1:40403/') - Entrypoint(protocol=, host='127.0.0.1', port=40403, peer_id=None) - - >>> Entrypoint.parse('tcp://foo.bar.baz:40403/') - Entrypoint(protocol=, host='foo.bar.baz', port=40403, peer_id=None) - - >>> str(Entrypoint.parse('tcp://foo.bar.baz:40403/')) - 'tcp://foo.bar.baz:40403' - - >>> Entrypoint.parse('tcp://127.0.0.1:40403/?id=123') - Traceback (most recent call last): - ... - ValueError: non-hexadecimal number found in fromhex() arg at position 3 - - >>> Entrypoint.parse('tcp://127.0.0.1:4040f') - Traceback (most recent call last): - ... - ValueError: Port could not be cast to integer value as '4040f' - - >>> Entrypoint.parse('udp://127.0.0.1:40403/') - Traceback (most recent call last): - ... - ValueError: 'udp' is not a valid Protocol - - >>> Entrypoint.parse('tcp://127.0.0.1/') - Traceback (most recent call last): - ... - ValueError: expected a port - - >>> Entrypoint.parse('tcp://:40403/') - Traceback (most recent call last): - ... - ValueError: expected a host - - >>> Entrypoint.parse('tcp://127.0.0.1:40403/foo') - Traceback (most recent call last): - ... - ValueError: unexpected path: /foo - - >>> id2 = 'bc5119d47bb4ea7c19100bd97fb11f36970482108bd3d45ff101ee4f6bbec872' - >>> Entrypoint.parse(f'tcp://127.0.0.1:40403/?id={id1}&id={id2}') - Traceback (most recent call last): - ... 
- ValueError: unexpected id count: 2 - """ - url = urlparse(description) - protocol = Protocol(url.scheme) - host = url.hostname - if host is None: - raise ValueError('expected a host') - port = url.port - if port is None: - raise ValueError('expected a port') - if url.path not in {'', '/'}: - raise ValueError(f'unexpected path: {url.path}') - peer_id: PeerId | None = None - - if url.query: - query = parse_qs(url.query) - if 'id' in query: - ids = query['id'] - if len(ids) != 1: - raise ValueError(f'unexpected id count: {len(ids)}') - peer_id = PeerId(ids[0]) - - return cls(protocol, host, port, peer_id) - - @classmethod - def from_hostname_address(cls, hostname: str, address: IPv4Address | IPv6Address) -> Self: - return cls.parse(f'{address.type}://{hostname}:{address.port}') - - def to_client_endpoint(self, reactor: Reactor) -> IStreamClientEndpoint: - """This method generates a twisted client endpoint that has a .connect() method.""" - # XXX: currently we don't support IPv6, but when we do we have to decide between TCP4ClientEndpoint and - # TCP6ClientEndpoint, when the host is an IP address that is easy, but when it is a DNS hostname, we will not - # know which to use until we know which resource records it holds (A or AAAA) - return TCP4ClientEndpoint(reactor, self.host, self.port) - - def equals_ignore_peer_id(self, other: Self) -> bool: - """Compares `self` and `other` ignoring the `peer_id` fields of either. - - Examples: - - >>> ep1 = 'tcp://foo:111' - >>> ep2 = 'tcp://foo:111/?id=c0f19299c2a4dcbb6613a14011ff07b63d6cb809e4cee25e9c1ccccdd6628696' - >>> ep3 = 'tcp://foo:111/?id=bc5119d47bb4ea7c19100bd97fb11f36970482108bd3d45ff101ee4f6bbec872' - >>> ep4 = 'tcp://bar:111/?id=c0f19299c2a4dcbb6613a14011ff07b63d6cb809e4cee25e9c1ccccdd6628696' - >>> ep5 = 'tcp://foo:112/?id=c0f19299c2a4dcbb6613a14011ff07b63d6cb809e4cee25e9c1ccccdd6628696' - >>> Entrypoint.parse(ep1).equals_ignore_peer_id(Entrypoint.parse(ep2)) - True - >>> Entrypoint.parse(ep2).equals_ignore_peer_id(Entrypoint.parse(ep3)) - True - >>> Entrypoint.parse(ep1).equals_ignore_peer_id(Entrypoint.parse(ep4)) - False - >>> Entrypoint.parse(ep2).equals_ignore_peer_id(Entrypoint.parse(ep4)) - False - >>> Entrypoint.parse(ep2).equals_ignore_peer_id(Entrypoint.parse(ep5)) - False - """ - return (self.protocol, self.host, self.port) == (other.protocol, other.host, other.port) - - def peer_id_conflicts_with(self, other: Self) -> bool: - """Returns True if both self and other have a peer_id and they are different, returns False otherwise. - - This method ignores the host. Which is useful for catching the cases where both `self` and `other` have a - declared `peer_id` and they are not equal. 
- - >>> desc_no_pid = 'tcp://127.0.0.1:40403/' - >>> ep_no_pid = Entrypoint.parse(desc_no_pid) - >>> desc_pid1 = 'tcp://127.0.0.1:40403/?id=c0f19299c2a4dcbb6613a14011ff07b63d6cb809e4cee25e9c1ccccdd6628696' - >>> ep_pid1 = Entrypoint.parse(desc_pid1) - >>> desc_pid2 = 'tcp://127.0.0.1:40403/?id=bc5119d47bb4ea7c19100bd97fb11f36970482108bd3d45ff101ee4f6bbec872' - >>> ep_pid2 = Entrypoint.parse(desc_pid2) - >>> desc2_pid2 = 'tcp://foo.bar:40403/?id=bc5119d47bb4ea7c19100bd97fb11f36970482108bd3d45ff101ee4f6bbec872' - >>> ep2_pid2 = Entrypoint.parse(desc2_pid2) - >>> ep_no_pid.peer_id_conflicts_with(ep_no_pid) - False - >>> ep_no_pid.peer_id_conflicts_with(ep_pid1) - False - >>> ep_pid1.peer_id_conflicts_with(ep_no_pid) - False - >>> ep_pid1.peer_id_conflicts_with(ep_pid2) - True - >>> ep_pid1.peer_id_conflicts_with(ep2_pid2) - True - >>> ep_pid2.peer_id_conflicts_with(ep2_pid2) - False - """ - return self.peer_id is not None and other.peer_id is not None and self.peer_id != other.peer_id - - def is_localhost(self) -> bool: - """Used to determine if the entrypoint host is a localhost address. - - Examples: - - >>> Entrypoint.parse('tcp://127.0.0.1:444').is_localhost() - True - >>> Entrypoint.parse('tcp://localhost:444').is_localhost() - True - >>> Entrypoint.parse('tcp://8.8.8.8:444').is_localhost() - False - >>> Entrypoint.parse('tcp://foo.bar:444').is_localhost() - False - """ - if self.host == '127.0.0.1': - return True - if self.host == 'localhost': - return True - return False diff --git a/hathor/p2p/manager.py b/hathor/p2p/manager.py index 1b94f92d8..d53c7be83 100644 --- a/hathor/p2p/manager.py +++ b/hathor/p2p/manager.py @@ -25,10 +25,10 @@ from twisted.web.client import Agent from hathor.conf.settings import HathorSettings -from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.netfilter.factory import NetfilterFactory from hathor.p2p.peer import PrivatePeer, PublicPeer, UnverifiedPeer from hathor.p2p.peer_discovery import PeerDiscovery +from hathor.p2p.peer_endpoint import PeerAddress, PeerEndpoint from hathor.p2p.peer_id import PeerId from hathor.p2p.peer_storage import UnverifiedPeerStorage, VerifiedPeerStorage from hathor.p2p.protocol import HathorProtocol @@ -60,7 +60,7 @@ class _SyncRotateInfo(NamedTuple): class _ConnectingPeer(NamedTuple): - entrypoint: Entrypoint + entrypoint: PeerEndpoint endpoint_deferred: Deferred @@ -370,7 +370,7 @@ def on_connection_failure(self, failure: Failure, peer: Optional[UnverifiedPeer endpoint: IStreamClientEndpoint) -> None: connecting_peer = self.connecting_peers[endpoint] entrypoint = connecting_peer.entrypoint - self.log.warn('connection failure', entrypoint=entrypoint, failure=failure.getErrorMessage()) + self.log.warn('connection failure', entrypoint=str(entrypoint), failure=failure.getErrorMessage()) self.connecting_peers.pop(endpoint) self.pubsub.publish( @@ -475,7 +475,7 @@ def iter_ready_connections(self) -> Iterable[HathorProtocol]: for conn in self.connected_peers.values(): yield conn - def iter_not_ready_endpoints(self) -> Iterable[Entrypoint]: + def iter_not_ready_endpoints(self) -> Iterable[PeerEndpoint]: """Iterate over not-ready connections.""" for connecting_peer in self.connecting_peers.values(): yield connecting_peer.entrypoint @@ -589,27 +589,28 @@ def connect_to_if_not_connected(self, peer: UnverifiedPeer | PublicPeer, now: in assert peer.id is not None if peer.info.can_retry(now): - self.connect_to(self.rng.choice(peer.info.entrypoints), peer) + addr = self.rng.choice(peer.info.entrypoints) + 
self.connect_to(addr.with_id(peer.id), peer) def _connect_to_callback( self, protocol: IProtocol, - peer: Optional[UnverifiedPeer | PublicPeer], + peer: UnverifiedPeer | PublicPeer | None, endpoint: IStreamClientEndpoint, - entrypoint: Entrypoint, + entrypoint: PeerEndpoint, ) -> None: """Called when we successfully connect to a peer.""" if isinstance(protocol, HathorProtocol): - protocol.on_outbound_connect(entrypoint) + protocol.on_outbound_connect(entrypoint, peer) else: assert isinstance(protocol, TLSMemoryBIOProtocol) assert isinstance(protocol.wrappedProtocol, HathorProtocol) - protocol.wrappedProtocol.on_outbound_connect(entrypoint) + protocol.wrappedProtocol.on_outbound_connect(entrypoint, peer) self.connecting_peers.pop(endpoint) def connect_to( self, - entrypoint: Entrypoint, + entrypoint: PeerEndpoint, peer: UnverifiedPeer | PublicPeer | None = None, use_ssl: bool | None = None, ) -> None: @@ -618,24 +619,27 @@ def connect_to( If `use_ssl` is True, then the connection will be wraped by a TLS. """ - if entrypoint.peer_id is not None and peer is not None and str(entrypoint.peer_id) != peer.id: + if entrypoint.peer_id is not None and peer is not None and entrypoint.peer_id != peer.id: self.log.debug('skipping because the entrypoint peer_id does not match the actual peer_id', - entrypoint=entrypoint) + entrypoint=str(entrypoint)) return for connecting_peer in self.connecting_peers.values(): - if connecting_peer.entrypoint.equals_ignore_peer_id(entrypoint): - self.log.debug('skipping because we are already connecting to this endpoint', entrypoint=entrypoint) + if connecting_peer.entrypoint.addr == entrypoint.addr: + self.log.debug( + 'skipping because we are already connecting to this endpoint', + entrypoint=str(entrypoint), + ) return - if self.localhost_only and not entrypoint.is_localhost(): - self.log.debug('skip because of simple localhost check', entrypoint=entrypoint) + if self.localhost_only and not entrypoint.addr.is_localhost(): + self.log.debug('skip because of simple localhost check', entrypoint=str(entrypoint)) return if use_ssl is None: use_ssl = self.use_ssl - endpoint = entrypoint.to_client_endpoint(self.reactor) + endpoint = entrypoint.addr.to_client_endpoint(self.reactor) factory: IProtocolFactory if use_ssl: @@ -650,9 +654,9 @@ def connect_to( deferred = endpoint.connect(factory) self.connecting_peers[endpoint] = _ConnectingPeer(entrypoint, deferred) - deferred.addCallback(self._connect_to_callback, peer, endpoint, entrypoint) # type: ignore - deferred.addErrback(self.on_connection_failure, peer, endpoint) # type: ignore - self.log.info('connect to', entrypoint=str(entrypoint), peer=str(peer)) + deferred.addCallback(self._connect_to_callback, peer, endpoint, entrypoint) + deferred.addErrback(self.on_connection_failure, peer, endpoint) + self.log.info('connecting to', entrypoint=str(entrypoint), peer=str(peer)) self.pubsub.publish( HathorEvents.NETWORK_PEER_CONNECTING, peer=peer, @@ -708,13 +712,13 @@ def update_hostname_entrypoints(self, *, old_hostname: str | None, new_hostname: assert self.manager is not None for address in self._listen_addresses: if old_hostname is not None: - old_entrypoint = Entrypoint.from_hostname_address(old_hostname, address) + old_entrypoint = PeerAddress.from_hostname_address(old_hostname, address) if old_entrypoint in self.my_peer.info.entrypoints: self.my_peer.info.entrypoints.remove(old_entrypoint) self._add_hostname_entrypoint(new_hostname, address) def _add_hostname_entrypoint(self, hostname: str, address: IPv4Address | IPv6Address) 
-> None: - hostname_entrypoint = Entrypoint.from_hostname_address(hostname, address) + hostname_entrypoint = PeerAddress.from_hostname_address(hostname, address) self.my_peer.info.entrypoints.append(hostname_entrypoint) def get_connection_to_drop(self, protocol: HathorProtocol) -> HathorProtocol: diff --git a/hathor/p2p/peer.py b/hathor/p2p/peer.py index 33aa617db..53f43369d 100644 --- a/hathor/p2p/peer.py +++ b/hathor/p2p/peer.py @@ -55,7 +55,7 @@ from hathor.conf.get_settings import get_global_settings from hathor.conf.settings import HathorSettings from hathor.daa import DifficultyAdjustmentAlgorithm -from hathor.p2p.entrypoint import Entrypoint +from hathor.p2p.peer_endpoint import PeerAddress, PeerEndpoint from hathor.p2p.peer_id import PeerId from hathor.p2p.utils import discover_dns, generate_certificate from hathor.util import not_none @@ -74,14 +74,6 @@ class PeerFlags(str, Enum): RETRIES_EXCEEDED = 'retries_exceeded' -def _parse_entrypoint(entrypoint_string: str) -> Entrypoint: - """ Helper function to parse an entrypoint from string.""" - entrypoint = Entrypoint.parse(entrypoint_string) - if entrypoint.peer_id is not None: - raise ValueError('do not add id= to peer.json entrypoints') - return entrypoint - - def _parse_pubkey(pubkey_string: str) -> rsa.RSAPublicKey: """ Helper function to parse a public key from string.""" public_key_der = base64.b64decode(pubkey_string) @@ -114,7 +106,7 @@ class PeerInfo: """ Stores entrypoint and connection attempts information. """ - entrypoints: list[Entrypoint] = field(default_factory=list) + entrypoints: list[PeerAddress] = field(default_factory=list) retry_timestamp: int = 0 # should only try connecting to this peer after this timestamp retry_interval: int = 5 # how long to wait for next connection retry. 
It will double for each failure retry_attempts: int = 0 # how many retries were made @@ -136,13 +128,11 @@ def _merge(self, other: PeerInfo) -> None: async def validate_entrypoint(self, protocol: HathorProtocol) -> bool: """ Validates if connection entrypoint is one of the peer entrypoints """ - found_entrypoint = False - # If has no entrypoints must be behind a NAT, so we add the flag to the connection if len(self.entrypoints) == 0: protocol.warning_flags.add(protocol.WarningFlags.NO_ENTRYPOINTS) # If there are no entrypoints, we don't need to validate it - found_entrypoint = True + return True # Entrypoint validation with connection string and connection host # Entrypoints have the format tcp://IP|name:port @@ -150,19 +140,13 @@ async def validate_entrypoint(self, protocol: HathorProtocol) -> bool: if protocol.entrypoint is not None: # Connection string has the format tcp://IP:port # So we must consider that the entrypoint could be in name format - if protocol.entrypoint.equals_ignore_peer_id(entrypoint): - # XXX: wrong peer-id should not make it into self.entrypoints - assert not protocol.entrypoint.peer_id_conflicts_with(entrypoint), 'wrong peer-id was added before' - # Found the entrypoint - found_entrypoint = True - break + if protocol.entrypoint.addr == entrypoint: + return True # TODO: don't use `daa.TEST_MODE` for this test_mode = not_none(DifficultyAdjustmentAlgorithm.singleton).TEST_MODE result = await discover_dns(entrypoint.host, test_mode) - if protocol.entrypoint in result: - # Found the entrypoint - found_entrypoint = True - break + if protocol.entrypoint.addr in [endpoint.addr for endpoint in result]: + return True else: # When the peer is the server part of the connection we don't have the full entrypoint description # So we can only validate the host from the protocol @@ -174,20 +158,13 @@ async def validate_entrypoint(self, protocol: HathorProtocol) -> bool: # Connection host has only the IP # So we must consider that the entrypoint could be in name format and we just validate the host if connection_host == entrypoint.host: - found_entrypoint = True - break + return True test_mode = not_none(DifficultyAdjustmentAlgorithm.singleton).TEST_MODE result = await discover_dns(entrypoint.host, test_mode) - if connection_host in [entrypoint.host for entrypoint in result]: - # Found the entrypoint - found_entrypoint = True - break + if connection_host in [entrypoint.addr.host for entrypoint in result]: + return True - if not found_entrypoint: - # In case the validation fails - return False - - return True + return False def increment_retry_attempt(self, now: int) -> None: """ Updates timestamp for next retry. @@ -242,9 +219,20 @@ def create_from_json(cls, data: dict[str, Any]) -> Self: It is to create an UnverifiedPeer from a peer connection. """ + peer_id = PeerId(data['id']) + endpoints = [] + + for endpoint_str in data.get('entrypoints', []): + # We have to parse using PeerEndpoint to be able to support older peers that still + # send the id in entrypoints, but we validate that they're sending the correct id. 
+ endpoint = PeerEndpoint.parse(endpoint_str) + if endpoint.peer_id is not None and endpoint.peer_id != peer_id: + raise ValueError(f'conflicting peer_id: {endpoint.peer_id} != {peer_id}') + endpoints.append(endpoint.addr) + return cls( - id=PeerId(data['id']), - info=PeerInfo(entrypoints=[_parse_entrypoint(e) for e in data.get('entrypoints', [])]), + id=peer_id, + info=PeerInfo(entrypoints=endpoints), ) def merge(self, other: UnverifiedPeer) -> None: @@ -364,12 +352,7 @@ def verify_signature(self, signature: bytes, data: bytes) -> bool: return True def validate(self) -> None: - """ Return `True` if the following conditions are valid: - (i) public key and private key matches; - (ii) the id matches with the public key. - - TODO(epnichols): Update docs. Only raises exceptions; doesn't return anything. - """ + """Calculate the PeerId based on the public key and raise an exception if it does not match.""" if self.id != self.calculate_id(): raise InvalidPeerIdException('id does not match public key') diff --git a/hathor/p2p/peer_discovery/bootstrap.py b/hathor/p2p/peer_discovery/bootstrap.py index a30970ae2..55b5e9f16 100644 --- a/hathor/p2p/peer_discovery/bootstrap.py +++ b/hathor/p2p/peer_discovery/bootstrap.py @@ -17,7 +17,7 @@ from structlog import get_logger from typing_extensions import override -from hathor.p2p.entrypoint import Entrypoint +from hathor.p2p.peer_endpoint import PeerEndpoint from .peer_discovery import PeerDiscovery @@ -28,15 +28,15 @@ class BootstrapPeerDiscovery(PeerDiscovery): """ It implements a bootstrap peer discovery, which receives a static list of peers. """ - def __init__(self, entrypoints: list[Entrypoint]): + def __init__(self, entrypoints: list[PeerEndpoint]): """ - :param descriptions: Descriptions of peers to connect to. + :param entrypoints: Addresses of peers to connect to. 
""" super().__init__() self.log = logger.new() self.entrypoints = entrypoints @override - async def discover_and_connect(self, connect_to: Callable[[Entrypoint], None]) -> None: + async def discover_and_connect(self, connect_to: Callable[[PeerEndpoint], None]) -> None: for entrypoint in self.entrypoints: connect_to(entrypoint) diff --git a/hathor/p2p/peer_discovery/dns.py b/hathor/p2p/peer_discovery/dns.py index b946fc9eb..c5dfe74d6 100644 --- a/hathor/p2p/peer_discovery/dns.py +++ b/hathor/p2p/peer_discovery/dns.py @@ -15,7 +15,7 @@ import socket from collections.abc import Iterator from itertools import chain -from typing import Callable, TypeAlias, cast +from typing import Callable, TypeAlias from structlog import get_logger from twisted.internet.defer import Deferred, gatherResults @@ -23,7 +23,7 @@ from twisted.names.dns import Record_A, Record_TXT, RRHeader from typing_extensions import override -from hathor.p2p.entrypoint import Entrypoint, Protocol +from hathor.p2p.peer_endpoint import PeerAddress, PeerEndpoint, Protocol from .peer_discovery import PeerDiscovery @@ -53,7 +53,7 @@ def do_lookup_text(self, host: str) -> Deferred[LookupResult]: return lookupText(host) @override - async def discover_and_connect(self, connect_to: Callable[[Entrypoint], None]) -> None: + async def discover_and_connect(self, connect_to: Callable[[PeerEndpoint], None]) -> None: """ Run DNS lookup for host and connect to it This is executed when starting the DNS Peer Discovery and first connecting to the network """ @@ -61,26 +61,26 @@ async def discover_and_connect(self, connect_to: Callable[[Entrypoint], None]) - for entrypoint in (await self.dns_seed_lookup(host)): connect_to(entrypoint) - async def dns_seed_lookup(self, host: str) -> set[Entrypoint]: + async def dns_seed_lookup(self, host: str) -> set[PeerEndpoint]: """ Run a DNS lookup for TXT, A, and AAAA records and return a list of connection strings. """ if self.test_mode: # Useful for testing purposes, so we don't need to execute a DNS query - return {Entrypoint.parse('tcp://127.0.0.1:40403')} + return {PeerEndpoint.parse('tcp://127.0.0.1:40403')} - deferreds = [] + deferreds: list[Deferred[Iterator[PeerEndpoint]]] = [] - d1 = self.do_lookup_text(host) - d1.addCallback(self.dns_seed_lookup_text) - d1.addErrback(self.errback) - deferreds.append(cast(Deferred[Iterator[Entrypoint]], d1)) # mypy doesn't know how addCallback affects d1 + d1 = self.do_lookup_text(host) \ + .addCallback(self.dns_seed_lookup_text) \ + .addErrback(self.errback) + deferreds.append(d1) - d2 = self.do_lookup_address(host) - d2.addCallback(self.dns_seed_lookup_address) - d2.addErrback(self.errback) - deferreds.append(cast(Deferred[Iterator[Entrypoint]], d2)) # mypy doesn't know how addCallback affects d2 + d2 = self.do_lookup_address(host) \ + .addCallback(self.dns_seed_lookup_address) \ + .addErrback(self.errback) + deferreds.append(d2) - results: list[Iterator[Entrypoint]] = await gatherResults(deferreds) + results: list[Iterator[PeerEndpoint]] = await gatherResults(deferreds) return set(chain(*results)) def errback(self, result): @@ -89,7 +89,7 @@ def errback(self, result): self.log.error('errback', result=result) return [] - def dns_seed_lookup_text(self, results: LookupResult) -> Iterator[Entrypoint]: + def dns_seed_lookup_text(self, results: LookupResult) -> Iterator[PeerEndpoint]: """ Run a DNS lookup for TXT records to discover new peers. The `results` has three lists that contain answer records, authority records, and additional records. 
@@ -100,14 +100,14 @@ def dns_seed_lookup_text(self, results: LookupResult) -> Iterator[Entrypoint]: for txt in record.payload.data: raw_entrypoint = txt.decode('utf-8') try: - entrypoint = Entrypoint.parse(raw_entrypoint) + entrypoint = PeerEndpoint.parse(raw_entrypoint) except ValueError: self.log.warning('could not parse entrypoint, skipping it', raw_entrypoint=raw_entrypoint) continue self.log.info('seed DNS TXT found', entrypoint=str(entrypoint)) yield entrypoint - def dns_seed_lookup_address(self, results: LookupResult) -> Iterator[Entrypoint]: + def dns_seed_lookup_address(self, results: LookupResult) -> Iterator[PeerEndpoint]: """ Run a DNS lookup for A records to discover new peers. The `results` has three lists that contain answer records, authority records, and additional records. @@ -118,6 +118,6 @@ def dns_seed_lookup_address(self, results: LookupResult) -> Iterator[Entrypoint] address = record.payload.address assert address is not None host = socket.inet_ntoa(address) - entrypoint = Entrypoint(Protocol.TCP, host, self.default_port) + entrypoint = PeerAddress(Protocol.TCP, host, self.default_port).with_id() self.log.info('seed DNS A found', entrypoint=str(entrypoint)) yield entrypoint diff --git a/hathor/p2p/peer_discovery/peer_discovery.py b/hathor/p2p/peer_discovery/peer_discovery.py index a6ff799ed..7d040fae2 100644 --- a/hathor/p2p/peer_discovery/peer_discovery.py +++ b/hathor/p2p/peer_discovery/peer_discovery.py @@ -15,7 +15,7 @@ from abc import ABC, abstractmethod from typing import Callable -from hathor.p2p.entrypoint import Entrypoint +from hathor.p2p.peer_endpoint import PeerEndpoint class PeerDiscovery(ABC): @@ -23,7 +23,7 @@ class PeerDiscovery(ABC): """ @abstractmethod - async def discover_and_connect(self, connect_to: Callable[[Entrypoint], None]) -> None: + async def discover_and_connect(self, connect_to: Callable[[PeerEndpoint], None]) -> None: """ This method must discover the peers and call `connect_to` for each of them. :param connect_to: Function which will be called for each discovered peer. diff --git a/hathor/p2p/peer_endpoint.py b/hathor/p2p/peer_endpoint.py new file mode 100644 index 000000000..c7cafce20 --- /dev/null +++ b/hathor/p2p/peer_endpoint.py @@ -0,0 +1,277 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from dataclasses import dataclass +from enum import Enum +from typing import Any +from urllib.parse import parse_qs, urlparse + +from twisted.internet.address import IPv4Address, IPv6Address +from twisted.internet.endpoints import TCP4ClientEndpoint +from twisted.internet.interfaces import IAddress, IStreamClientEndpoint +from typing_extensions import Self + +from hathor.p2p.peer_id import PeerId +from hathor.reactor import ReactorProtocol as Reactor + +COMPARISON_ERROR_MESSAGE = ( + 'never compare PeerAddress with PeerEndpoint or two PeerEndpoint instances directly! ' + 'instead, compare the addr attribute explicitly, and if relevant, the peer_id too.' 
+) + + +class Protocol(Enum): + TCP = 'tcp' + + +@dataclass(frozen=True, slots=True) +class PeerAddress: + """Peer address as received when a connection is made.""" + + protocol: Protocol + host: str + port: int + + def __str__(self) -> str: + return f'{self.protocol.value}://{self.host}:{self.port}' + + def __eq__(self, other: Any) -> bool: + """ + This function implements strict comparison between two PeerAddress insteances. Comparison between a PeerAddress + and a PeerEndpoint, or between two PeerEndpoint instances, purposefully throws a ValueError. + + Instead, in those cases users should explicity compare the underlying PeerAddress instances using the `addr` + attribute. This ensures we don't have issues with implicit equality checks,such as when using the `in` operator + + Examples: + + >>> ep1 = 'tcp://foo:111' + >>> ep2 = 'tcp://foo:111/?id=c0f19299c2a4dcbb6613a14011ff07b63d6cb809e4cee25e9c1ccccdd6628696' + >>> ep3 = 'tcp://foo:111/?id=bc5119d47bb4ea7c19100bd97fb11f36970482108bd3d45ff101ee4f6bbec872' + >>> ep4 = 'tcp://bar:111/?id=c0f19299c2a4dcbb6613a14011ff07b63d6cb809e4cee25e9c1ccccdd6628696' + >>> ep5 = 'tcp://foo:112/?id=c0f19299c2a4dcbb6613a14011ff07b63d6cb809e4cee25e9c1ccccdd6628696' + >>> ep6 = 'tcp://localhost:111' + >>> ep7 = 'tcp://127.0.0.1:111' + >>> PeerEndpoint.parse(ep1).addr == PeerEndpoint.parse(ep2).addr + True + >>> PeerEndpoint.parse(ep2).addr == PeerEndpoint.parse(ep3).addr + True + >>> PeerEndpoint.parse(ep1).addr == PeerEndpoint.parse(ep4).addr + False + >>> PeerEndpoint.parse(ep2).addr == PeerEndpoint.parse(ep4).addr + False + >>> PeerEndpoint.parse(ep2).addr == PeerEndpoint.parse(ep5).addr + False + >>> PeerEndpoint.parse(ep6).addr == PeerEndpoint.parse(ep7).addr + True + >>> PeerEndpoint.parse(ep1) == PeerEndpoint.parse(ep1) + Traceback (most recent call last): + ... + ValueError: never compare PeerAddress with PeerEndpoint or two PeerEndpoint instances directly! \ +instead, compare the addr attribute explicitly, and if relevant, the peer_id too. + >>> PeerEndpoint.parse(ep1) == PeerEndpoint.parse(ep1).addr + Traceback (most recent call last): + ... + ValueError: never compare PeerAddress with PeerEndpoint or two PeerEndpoint instances directly! \ +instead, compare the addr attribute explicitly, and if relevant, the peer_id too. + >>> PeerEndpoint.parse(ep1).addr == PeerEndpoint.parse(ep1) + Traceback (most recent call last): + ... + ValueError: never compare PeerAddress with PeerEndpoint or two PeerEndpoint instances directly! \ +instead, compare the addr attribute explicitly, and if relevant, the peer_id too. + >>> PeerEndpoint.parse(ep1) != PeerEndpoint.parse(ep4).addr + Traceback (most recent call last): + ... + ValueError: never compare PeerAddress with PeerEndpoint or two PeerEndpoint instances directly! \ +instead, compare the addr attribute explicitly, and if relevant, the peer_id too. + >>> PeerEndpoint.parse(ep1) in [PeerEndpoint.parse(ep1)] + Traceback (most recent call last): + ... + ValueError: never compare PeerAddress with PeerEndpoint or two PeerEndpoint instances directly! \ +instead, compare the addr attribute explicitly, and if relevant, the peer_id too. 
+ >>> PeerEndpoint.parse(ep1).addr in [PeerEndpoint.parse(ep1).addr] + True + >>> PeerEndpoint.parse(ep1).addr != PeerEndpoint.parse(ep4).addr + True + """ + if not isinstance(other, PeerAddress): + raise ValueError(COMPARISON_ERROR_MESSAGE) + + if self.is_localhost() and other.is_localhost(): + return (self.protocol, self.port) == (other.protocol, other.port) + + return (self.protocol, self.host, self.port) == (other.protocol, other.host, other.port) + + def __ne__(self, other: Any) -> bool: + return not self == other + + @classmethod + def parse(cls, description: str) -> Self: + protocol, host, port, query = _parse_address_parts(description) + if query: + raise ValueError(f'unexpected query: "{description}". did you incorrectly add an id=?') + return cls(protocol, host, port) + + @classmethod + def from_hostname_address(cls, hostname: str, address: IPv4Address | IPv6Address) -> Self: + return cls.parse(f'{address.type}://{hostname}:{address.port}') + + @classmethod + def from_address(cls, address: IAddress) -> Self: + """Create an Entrypoint from a Twisted IAddress.""" + if not isinstance(address, (IPv4Address, IPv6Address)): + raise NotImplementedError(f'address: {address}') + return cls.parse(f'{address.type}://{address.host}:{address.port}') + + def to_client_endpoint(self, reactor: Reactor) -> IStreamClientEndpoint: + """This method generates a twisted client endpoint that has a .connect() method.""" + # XXX: currently we don't support IPv6, but when we do we have to decide between TCP4ClientEndpoint and + # TCP6ClientEndpoint, when the host is an IP address that is easy, but when it is a DNS hostname, we will not + # know which to use until we know which resource records it holds (A or AAAA) + return TCP4ClientEndpoint(reactor, self.host, self.port) + + def is_localhost(self) -> bool: + """Used to determine if the address host is a localhost address. + + Examples: + + >>> PeerAddress.parse('tcp://127.0.0.1:444').is_localhost() + True + >>> PeerAddress.parse('tcp://localhost:444').is_localhost() + True + >>> PeerAddress.parse('tcp://8.8.8.8:444').is_localhost() + False + >>> PeerAddress.parse('tcp://foo.bar:444').is_localhost() + False + """ + return self.host in ('127.0.0.1', 'localhost') + + def with_id(self, peer_id: PeerId | None = None) -> PeerEndpoint: + """Create a PeerEndpoint instance with self as the address and with the provided peer_id, or None.""" + return PeerEndpoint(self, peer_id) + + +@dataclass(frozen=True, slots=True) +class PeerEndpoint: + """Peer endpoint description (returned from DNS query, or received from the p2p network) may contain a peer-id.""" + + addr: PeerAddress + peer_id: PeerId | None = None + + def __str__(self) -> str: + return str(self.addr) if self.peer_id is None else f'{self.addr}/?id={self.peer_id}' + + def __eq__(self, other: Any) -> bool: + """See PeerAddress.__eq__""" + raise ValueError(COMPARISON_ERROR_MESSAGE) + + def __ne__(self, other: Any) -> bool: + """See PeerAddress.__eq__""" + raise ValueError(COMPARISON_ERROR_MESSAGE) + + @classmethod + def parse(cls, description: str) -> PeerEndpoint: + """Parse endpoint description into an PeerEndpoint object. 
+ + Examples: + + >>> str(PeerEndpoint.parse('tcp://127.0.0.1:40403/')) + 'tcp://127.0.0.1:40403' + + >>> id1 = 'c0f19299c2a4dcbb6613a14011ff07b63d6cb809e4cee25e9c1ccccdd6628696' + >>> PeerEndpoint.parse(f'tcp://127.0.0.1:40403/?id={id1}') + PeerEndpoint(addr=PeerAddress(protocol=, host='127.0.0.1', port=40403), \ +peer_id=PeerId('c0f19299c2a4dcbb6613a14011ff07b63d6cb809e4cee25e9c1ccccdd6628696')) + + >>> str(PeerEndpoint.parse(f'tcp://127.0.0.1:40403/?id={id1}')) + 'tcp://127.0.0.1:40403/?id=c0f19299c2a4dcbb6613a14011ff07b63d6cb809e4cee25e9c1ccccdd6628696' + + >>> PeerEndpoint.parse('tcp://127.0.0.1:40403') + PeerEndpoint(addr=PeerAddress(protocol=, host='127.0.0.1', port=40403), peer_id=None) + + >>> PeerEndpoint.parse('tcp://127.0.0.1:40403/') + PeerEndpoint(addr=PeerAddress(protocol=, host='127.0.0.1', port=40403), peer_id=None) + + >>> PeerEndpoint.parse('tcp://foo.bar.baz:40403/') + PeerEndpoint(addr=PeerAddress(protocol=, host='foo.bar.baz', port=40403), \ +peer_id=None) + + >>> str(PeerEndpoint.parse('tcp://foo.bar.baz:40403/')) + 'tcp://foo.bar.baz:40403' + + >>> PeerEndpoint.parse('tcp://127.0.0.1:40403/?id=123') + Traceback (most recent call last): + ... + ValueError: non-hexadecimal number found in fromhex() arg at position 3 + + >>> PeerEndpoint.parse('tcp://127.0.0.1:4040f') + Traceback (most recent call last): + ... + ValueError: Port could not be cast to integer value as '4040f' + + >>> PeerEndpoint.parse('udp://127.0.0.1:40403/') + Traceback (most recent call last): + ... + ValueError: 'udp' is not a valid Protocol + + >>> PeerEndpoint.parse('tcp://127.0.0.1/') + Traceback (most recent call last): + ... + ValueError: expected a port: "tcp://127.0.0.1/" + + >>> PeerEndpoint.parse('tcp://:40403/') + Traceback (most recent call last): + ... + ValueError: expected a host: "tcp://:40403/" + + >>> PeerEndpoint.parse('tcp://127.0.0.1:40403/foo') + Traceback (most recent call last): + ... + ValueError: unexpected path: "tcp://127.0.0.1:40403/foo" + + >>> id2 = 'bc5119d47bb4ea7c19100bd97fb11f36970482108bd3d45ff101ee4f6bbec872' + >>> PeerEndpoint.parse(f'tcp://127.0.0.1:40403/?id={id1}&id={id2}') + Traceback (most recent call last): + ... 
+ ValueError: unexpected id count: 2 + """ + protocol, host, port, query_str = _parse_address_parts(description) + peer_id: PeerId | None = None + + if query_str: + query = parse_qs(query_str) + if 'id' in query: + ids = query['id'] + if len(ids) != 1: + raise ValueError(f'unexpected id count: {len(ids)}') + peer_id = PeerId(ids[0]) + + return PeerAddress(protocol, host, port).with_id(peer_id) + + +def _parse_address_parts(description: str) -> tuple[Protocol, str, int, str]: + url = urlparse(description) + protocol = Protocol(url.scheme) + host = url.hostname + if host is None: + raise ValueError(f'expected a host: "{description}"') + port = url.port + if port is None: + raise ValueError(f'expected a port: "{description}"') + if url.path not in {'', '/'}: + raise ValueError(f'unexpected path: "{description}"') + + return protocol, host, port, url.query diff --git a/hathor/p2p/protocol.py b/hathor/p2p/protocol.py index f2bda9cc4..e05e63b55 100644 --- a/hathor/p2p/protocol.py +++ b/hathor/p2p/protocol.py @@ -14,9 +14,10 @@ import time from enum import Enum -from typing import TYPE_CHECKING, Any, Coroutine, Generator, Optional, cast +from typing import TYPE_CHECKING, Optional, cast from structlog import get_logger +from twisted.internet import defer from twisted.internet.defer import Deferred from twisted.internet.interfaces import IDelayedCall, ITCPTransport, ITransport from twisted.internet.protocol import connectionDone @@ -24,9 +25,9 @@ from twisted.python.failure import Failure from hathor.conf.settings import HathorSettings -from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.messages import ProtocolMessages -from hathor.p2p.peer import PrivatePeer, PublicPeer +from hathor.p2p.peer import PrivatePeer, PublicPeer, UnverifiedPeer +from hathor.p2p.peer_endpoint import PeerEndpoint from hathor.p2p.peer_id import PeerId from hathor.p2p.rate_limiter import RateLimiter from hathor.p2p.states import BaseState, HelloState, PeerIdState, ReadyState @@ -70,7 +71,6 @@ class RateLimitKeys(str, Enum): GLOBAL = 'global' class WarningFlags(str, Enum): - NO_PEER_ID_URL = 'no_peer_id_url' NO_ENTRYPOINTS = 'no_entrypoints' my_peer: PrivatePeer @@ -83,7 +83,7 @@ class WarningFlags(str, Enum): state: Optional[BaseState] connection_time: float _state_instances: dict[PeerState, BaseState] - entrypoint: Optional[Entrypoint] + entrypoint: Optional[PeerEndpoint] warning_flags: set[str] aborting: bool diff_timestamp: Optional[int] @@ -149,10 +149,10 @@ def __init__( # Connection string of the peer # Used to validate if entrypoints has this string - self.entrypoint: Optional[Entrypoint] = None + self.entrypoint: Optional[PeerEndpoint] = None # Peer id sent in the connection url that is expected to connect (optional) - self.expected_peer_id: Optional[str] = None + self.expected_peer_id: PeerId | None = None # Set of warning flags that may be added during the connection process self.warning_flags: set[str] = set() @@ -254,9 +254,13 @@ def on_connect(self) -> None: if self.connections: self.connections.on_peer_connect(self) - def on_outbound_connect(self, entrypoint: Entrypoint) -> None: + def on_outbound_connect(self, entrypoint: PeerEndpoint, peer: UnverifiedPeer | PublicPeer | None) -> None: """Called when we successfully establish an outbound connection to a peer.""" # Save the used entrypoint in protocol so we can validate that it matches the entrypoints data + if entrypoint.peer_id is not None and peer is not None: + assert entrypoint.peer_id == peer.id + + self.expected_peer_id = peer.id if peer else 
entrypoint.peer_id self.entrypoint = entrypoint def on_peer_ready(self) -> None: @@ -292,7 +296,7 @@ def send_message(self, cmd: ProtocolMessages, payload: Optional[str] = None) -> raise NotImplementedError @cpu.profiler(key=lambda self, cmd: 'p2p-cmd!{}'.format(str(cmd))) - def recv_message(self, cmd: ProtocolMessages, payload: str) -> Optional[Deferred[None]]: + def recv_message(self, cmd: ProtocolMessages, payload: str) -> None: """ Executed when a new message arrives. """ assert self.state is not None @@ -301,7 +305,6 @@ def recv_message(self, cmd: ProtocolMessages, payload: str) -> Optional[Deferred self.last_message = now if self._peer is not None: self.peer.info.last_seen = now - self.reset_idle_timeout() if not self.ratelimit.add_hit(self.RateLimitKeys.GLOBAL): # XXX: on Python 3.11 the result of the following expression: @@ -310,21 +313,22 @@ def recv_message(self, cmd: ProtocolMessages, payload: str) -> Optional[Deferred # that something like `str(value)` is called which results in a different value (usually not the case # for regular strings, but it is for enum+str), using `enum_variant.value` side-steps this problem self.state.send_throttle(self.RateLimitKeys.GLOBAL.value) - return None - - fn = self.state.cmd_map.get(cmd) - if fn is not None: - try: - result = fn(payload) - return Deferred.fromCoroutine(result) if isinstance(result, Coroutine) else result - except Exception: - self.log.warn('recv_message processing error', exc_info=True) - raise - else: + return + + cmd_handler = self.state.cmd_map.get(cmd) + if cmd_handler is None: self.log.debug('cmd not found', cmd=cmd, payload=payload, available=list(self.state.cmd_map.keys())) self.send_error_and_close_connection('Invalid Command: {} {}'.format(cmd, payload)) + return - return None + deferred_result: Deferred[None] = defer.maybeDeferred(cmd_handler, payload) + deferred_result \ + .addCallback(lambda _: self.reset_idle_timeout()) \ + .addErrback(self._on_cmd_handler_error, cmd) + + def _on_cmd_handler_error(self, failure: Failure, cmd: ProtocolMessages) -> None: + self.log.warn('recv_message processing error', reason=failure.getErrorMessage(), exc_info=True) + self.send_error_and_close_connection(f'Error processing "{cmd.value}" command') def send_error(self, msg: str) -> None: """ Send an error message to the peer. @@ -411,7 +415,7 @@ def lineLengthExceeded(self, line: str) -> None: super(HathorLineReceiver, self).lineLengthExceeded(line) @cpu.profiler(key=lambda self: 'p2p!{}'.format(self.get_short_remote())) - def lineReceived(self, line: bytes) -> Optional[Generator[Any, Any, None]]: + def lineReceived(self, line: bytes) -> None: assert self.transport is not None if self.aborting: @@ -420,7 +424,7 @@ def lineReceived(self, line: bytes) -> Optional[Generator[Any, Any, None]]: # abort and close the connection, HathorLineReceive.lineReceived will still be called for the buffered # lines. If that happens we just ignore those messages. 
self.log.debug('ignore received messager after abort') - return None + return self.metrics.received_messages += 1 self.metrics.received_bytes += len(line) @@ -429,17 +433,16 @@ def lineReceived(self, line: bytes) -> Optional[Generator[Any, Any, None]]: sline = line.decode('utf-8') except UnicodeDecodeError: self.transport.loseConnection() - return None + return msgtype, _, msgdata = sline.partition(' ') try: cmd = ProtocolMessages(msgtype) except ValueError: self.transport.loseConnection() - return None - else: - self.recv_message(cmd, msgdata) - return None + return + + self.recv_message(cmd, msgdata) def send_message(self, cmd_enum: ProtocolMessages, payload: Optional[str] = None) -> None: cmd = cmd_enum.value diff --git a/hathor/p2p/resources/add_peers.py b/hathor/p2p/resources/add_peers.py index aeb92208c..c8faeb5dc 100644 --- a/hathor/p2p/resources/add_peers.py +++ b/hathor/p2p/resources/add_peers.py @@ -20,8 +20,8 @@ from hathor.api_util import Resource, render_options, set_cors from hathor.cli.openapi_files.register import register_resource from hathor.manager import HathorManager -from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.peer_discovery import BootstrapPeerDiscovery +from hathor.p2p.peer_endpoint import PeerEndpoint from hathor.util import json_dumpb, json_loadb @@ -60,7 +60,7 @@ def render_POST(self, request: Request) -> bytes: }) try: - entrypoints = list(map(Entrypoint.parse, raw_entrypoints)) + entrypoints = list(map(PeerEndpoint.parse, raw_entrypoints)) except ValueError: return json_dumpb({ 'success': False, @@ -69,14 +69,15 @@ def render_POST(self, request: Request) -> bytes: known_peers = self.manager.connections.verified_peer_storage.values() - def already_connected(entrypoint: Entrypoint) -> bool: + def already_connected(endpoint: PeerEndpoint) -> bool: # ignore peers that we're already trying to connect - if entrypoint in self.manager.connections.iter_not_ready_endpoints(): - return True + for ready_endpoint in self.manager.connections.iter_not_ready_endpoints(): + if endpoint.addr == ready_endpoint.addr: + return True # remove peers we already know about for peer in known_peers: - if entrypoint in peer.entrypoints: + if endpoint.addr in peer.info.entrypoints: return True return False diff --git a/hathor/p2p/states/peer_id.py b/hathor/p2p/states/peer_id.py index 2aca0a9db..77e8a051e 100644 --- a/hathor/p2p/states/peer_id.py +++ b/hathor/p2p/states/peer_id.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from structlog import get_logger @@ -63,17 +63,19 @@ def handle_ready(self, payload: str) -> None: # So it was just waiting for the ready message from the other peer to change the state to READY self.protocol.change_state(self.protocol.PeerState.READY) + def _get_peer_id_data(self) -> dict[str, Any]: + my_peer = self.protocol.my_peer + return dict( + id=str(my_peer.id), + pubKey=my_peer.get_public_key(), + entrypoints=my_peer.info.entrypoints_as_str(), + ) + def send_peer_id(self) -> None: """ Send a PEER-ID message, identifying the peer. 
""" - protocol = self.protocol - my_peer = protocol.my_peer - hello = { - 'id': str(my_peer.id), - 'pubKey': my_peer.get_public_key(), - 'entrypoints': my_peer.info.entrypoints_as_str(), - } - self.send_message(ProtocolMessages.PEER_ID, json_dumps(hello)) + data = self._get_peer_id_data() + self.send_message(ProtocolMessages.PEER_ID, json_dumps(data)) async def handle_peer_id(self, payload: str) -> None: """ Executed when a PEER-ID is received. It basically checks @@ -89,7 +91,6 @@ async def handle_peer_id(self, payload: str) -> None: data = json_loads(payload) peer = PublicPeer.create_from_json(data) - peer.validate() assert peer.id is not None # If the connection URL had a peer-id parameter we need to check it's the same @@ -119,6 +120,9 @@ async def handle_peer_id(self, payload: str) -> None: protocol.send_error_and_close_connection('Connection string is not in the entrypoints.') return + if protocol.entrypoint is not None and protocol.entrypoint.peer_id is not None: + assert protocol.entrypoint.peer_id == peer.id + if protocol.use_ssl: certificate_valid = peer.validate_certificate(protocol) if not certificate_valid: diff --git a/hathor/p2p/utils.py b/hathor/p2p/utils.py index c0a25f3d8..55f9b9591 100644 --- a/hathor/p2p/utils.py +++ b/hathor/p2p/utils.py @@ -29,8 +29,8 @@ from hathor.conf.get_settings import get_global_settings from hathor.conf.settings import HathorSettings from hathor.indexes.height_index import HeightInfo -from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.peer_discovery import DNSPeerDiscovery +from hathor.p2p.peer_endpoint import PeerEndpoint from hathor.p2p.peer_id import PeerId from hathor.transaction.genesis import get_representation_for_all_genesis @@ -78,7 +78,7 @@ def get_settings_hello_dict(settings: HathorSettings) -> dict[str, Any]: return settings_dict -async def discover_dns(host: str, test_mode: int = 0) -> list[Entrypoint]: +async def discover_dns(host: str, test_mode: int = 0) -> list[PeerEndpoint]: """ Start a DNS peer discovery object and execute a search for the host Returns the DNS string from the requested host diff --git a/hathor/simulator/fake_connection.py b/hathor/simulator/fake_connection.py index c993302db..b3a29afc9 100644 --- a/hathor/simulator/fake_connection.py +++ b/hathor/simulator/fake_connection.py @@ -15,14 +15,16 @@ from __future__ import annotations from collections import deque -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING, Literal, Optional from OpenSSL.crypto import X509 from structlog import get_logger -from twisted.internet.address import HostnameAddress +from twisted.internet.address import IPv4Address from twisted.internet.testing import StringTransport from hathor.p2p.peer import PrivatePeer +from hathor.p2p.peer_endpoint import PeerAddress, PeerEndpoint +from hathor.p2p.peer_id import PeerId if TYPE_CHECKING: from hathor.manager import HathorManager @@ -32,8 +34,8 @@ class HathorStringTransport(StringTransport): - def __init__(self, peer: PrivatePeer): - super().__init__() + def __init__(self, peer: PrivatePeer, *, peer_address: IPv4Address): + super().__init__(peerAddress=peer_address) self._peer = peer @property @@ -46,12 +48,27 @@ def getPeerCertificate(self) -> X509: class FakeConnection: - def __init__(self, manager1: 'HathorManager', manager2: 'HathorManager', *, latency: float = 0, - autoreconnect: bool = False): + _next_port: int = 49000 + _port_per_manager: dict['HathorManager', int] = {} + + def __init__( + self, + manager1: 'HathorManager', + manager2: 
'HathorManager', + *, + latency: float = 0, + autoreconnect: bool = False, + addr1: IPv4Address | None = None, + addr2: IPv4Address | None = None, + fake_bootstrap_id: PeerId | None | Literal[False] = False, + ): """ :param: latency: Latency between nodes in seconds + :fake_bootstrap_id: when False, bootstrap mode is disabled. When a PeerId or None are passed, bootstrap mode is + enabled and the value is used as the connection's entrypoint.peer_id """ self.log = logger.new() + self._fake_bootstrap_id = fake_bootstrap_id self.manager1 = manager1 self.manager2 = manager2 @@ -64,8 +81,28 @@ def __init__(self, manager1: 'HathorManager', manager2: 'HathorManager', *, late self._buf1: deque[str] = deque() self._buf2: deque[str] = deque() + # manager1's address, the server, where manager2 will connect to + self.addr1 = addr1 or IPv4Address('TCP', '127.0.0.1', self._get_port(manager1)) + # manager2's address, the client, where manager2 will connect from + self.addr2 = addr2 or IPv4Address('TCP', '127.0.0.1', self._get_port(manager2)) + self.reconnect() + @classmethod + def _get_port(cls, manager: 'HathorManager') -> int: + port = cls._port_per_manager.get(manager) + if port is None: + port = cls._next_port + cls._next_port += 1 + return port + + @property + def entrypoint(self) -> PeerEndpoint: + entrypoint = PeerAddress.from_address(self.addr1) + if self._fake_bootstrap_id is False: + return entrypoint.with_id(self.manager1.my_peer.id) + return entrypoint.with_id(self._fake_bootstrap_id) + @property def proto1(self): return self._proto1 @@ -234,10 +271,21 @@ def reconnect(self) -> None: self.disconnect(Failure(Exception('forced reconnection'))) self._buf1.clear() self._buf2.clear() - self._proto1 = self.manager1.connections.server_factory.buildProtocol(HostnameAddress(b'fake', 0)) - self._proto2 = self.manager2.connections.client_factory.buildProtocol(HostnameAddress(b'fake', 0)) - self.tr1 = HathorStringTransport(self._proto2.my_peer) - self.tr2 = HathorStringTransport(self._proto1.my_peer) + + self._proto1 = self.manager1.connections.server_factory.buildProtocol(self.addr2) + self._proto2 = self.manager2.connections.client_factory.buildProtocol(self.addr1) + + # When _fake_bootstrap_id is set we don't pass the peer because that's how bootstrap calls connect_to() + peer = self._proto1.my_peer.to_unverified_peer() if self._fake_bootstrap_id is False else None + self.manager2.connections.connect_to(self.entrypoint, peer) + + connecting_peers = list(self.manager2.connections.connecting_peers.values()) + for connecting_peer in connecting_peers: + if connecting_peer.entrypoint.addr == self.entrypoint.addr: + connecting_peer.endpoint_deferred.callback(self._proto2) + + self.tr1 = HathorStringTransport(self._proto2.my_peer, peer_address=self.addr2) + self.tr2 = HathorStringTransport(self._proto1.my_peer, peer_address=self.addr1) self._proto1.makeConnection(self.tr1) self._proto2.makeConnection(self.tr2) self.is_connected = True diff --git a/tests/others/test_metrics.py b/tests/others/test_metrics.py index bbdede763..b46f6985b 100644 --- a/tests/others/test_metrics.py +++ b/tests/others/test_metrics.py @@ -3,9 +3,9 @@ import pytest -from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.manager import PeerConnectionsMetrics from hathor.p2p.peer import PrivatePeer +from hathor.p2p.peer_endpoint import PeerEndpoint from hathor.p2p.protocol import HathorProtocol from hathor.pubsub import HathorEvents from hathor.simulator.utils import add_new_blocks @@ -70,7 +70,7 @@ def 
test_connections_manager_integration(self): manager.connections.handshaking_peers.update({Mock()}) # Execution - endpoint = Entrypoint.parse('tcp://127.0.0.1:8005') + endpoint = PeerEndpoint.parse('tcp://127.0.0.1:8005') # This will trigger sending to the pubsub one of the network events manager.connections.connect_to(endpoint, use_ssl=True) diff --git a/tests/p2p/test_bootstrap.py b/tests/p2p/test_bootstrap.py index 3c3d9fa8c..82aa932bb 100644 --- a/tests/p2p/test_bootstrap.py +++ b/tests/p2p/test_bootstrap.py @@ -4,11 +4,11 @@ from twisted.names.dns import TXT, A, Record_A, Record_TXT, RRHeader from typing_extensions import override -from hathor.p2p.entrypoint import Entrypoint, Protocol from hathor.p2p.manager import ConnectionsManager from hathor.p2p.peer import PrivatePeer from hathor.p2p.peer_discovery import DNSPeerDiscovery, PeerDiscovery from hathor.p2p.peer_discovery.dns import LookupResult +from hathor.p2p.peer_endpoint import PeerAddress, PeerEndpoint, Protocol from hathor.pubsub import PubSubManager from tests import unittest from tests.test_memory_reactor_clock import TestMemoryReactorClock @@ -19,9 +19,10 @@ def __init__(self, mocked_host_ports: list[tuple[str, int]]): self.mocked_host_ports = mocked_host_ports @override - async def discover_and_connect(self, connect_to: Callable[[Entrypoint], None]) -> None: + async def discover_and_connect(self, connect_to: Callable[[PeerEndpoint], None]) -> None: for host, port in self.mocked_host_ports: - connect_to(Entrypoint(Protocol.TCP, host, port)) + addr = PeerAddress(Protocol.TCP, host, port) + connect_to(addr.with_id()) class MockDNSPeerDiscovery(DNSPeerDiscovery): diff --git a/tests/p2p/test_connections.py b/tests/p2p/test_connections.py index 570424c84..b27897ca4 100644 --- a/tests/p2p/test_connections.py +++ b/tests/p2p/test_connections.py @@ -1,4 +1,5 @@ -from hathor.p2p.entrypoint import Entrypoint +from hathor.manager import HathorManager +from hathor.p2p.peer_endpoint import PeerEndpoint from tests import unittest from tests.utils import run_server @@ -14,9 +15,9 @@ def test_connections(self) -> None: process3.terminate() def test_manager_connections(self) -> None: - manager = self.create_peer('testnet', enable_sync_v1=True, enable_sync_v2=False) + manager: HathorManager = self.create_peer('testnet', enable_sync_v1=True, enable_sync_v2=False) - endpoint = Entrypoint.parse('tcp://127.0.0.1:8005') + endpoint = PeerEndpoint.parse('tcp://127.0.0.1:8005') manager.connections.connect_to(endpoint, use_ssl=True) self.assertIn(endpoint, manager.connections.iter_not_ready_endpoints()) diff --git a/tests/p2p/test_peer_id.py b/tests/p2p/test_peer_id.py index 1604e29c9..56dfaf79b 100644 --- a/tests/p2p/test_peer_id.py +++ b/tests/p2p/test_peer_id.py @@ -4,10 +4,11 @@ from typing import cast from unittest.mock import Mock +import pytest from twisted.internet.interfaces import ITransport -from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.peer import InvalidPeerIdException, PrivatePeer, PublicPeer, UnverifiedPeer +from hathor.p2p.peer_endpoint import PeerAddress, PeerEndpoint from hathor.p2p.peer_id import PeerId from hathor.p2p.peer_storage import VerifiedPeerStorage from tests import unittest @@ -87,9 +88,9 @@ def test_merge_peer(self) -> None: self.assertEqual(peer.public_key, p1.public_key) self.assertEqual(peer.info.entrypoints, []) - ep1 = Entrypoint.parse('tcp://127.0.0.1:1001') - ep2 = Entrypoint.parse('tcp://127.0.0.1:1002') - ep3 = Entrypoint.parse('tcp://127.0.0.1:1003') + ep1 = 
PeerAddress.parse('tcp://127.0.0.1:1001') + ep2 = PeerAddress.parse('tcp://127.0.0.1:1002') + ep3 = PeerAddress.parse('tcp://127.0.0.1:1003') p3 = PrivatePeer.auto_generated().to_public_peer() p3.info.entrypoints.append(ep1) @@ -204,6 +205,59 @@ def test_retry_logic(self) -> None: peer.info.reset_retry_timestamp() self.assertTrue(peer.info.can_retry(0)) + def test_unverified_peer_to_json_roundtrip(self) -> None: + peer_id = PrivatePeer.auto_generated().id + addr1 = 'tcp://localhost:40403' + addr2 = 'tcp://192.168.0.1:40404' + addr3 = 'tcp://foo.bar:80' + + peer_json_simple = dict( + id=str(peer_id), + entrypoints=[addr1, addr2, addr3] + ) + result = UnverifiedPeer.create_from_json(peer_json_simple) + + assert result.id == peer_id + assert result.info.entrypoints == [ + PeerAddress.parse(addr1), + PeerAddress.parse(addr2), + PeerAddress.parse(addr3), + ] + assert result.to_json() == peer_json_simple + + # We support this for compatibility with old peers that may send ids in the URLs + peer_json_with_ids = dict( + id=str(peer_id), + entrypoints=[ + f'{addr1}/?id={peer_id}', + f'{addr2}/?id={peer_id}', + addr3, + ] + ) + result = UnverifiedPeer.create_from_json(peer_json_with_ids) + + assert result.id == peer_id + assert result.info.entrypoints == [ + PeerAddress.parse(addr1), + PeerAddress.parse(addr2), + PeerAddress.parse(addr3), + ] + assert result.to_json() == peer_json_simple # the roundtrip erases the ids from the URLs + + other_peer_id = PrivatePeer.auto_generated().id + peer_json_with_conflicting_ids = dict( + id=str(peer_id), + entrypoints=[ + f'{addr1}/?id={peer_id}', + f'{addr2}/?id={other_peer_id}', + addr3, + ] + ) + + with pytest.raises(ValueError) as e: + UnverifiedPeer.create_from_json(peer_json_with_conflicting_ids) + assert str(e.value) == f'conflicting peer_id: {other_peer_id} != {peer_id}' + class BasePeerIdTest(unittest.TestCase): __test__ = False @@ -211,25 +265,25 @@ class BasePeerIdTest(unittest.TestCase): async def test_validate_entrypoint(self) -> None: manager = self.create_peer('testnet', unlock_wallet=False) peer = manager.my_peer - peer.info.entrypoints = [Entrypoint.parse('tcp://127.0.0.1:40403')] + peer.info.entrypoints = [PeerAddress.parse('tcp://127.0.0.1:40403')] # we consider that we are starting the connection to the peer protocol = manager.connections.client_factory.buildProtocol('127.0.0.1') - protocol.entrypoint = Entrypoint.parse('tcp://127.0.0.1:40403') + protocol.entrypoint = PeerEndpoint.parse('tcp://127.0.0.1:40403') result = await peer.info.validate_entrypoint(protocol) self.assertTrue(result) # if entrypoint is an URI - peer.info.entrypoints = [Entrypoint.parse('tcp://uri_name:40403')] + peer.info.entrypoints = [PeerAddress.parse('tcp://uri_name:40403')] result = await peer.info.validate_entrypoint(protocol) self.assertTrue(result) # test invalid. DNS in test mode will resolve to '127.0.0.1:40403' - protocol.entrypoint = Entrypoint.parse('tcp://45.45.45.45:40403') + protocol.entrypoint = PeerEndpoint.parse('tcp://45.45.45.45:40403') result = await peer.info.validate_entrypoint(protocol) self.assertFalse(result) # now test when receiving the connection - i.e. 
the peer starts it protocol.entrypoint = None - peer.info.entrypoints = [Entrypoint.parse('tcp://127.0.0.1:40403')] + peer.info.entrypoints = [PeerAddress.parse('tcp://127.0.0.1:40403')] from collections import namedtuple DummyPeer = namedtuple('DummyPeer', 'host') @@ -241,7 +295,7 @@ def getPeer(self) -> DummyPeer: result = await peer.info.validate_entrypoint(protocol) self.assertTrue(result) # if entrypoint is an URI - peer.info.entrypoints = [Entrypoint.parse('tcp://uri_name:40403')] + peer.info.entrypoints = [PeerAddress.parse('tcp://uri_name:40403')] result = await peer.info.validate_entrypoint(protocol) self.assertTrue(result) diff --git a/tests/p2p/test_protocol.py b/tests/p2p/test_protocol.py index 34ec291d3..841a45929 100644 --- a/tests/p2p/test_protocol.py +++ b/tests/p2p/test_protocol.py @@ -1,12 +1,16 @@ -from json import JSONDecodeError +import json from typing import Optional from unittest.mock import Mock, patch +from twisted.internet import defer from twisted.internet.protocol import Protocol from twisted.python.failure import Failure -from hathor.p2p.entrypoint import Entrypoint +from hathor.manager import HathorManager +from hathor.p2p.manager import ConnectionsManager +from hathor.p2p.messages import ProtocolMessages from hathor.p2p.peer import PrivatePeer +from hathor.p2p.peer_endpoint import PeerAddress from hathor.p2p.protocol import HathorLineReceiver, HathorProtocol from hathor.simulator import FakeConnection from hathor.util import json_dumps, json_loadb @@ -72,7 +76,7 @@ def test_on_connect(self) -> None: def test_peer_with_entrypoint(self) -> None: entrypoint_str = 'tcp://192.168.1.1:54321' - entrypoint = Entrypoint.parse(entrypoint_str) + entrypoint = PeerAddress.parse(entrypoint_str) self.peer1.info.entrypoints.append(entrypoint) self.peer2.info.entrypoints.append(entrypoint) self.conn.run_one_step() # HELLO @@ -144,8 +148,10 @@ def test_invalid_payload(self) -> None: self.conn.run_one_step() # HELLO self.conn.run_one_step() # PEER-ID self.conn.run_one_step() # READY - with self.assertRaises(JSONDecodeError): - self._send_cmd(self.conn.proto1, 'PEERS', 'abc') + self.conn.tr1.clear() + self._send_cmd(self.conn.proto1, 'PEERS', 'abc') + assert self.conn.peek_tr1_value() == b'ERROR Error processing "PEERS" command\r\n' + self.assertTrue(self.conn.tr1.disconnecting) def test_invalid_hello1(self) -> None: self.conn.tr1.clear() @@ -263,6 +269,68 @@ def test_invalid_same_peer_id2(self) -> None: # connection is still up self.assertIsConnected(conn_alive) + def test_invalid_peer_id1(self) -> None: + """Test no payload""" + self.conn.run_one_step() + self.conn.tr1.clear() + self._send_cmd(self.conn.proto1, 'PEER-ID') + assert self.conn.peek_tr1_value() == b'ERROR Error processing "PEER-ID" command\r\n' + self.assertTrue(self.conn.tr1.disconnecting) + + def test_invalid_peer_id2(self) -> None: + """Test invalid json payload""" + self.conn.run_one_step() + self.conn.tr1.clear() + self._send_cmd(self.conn.proto1, 'PEER-ID', 'invalid_payload') + assert self.conn.peek_tr1_value() == b'ERROR Error processing "PEER-ID" command\r\n' + self.assertTrue(self.conn.tr1.disconnecting) + + def test_invalid_peer_id3(self) -> None: + """Test empty payload""" + self.conn.run_one_step() + self.conn.tr1.clear() + self._send_cmd(self.conn.proto1, 'PEER-ID', '{}') + assert self.conn.peek_tr1_value() == b'ERROR Error processing "PEER-ID" command\r\n' + self.assertTrue(self.conn.tr1.disconnecting) + + def test_invalid_peer_id4(self) -> None: + """Test payload with missing property""" + 
self.conn.run_one_step()
+        self.conn.tr1.clear()
+        data = self.conn.proto2.state._get_peer_id_data()
+        del data['pubKey']
+        self._send_cmd(
+            self.conn.proto1,
+            'PEER-ID',
+            json.dumps(data)
+        )
+        assert self.conn.peek_tr1_value() == b'ERROR Error processing "PEER-ID" command\r\n'
+        self.assertTrue(self.conn.tr1.disconnecting)
+
+    def test_invalid_peer_id5(self) -> None:
+        """Test payload with peer id not matching public key"""
+        self.conn.run_one_step()
+        self.conn.tr1.clear()
+        data = self.conn.proto2.state._get_peer_id_data()
+        new_peer = PrivatePeer.auto_generated()
+        data['id'] = str(new_peer.id)
+        self._send_cmd(
+            self.conn.proto1,
+            'PEER-ID',
+            json.dumps(data)
+        )
+        assert self.conn.peek_tr1_value() == b'ERROR Error processing "PEER-ID" command\r\n'
+        self.assertTrue(self.conn.tr1.disconnecting)
+
+    def test_valid_peer_id(self) -> None:
+        self.conn.run_one_step()
+        self.conn.run_one_step()
+        self._check_result_only_cmd(self.conn.peek_tr1_value(), b'READY')
+        self._check_result_only_cmd(self.conn.peek_tr2_value(), b'READY')
+        self.assertFalse(self.conn.tr1.disconnecting)
+        self.assertFalse(self.conn.tr2.disconnecting)
+
     def test_invalid_different_network(self) -> None:
         manager3 = self.create_peer(network='mainnet')
         conn = FakeConnection(self.manager1, manager3)
@@ -314,6 +382,129 @@ def test_idle_connection(self) -> None:
         self.clock.advance(15)
         self.assertIsNotConnected(self.conn)
 
+    def test_invalid_expected_peer_id(self) -> None:
+        p2p_manager: ConnectionsManager = self.manager2.connections
+
+        # Initially, manager1 and manager2 are handshaking, from the setup
+        assert p2p_manager.connecting_peers == {}
+        assert p2p_manager.handshaking_peers == {self.conn.proto2}
+        assert p2p_manager.connected_peers == {}
+
+        # We change our peer id (on manager1)
+        new_peer = PrivatePeer.auto_generated()
+        self.conn.proto1.my_peer = new_peer
+        self.conn.tr2._peer = new_peer
+
+        # We advance the states and fail in the PEER-ID step (on manager2)
+        self._check_result_only_cmd(self.conn.peek_tr2_value(), b'HELLO')
+        self.conn.run_one_step()
+        self._check_result_only_cmd(self.conn.peek_tr2_value(), b'PEER-ID')
+        self.conn.run_one_step()
+        assert self.conn.peek_tr2_value() == b'ERROR Peer id different from the requested one.\r\n'
+
+    def test_invalid_expected_peer_id_bootstrap(self) -> None:
+        p2p_manager: ConnectionsManager = self.manager1.connections
+
+        # Initially, manager1 and manager2 are handshaking, from the setup
+        assert p2p_manager.connecting_peers == {}
+        assert p2p_manager.handshaking_peers == {self.conn.proto1}
+        assert p2p_manager.connected_peers == {}
+
+        # We create a new manager3, and use it as a bootstrap in manager1
+        peer3 = PrivatePeer.auto_generated()
+        manager3: HathorManager = self.create_peer(self.network, peer3)
+        conn = FakeConnection(manager1=manager3, manager2=self.manager1, fake_bootstrap_id=peer3.id)
+
+        # Now manager1 and manager3 are handshaking
+        assert p2p_manager.connecting_peers == {}
+        assert p2p_manager.handshaking_peers == {self.conn.proto1, conn.proto2}
+        assert p2p_manager.connected_peers == {}
+
+        # We change our peer id (on manager3)
+        new_peer = PrivatePeer.auto_generated()
+        conn.proto1.my_peer = new_peer
+        conn.tr2._peer = new_peer
+
+        # We advance the states and fail in the PEER-ID step (on manager1)
+        self._check_result_only_cmd(conn.peek_tr2_value(), b'HELLO')
+        conn.run_one_step()
+        self._check_result_only_cmd(conn.peek_tr2_value(), b'PEER-ID')
+        conn.run_one_step()
+        assert conn.peek_tr2_value() == 
b'ERROR Peer id different from the requested one.\r\n' + + def test_valid_unset_peer_id_bootstrap(self) -> None: + p2p_manager: ConnectionsManager = self.manager1.connections + + # Initially, manager1 and manager2 are handshaking, from the setup + assert p2p_manager.connecting_peers == {} + assert p2p_manager.handshaking_peers == {self.conn.proto1} + assert p2p_manager.connected_peers == {} + + # We create a new manager3, and use it as a bootstrap in manager1, but without the peer_id + manager3: HathorManager = self.create_peer(self.network) + conn = FakeConnection(manager1=manager3, manager2=self.manager1, fake_bootstrap_id=None) + + # Now manager1 and manager3 are handshaking + assert p2p_manager.connecting_peers == {} + assert p2p_manager.handshaking_peers == {self.conn.proto1, conn.proto2} + assert p2p_manager.connected_peers == {} + + # We change our peer id (on manager3) + new_peer = PrivatePeer.auto_generated() + conn.proto1.my_peer = new_peer + conn.tr2._peer = new_peer + + # We advance the states and in this case succeed (on manager1), because + # even though the peer_id was changed, it wasn't initially set. + self._check_result_only_cmd(conn.peek_tr2_value(), b'HELLO') + conn.run_one_step() + self._check_result_only_cmd(conn.peek_tr2_value(), b'PEER-ID') + conn.run_one_step() + self._check_result_only_cmd(conn.peek_tr2_value(), b'READY') + + def test_exception_on_synchronous_cmd_handler(self) -> None: + self.conn.run_one_step() + self.conn.run_one_step() + + def error() -> None: + raise Exception('some error') + + self.conn.proto1.state.cmd_map = { + ProtocolMessages.READY: error + } + + self.conn.run_one_step() + assert self.conn.peek_tr1_value() == b'ERROR Error processing "READY" command\r\n' + self.assertTrue(self.conn.tr1.disconnecting) + + def test_exception_on_deferred_cmd_handler(self) -> None: + self.conn.run_one_step() + self.conn.run_one_step() + + self.conn.proto1.state.cmd_map = { + ProtocolMessages.READY: lambda: defer.fail(Exception('some error')), + } + + self.conn.run_one_step() + assert self.conn.peek_tr1_value() == b'ERROR Error processing "READY" command\r\n' + self.assertTrue(self.conn.tr1.disconnecting) + + def test_exception_on_asynchronous_cmd_handler(self) -> None: + self.conn.run_one_step() + self.conn.run_one_step() + + async def error() -> None: + raise Exception('some error') + + self.conn.proto1.state.cmd_map = { + ProtocolMessages.READY: error + } + + self.conn.run_one_step() + self.clock.advance(1) + assert self.conn.peek_tr1_value() == b'ERROR Error processing "READY" command\r\n' + self.assertTrue(self.conn.tr1.disconnecting) + class SyncV1HathorProtocolTestCase(unittest.SyncV1Params, BaseHathorProtocolTestCase): __test__ = True diff --git a/tests/resources/p2p/test_add_peer.py b/tests/resources/p2p/test_add_peer.py index ca9ca99a2..f70f3aefe 100644 --- a/tests/resources/p2p/test_add_peer.py +++ b/tests/resources/p2p/test_add_peer.py @@ -1,7 +1,7 @@ from twisted.internet.defer import inlineCallbacks -from hathor.p2p.entrypoint import Entrypoint from hathor.p2p.peer import PrivatePeer +from hathor.p2p.peer_endpoint import PeerAddress from hathor.p2p.resources import AddPeersResource from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest @@ -22,7 +22,7 @@ def test_connecting_peers(self): # test when we send a peer we're already connected to peer = PrivatePeer.auto_generated() - peer.entrypoints = [Entrypoint.parse('tcp://localhost:8006')] + peer.entrypoints = [PeerAddress.parse('tcp://localhost:8006')] 
self.manager.connections.verified_peer_storage.add(peer) response = yield self.web.post('p2p/peers', ['tcp://localhost:8006', 'tcp://localhost:8007']) data = response.json_value() diff --git a/tests/resources/p2p/test_status.py b/tests/resources/p2p/test_status.py index 68d409348..646ba6903 100644 --- a/tests/resources/p2p/test_status.py +++ b/tests/resources/p2p/test_status.py @@ -1,9 +1,10 @@ from twisted.internet import endpoints +from twisted.internet.address import IPv4Address from twisted.internet.defer import inlineCallbacks import hathor from hathor.conf.unittests import SETTINGS -from hathor.p2p.entrypoint import Entrypoint +from hathor.p2p.peer_endpoint import PeerAddress from hathor.p2p.resources import StatusResource from hathor.simulator import FakeConnection from tests import unittest @@ -16,14 +17,15 @@ class BaseStatusTest(_BaseResourceTest._ResourceTest): def setUp(self): super().setUp() self.web = StubSite(StatusResource(self.manager)) - self.entrypoint = Entrypoint.parse('tcp://192.168.1.1:54321') - self.manager.connections.my_peer.info.entrypoints.append(self.entrypoint) + address1 = IPv4Address('TCP', '192.168.1.1', 54321) + self.manager.connections.my_peer.info.entrypoints.append(PeerAddress.from_address(address1)) self.manager.peers_whitelist.append(self.get_random_peer_from_pool().id) self.manager.peers_whitelist.append(self.get_random_peer_from_pool().id) self.manager2 = self.create_peer('testnet') - self.manager2.connections.my_peer.info.entrypoints.append(self.entrypoint) - self.conn1 = FakeConnection(self.manager, self.manager2) + address2 = IPv4Address('TCP', '192.168.1.1', 54322) + self.manager2.connections.my_peer.info.entrypoints.append(PeerAddress.from_address(address2)) + self.conn1 = FakeConnection(self.manager, self.manager2, addr1=address1, addr2=address2) @inlineCallbacks def test_get(self): From bfeacb5fd9cca82f1e675b3ad25b982ec36d1e44 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Thu, 7 Nov 2024 19:19:39 -0300 Subject: [PATCH 45/61] feat(feature-activation): implement closest_block metadata and Feature Activation for Transactions (#933) --- .../bit_signaling_service.py | 2 +- hathor/feature_activation/feature_service.py | 34 +++- .../feature_activation/resources/feature.py | 4 +- hathor/transaction/static_metadata.py | 61 ++++++- .../migrations/add_closest_ancestor_block.py | 37 ++++ .../merge_mined_block_verifier.py | 2 +- hathor/vertex_handler/vertex_handler.py | 16 +- .../test_bit_signaling_service.py | 8 +- .../test_feature_service.py | 4 +- .../test_feature_simulation.py | 161 ++++++++++++++++-- tests/resources/transaction/test_tx.py | 16 +- tests/tx/test_cache_storage.py | 2 +- tests/tx/test_static_metadata.py | 86 ++++++++++ tests/tx/test_tx.py | 6 +- 14 files changed, 383 insertions(+), 56 deletions(-) create mode 100644 hathor/transaction/storage/migrations/add_closest_ancestor_block.py create mode 100644 tests/tx/test_static_metadata.py diff --git a/hathor/feature_activation/bit_signaling_service.py b/hathor/feature_activation/bit_signaling_service.py index 3d21c32e5..427eae57b 100644 --- a/hathor/feature_activation/bit_signaling_service.py +++ b/hathor/feature_activation/bit_signaling_service.py @@ -163,7 +163,7 @@ def _log_signal_bits(self, feature: Feature, enable_bit: bool, support: bool, no def _get_signaling_features(self, block: Block) -> dict[Feature, Criteria]: """Given a specific block, return all features that are in a signaling state for that block.""" - feature_infos = 
self._feature_service.get_feature_infos(block=block) + feature_infos = self._feature_service.get_feature_infos(vertex=block) signaling_features = { feature: feature_info.criteria for feature, feature_info in feature_infos.items() diff --git a/hathor/feature_activation/feature_service.py b/hathor/feature_activation/feature_service.py index 2b8212cef..7649589c8 100644 --- a/hathor/feature_activation/feature_service.py +++ b/hathor/feature_activation/feature_service.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + from dataclasses import dataclass from typing import TYPE_CHECKING, Optional, TypeAlias @@ -22,7 +24,7 @@ if TYPE_CHECKING: from hathor.feature_activation.bit_signaling_service import BitSignalingService - from hathor.transaction import Block + from hathor.transaction import Block, Vertex from hathor.transaction.storage import TransactionStorage @@ -49,11 +51,20 @@ def __init__(self, *, settings: HathorSettings, tx_storage: 'TransactionStorage' self._tx_storage = tx_storage self.bit_signaling_service: Optional['BitSignalingService'] = None - def is_feature_active(self, *, block: 'Block', feature: Feature) -> bool: - """Returns whether a Feature is active at a certain block.""" + def is_feature_active(self, *, vertex: Vertex, feature: Feature) -> bool: + """Return whether a Feature is active for a certain vertex.""" + block = self._get_feature_activation_block(vertex) state = self.get_state(block=block, feature=feature) + return state.is_active() - return state == FeatureState.ACTIVE + def _get_feature_activation_block(self, vertex: Vertex) -> Block: + """Return the block used for feature activation depending on the vertex type.""" + from hathor.transaction import Block, Transaction + if isinstance(vertex, Block): + return vertex + if isinstance(vertex, Transaction): + return self._tx_storage.get_block(vertex.static_metadata.closest_ancestor_block) + raise NotImplementedError def is_signaling_mandatory_features(self, block: 'Block') -> BlockSignalingState: """ @@ -64,7 +75,7 @@ def is_signaling_mandatory_features(self, block: 'Block') -> BlockSignalingState height = block.static_metadata.height offset_to_boundary = height % self._feature_settings.evaluation_interval remaining_blocks = self._feature_settings.evaluation_interval - offset_to_boundary - 1 - feature_infos = self.get_feature_infos(block=block) + feature_infos = self.get_feature_infos(vertex=block) must_signal_features = ( feature for feature, feature_info in feature_infos.items() @@ -194,8 +205,9 @@ def _calculate_new_state( raise NotImplementedError(f'Unknown previous state: {previous_state}') - def get_feature_infos(self, *, block: 'Block') -> dict[Feature, FeatureInfo]: - """Returns the criteria definition and feature state for all features at a certain block.""" + def get_feature_infos(self, *, vertex: Vertex) -> dict[Feature, FeatureInfo]: + """Return the criteria definition and feature state for all features for a certain vertex.""" + block = self._get_feature_activation_block(vertex) return { feature: FeatureInfo( criteria=criteria, @@ -204,6 +216,14 @@ def get_feature_infos(self, *, block: 'Block') -> dict[Feature, FeatureInfo]: for feature, criteria in self._feature_settings.features.items() } + def get_feature_states(self, *, vertex: Vertex) -> dict[Feature, FeatureState]: + """Return the feature state for all features for a certain vertex.""" + feature_infos = self.get_feature_infos(vertex=vertex) + 
return { + feature: info.state + for feature, info in feature_infos.items() + } + def _get_ancestor_at_height(self, *, block: 'Block', ancestor_height: int) -> 'Block': """ Given a block, return its ancestor at a specific height. diff --git a/hathor/feature_activation/resources/feature.py b/hathor/feature_activation/resources/feature.py index 75e7c16bf..bb65e7d67 100644 --- a/hathor/feature_activation/resources/feature.py +++ b/hathor/feature_activation/resources/feature.py @@ -68,7 +68,7 @@ def get_block_features(self, request: Request) -> bytes: return error.json_dumpb() signal_bits = [] - feature_infos = self._feature_service.get_feature_infos(block=block) + feature_infos = self._feature_service.get_feature_infos(vertex=block) for feature, feature_info in feature_infos.items(): if feature_info.state not in FeatureState.get_signaling_states(): @@ -90,7 +90,7 @@ def get_block_features(self, request: Request) -> bytes: def get_features(self) -> bytes: best_block = self.tx_storage.get_best_block() bit_counts = best_block.static_metadata.feature_activation_bit_counts - feature_infos = self._feature_service.get_feature_infos(block=best_block) + feature_infos = self._feature_service.get_feature_infos(vertex=best_block) features = [] for feature, feature_info in feature_infos.items(): diff --git a/hathor/transaction/static_metadata.py b/hathor/transaction/static_metadata.py index 09cdf98dd..03855479a 100644 --- a/hathor/transaction/static_metadata.py +++ b/hathor/transaction/static_metadata.py @@ -19,7 +19,7 @@ from operator import add from typing import TYPE_CHECKING, Callable -from typing_extensions import Self +from typing_extensions import Self, override from hathor.feature_activation.feature import Feature from hathor.feature_activation.model.feature_state import FeatureState @@ -57,6 +57,7 @@ def from_bytes(cls, data: bytes, *, target: 'BaseTransaction') -> 'VertexStaticM return BlockStaticMetadata(**json_dict) if isinstance(target, Transaction): + json_dict['closest_ancestor_block'] = bytes.fromhex(json_dict['closest_ancestor_block']) return TransactionStaticMetadata(**json_dict) raise NotImplementedError @@ -175,6 +176,10 @@ def _get_previous_feature_activation_bit_counts( class TransactionStaticMetadata(VertexStaticMetadata): + # The Block with the greatest height that is a direct or indirect dependency (ancestor) of the transaction, + # including both funds and verification DAGs. It's used by Feature Activation for Transactions. + closest_ancestor_block: VertexId + @classmethod def create_from_storage(cls, tx: 'Transaction', settings: HathorSettings, storage: 'TransactionStorage') -> Self: """Create a `TransactionStaticMetadata` using dependencies provided by a storage.""" @@ -189,14 +194,12 @@ def create( ) -> Self: """Create a `TransactionStaticMetadata` using dependencies provided by a `vertex_getter`. 
This must be fast, ideally O(1).""" - min_height = cls._calculate_min_height( - tx, - settings, - vertex_getter=vertex_getter, - ) + min_height = cls._calculate_min_height(tx, settings, vertex_getter) + closest_ancestor_block = cls._calculate_closest_ancestor_block(tx, settings, vertex_getter) return cls( - min_height=min_height + min_height=min_height, + closest_ancestor_block=closest_ancestor_block, ) @classmethod @@ -245,3 +248,47 @@ def _calculate_my_min_height( if isinstance(spent_tx, Block): min_height = max(min_height, spent_tx.static_metadata.height + settings.REWARD_SPEND_MIN_BLOCKS + 1) return min_height + + @staticmethod + def _calculate_closest_ancestor_block( + tx: 'Transaction', + settings: HathorSettings, + vertex_getter: Callable[[VertexId], 'BaseTransaction'], + ) -> VertexId: + """ + Calculate the tx's closest_ancestor_block. It's calculated by propagating the metadata forward in the DAG. + """ + from hathor.transaction import Block, Transaction + if tx.is_genesis: + return settings.GENESIS_BLOCK_HASH + + closest_ancestor_block: Block | None = None + + for vertex_id in tx.get_all_dependencies(): + vertex = vertex_getter(vertex_id) + candidate_block: Block + + if isinstance(vertex, Block): + candidate_block = vertex + elif isinstance(vertex, Transaction): + vertex_candidate = vertex_getter(vertex.static_metadata.closest_ancestor_block) + assert isinstance(vertex_candidate, Block) + candidate_block = vertex_candidate + else: + raise NotImplementedError + + if ( + not closest_ancestor_block + or candidate_block.static_metadata.height > closest_ancestor_block.static_metadata.height + ): + closest_ancestor_block = candidate_block + + assert closest_ancestor_block is not None + return closest_ancestor_block.hash + + @override + def json_dumpb(self) -> bytes: + from hathor.util import json_dumpb + json_dict = self.dict() + json_dict['closest_ancestor_block'] = json_dict['closest_ancestor_block'].hex() + return json_dumpb(json_dict) diff --git a/hathor/transaction/storage/migrations/add_closest_ancestor_block.py b/hathor/transaction/storage/migrations/add_closest_ancestor_block.py new file mode 100644 index 000000000..9ac3c5e8e --- /dev/null +++ b/hathor/transaction/storage/migrations/add_closest_ancestor_block.py @@ -0,0 +1,37 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import TYPE_CHECKING + +from structlog import get_logger + +from hathor.transaction.storage.migrations import BaseMigration + +if TYPE_CHECKING: + from hathor.transaction.storage import TransactionStorage + +logger = get_logger() + + +class Migration(BaseMigration): + def skip_empty_db(self) -> bool: + return True + + def get_db_name(self) -> str: + return 'add_closest_ancestor_block' + + def run(self, storage: 'TransactionStorage') -> None: + raise Exception('Cannot migrate your database due to an incompatible change in the metadata. 
' + 'Please, delete your data folder and use the latest available snapshot or sync ' + 'from beginning.') diff --git a/hathor/verification/merge_mined_block_verifier.py b/hathor/verification/merge_mined_block_verifier.py index 60bfb42da..307604104 100644 --- a/hathor/verification/merge_mined_block_verifier.py +++ b/hathor/verification/merge_mined_block_verifier.py @@ -31,7 +31,7 @@ def verify_aux_pow(self, block: MergeMinedBlock) -> None: assert block.aux_pow is not None is_feature_active = self._feature_service.is_feature_active( - block=block, + vertex=block, feature=Feature.INCREASE_MAX_MERKLE_PATH_LENGTH ) max_merkle_path_length = ( diff --git a/hathor/vertex_handler/vertex_handler.py b/hathor/vertex_handler/vertex_handler.py index 59650e83e..f3e824bf1 100644 --- a/hathor/vertex_handler/vertex_handler.py +++ b/hathor/vertex_handler/vertex_handler.py @@ -208,24 +208,22 @@ def _log_new_object(self, tx: BaseTransaction, message_fmt: str, *, quiet: bool) """ metadata = tx.get_metadata() now = datetime.datetime.fromtimestamp(self._reactor.seconds()) + feature_states = self._feature_service.get_feature_states(vertex=tx) kwargs = { 'tx': tx, 'ts_date': datetime.datetime.fromtimestamp(tx.timestamp), 'time_from_now': tx.get_time_from_now(now), 'validation': metadata.validation.name, + 'feature_states': { + feature.value: state.value + for feature, state in feature_states.items() + } } if self._log_vertex_bytes: kwargs['bytes'] = bytes(tx).hex() - if tx.is_block: + if isinstance(tx, Block): message = message_fmt.format('block') - if isinstance(tx, Block): - feature_infos = self._feature_service.get_feature_infos(block=tx) - feature_states = { - feature.value: info.state.value - for feature, info in feature_infos.items() - } - kwargs['_height'] = tx.get_height() - kwargs['feature_states'] = feature_states + kwargs['_height'] = tx.get_height() else: message = message_fmt.format('tx') if not quiet: diff --git a/tests/feature_activation/test_bit_signaling_service.py b/tests/feature_activation/test_bit_signaling_service.py index 8b487be92..5f41ff01a 100644 --- a/tests/feature_activation/test_bit_signaling_service.py +++ b/tests/feature_activation/test_bit_signaling_service.py @@ -24,7 +24,7 @@ from hathor.feature_activation.model.feature_info import FeatureInfo from hathor.feature_activation.model.feature_state import FeatureState from hathor.feature_activation.settings import Settings as FeatureSettings -from hathor.transaction import Block +from hathor.transaction import Block, Vertex from hathor.transaction.storage import TransactionStorage @@ -169,7 +169,7 @@ def _test_generate_signal_bits( settings = Mock(spec_set=HathorSettings) settings.FEATURE_ACTIVATION = FeatureSettings() feature_service = Mock(spec_set=FeatureService) - feature_service.get_feature_infos = lambda block: feature_infos + feature_service.get_feature_infos = lambda vertex: feature_infos service = BitSignalingService( settings=settings, @@ -264,8 +264,8 @@ def test_non_signaling_features_warning( tx_storage = Mock(spec_set=TransactionStorage) tx_storage.get_best_block = lambda: best_block - def get_feature_infos_mock(block: Block) -> dict[Feature, FeatureInfo]: - if block == best_block: + def get_feature_infos_mock(vertex: Vertex) -> dict[Feature, FeatureInfo]: + if vertex == best_block: return {} raise NotImplementedError diff --git a/tests/feature_activation/test_feature_service.py b/tests/feature_activation/test_feature_service.py index cb8546bb1..f042b4e45 100644 --- a/tests/feature_activation/test_feature_service.py +++ 
b/tests/feature_activation/test_feature_service.py @@ -447,7 +447,7 @@ def test_is_feature_active(block_height: int) -> None: service.bit_signaling_service = Mock() block = not_none(storage.get_block_by_height(block_height)) - result = service.is_feature_active(block=block, feature=Feature.NOP_FEATURE_1) + result = service.is_feature_active(vertex=block, feature=Feature.NOP_FEATURE_1) assert result is True @@ -505,7 +505,7 @@ def get_state(self: FeatureService, *, block: Block, feature: Feature) -> Featur return states[feature] with patch('hathor.feature_activation.feature_service.FeatureService.get_state', get_state): - result = service.get_feature_infos(block=Mock()) + result = service.get_feature_infos(vertex=Mock(spec_set=Block)) expected = { Feature.NOP_FEATURE_1: FeatureInfo(criteria_mock_1, FeatureState.STARTED), diff --git a/tests/feature_activation/test_feature_simulation.py b/tests/feature_activation/test_feature_simulation.py index 6bbeb9e35..30dd9c77e 100644 --- a/tests/feature_activation/test_feature_simulation.py +++ b/tests/feature_activation/test_feature_simulation.py @@ -22,10 +22,11 @@ from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import FeatureService from hathor.feature_activation.model.criteria import Criteria +from hathor.feature_activation.model.feature_state import FeatureState from hathor.feature_activation.resources.feature import FeatureResource from hathor.feature_activation.settings import Settings as FeatureSettings from hathor.simulator import FakeConnection -from hathor.simulator.utils import add_new_blocks +from hathor.simulator.utils import add_new_blocks, gen_new_tx from hathor.transaction.exceptions import BlockMustSignalError from hathor.util import not_none from tests import unittest @@ -75,11 +76,14 @@ def test_feature(self) -> None: } ) - settings = get_global_settings()._replace(FEATURE_ACTIVATION=feature_settings) + settings = get_global_settings()._replace(FEATURE_ACTIVATION=feature_settings, REWARD_SPEND_MIN_BLOCKS=0) + self.simulator.settings = settings builder = self.get_simulator_builder().set_settings(settings) artifacts = self.simulator.create_artifacts(builder) feature_service = artifacts.feature_service manager = artifacts.manager + assert manager.wallet is not None + address = manager.wallet.get_unused_address(mark_as_used=False) feature_resource = FeatureResource( settings=settings, @@ -95,9 +99,16 @@ def test_feature(self) -> None: patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock), patch.object(FeatureService, '_get_ancestor_iteratively', get_ancestor_iteratively_mock), ): + assert artifacts.bit_signaling_service.get_support_features() == [] + assert artifacts.bit_signaling_service.get_not_support_features() == [] + # at the beginning, the feature is DEFINED: - add_new_blocks(manager, 10) + [*_, last_block] = add_new_blocks(manager, 10) self.simulator.run(60) + tx = gen_new_tx(manager, address, 6400*10) + tx.weight = 25 + tx.update_hash() + assert manager.propagate_tx(tx, fails_silently=False) result = self._get_result(web_client) assert result == dict( block_height=10, @@ -121,9 +132,21 @@ def test_feature(self) -> None: assert get_ancestor_iteratively_mock.call_count == 0 calculate_new_state_mock.reset_mock() + expected_states = {Feature.NOP_FEATURE_1: FeatureState.DEFINED} + assert feature_service.get_feature_states(vertex=last_block) == expected_states + assert feature_service.get_feature_states(vertex=tx) == expected_states + assert 
tx.static_metadata.closest_ancestor_block == last_block.hash + + assert artifacts.bit_signaling_service.get_support_features() == [] + assert artifacts.bit_signaling_service.get_not_support_features() == [] + # at block 19, the feature is DEFINED, just before becoming STARTED: - add_new_blocks(manager, 9) + [*_, last_block] = add_new_blocks(manager, 9) self.simulator.run(60) + tx = gen_new_tx(manager, address, 6400*19) + tx.weight = 25 + tx.update_hash() + assert manager.propagate_tx(tx, fails_silently=False) result = self._get_result(web_client) assert result == dict( block_height=19, @@ -146,9 +169,21 @@ def test_feature(self) -> None: assert get_ancestor_iteratively_mock.call_count == 0 calculate_new_state_mock.reset_mock() + expected_states = {Feature.NOP_FEATURE_1: FeatureState.DEFINED} + assert feature_service.get_feature_states(vertex=last_block) == expected_states + assert feature_service.get_feature_states(vertex=tx) == expected_states + assert tx.static_metadata.closest_ancestor_block == last_block.hash + + assert artifacts.bit_signaling_service.get_support_features() == [] + assert artifacts.bit_signaling_service.get_not_support_features() == [] + # at block 20, the feature becomes STARTED: - add_new_blocks(manager, 1) + [*_, last_block] = add_new_blocks(manager, 1) self.simulator.run(60) + tx = gen_new_tx(manager, address, 6400*20) + tx.weight = 25 + tx.update_hash() + assert manager.propagate_tx(tx, fails_silently=False) result = self._get_result(web_client) assert result == dict( block_height=20, @@ -169,13 +204,25 @@ def test_feature(self) -> None: assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 20 assert get_ancestor_iteratively_mock.call_count == 0 + expected_states = {Feature.NOP_FEATURE_1: FeatureState.STARTED} + assert feature_service.get_feature_states(vertex=last_block) == expected_states + assert feature_service.get_feature_states(vertex=tx) == expected_states + assert tx.static_metadata.closest_ancestor_block == last_block.hash + + assert artifacts.bit_signaling_service.get_support_features() == [] + assert artifacts.bit_signaling_service.get_not_support_features() == [Feature.NOP_FEATURE_1] + # we add one block before resetting the mock, just to make sure block 20 gets a chance to be saved add_new_blocks(manager, 1) calculate_new_state_mock.reset_mock() # at block 55, the feature is STARTED, just before becoming MUST_SIGNAL: - add_new_blocks(manager, 34) + [*_, last_block] = add_new_blocks(manager, 34) self.simulator.run(60) + tx = gen_new_tx(manager, address, 6400*55) + tx.weight = 30 + tx.update_hash() + assert manager.propagate_tx(tx, fails_silently=False) result = self._get_result(web_client) assert result == dict( block_height=55, @@ -197,9 +244,21 @@ def test_feature(self) -> None: assert get_ancestor_iteratively_mock.call_count == 0 calculate_new_state_mock.reset_mock() + expected_states = {Feature.NOP_FEATURE_1: FeatureState.STARTED} + assert feature_service.get_feature_states(vertex=last_block) == expected_states + assert feature_service.get_feature_states(vertex=tx) == expected_states + assert tx.static_metadata.closest_ancestor_block == last_block.hash + + assert artifacts.bit_signaling_service.get_support_features() == [] + assert artifacts.bit_signaling_service.get_not_support_features() == [Feature.NOP_FEATURE_1] + # at block 56, the feature becomes MUST_SIGNAL: - add_new_blocks(manager, 1) + [*_, last_block] = add_new_blocks(manager, 1) self.simulator.run(60) + tx = gen_new_tx(manager, address, 6400*56) + 
tx.weight = 30 + tx.update_hash() + assert manager.propagate_tx(tx, fails_silently=False) result = self._get_result(web_client) assert result == dict( block_height=56, @@ -220,6 +279,14 @@ def test_feature(self) -> None: assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 56 assert get_ancestor_iteratively_mock.call_count == 0 + expected_states = {Feature.NOP_FEATURE_1: FeatureState.MUST_SIGNAL} + assert feature_service.get_feature_states(vertex=last_block) == expected_states + assert feature_service.get_feature_states(vertex=tx) == expected_states + assert tx.static_metadata.closest_ancestor_block == last_block.hash + + assert artifacts.bit_signaling_service.get_support_features() == [Feature.NOP_FEATURE_1] + assert artifacts.bit_signaling_service.get_not_support_features() == [] + # we add one block before resetting the mock, just to make sure block 56 gets a chance to be saved add_new_blocks(manager, 1, signal_bits=0b1) calculate_new_state_mock.reset_mock() @@ -236,8 +303,12 @@ def test_feature(self) -> None: assert not manager.propagate_tx(non_signaling_block) # at block 59, the feature is MUST_SIGNAL, just before becoming LOCKED_IN: - add_new_blocks(manager, num_blocks=2, signal_bits=0b1) + [*_, last_block] = add_new_blocks(manager, num_blocks=2, signal_bits=0b1) self.simulator.run(60) + tx = gen_new_tx(manager, address, 6400*59) + tx.weight = 30 + tx.update_hash() + assert manager.propagate_tx(tx, fails_silently=False) result = self._get_result(web_client) assert result == dict( block_height=59, @@ -260,9 +331,21 @@ def test_feature(self) -> None: assert get_ancestor_iteratively_mock.call_count == 0 calculate_new_state_mock.reset_mock() + expected_states = {Feature.NOP_FEATURE_1: FeatureState.MUST_SIGNAL} + assert feature_service.get_feature_states(vertex=last_block) == expected_states + assert feature_service.get_feature_states(vertex=tx) == expected_states + assert tx.static_metadata.closest_ancestor_block == last_block.hash + + assert artifacts.bit_signaling_service.get_support_features() == [Feature.NOP_FEATURE_1] + assert artifacts.bit_signaling_service.get_not_support_features() == [] + # at block 60, the feature becomes LOCKED_IN: - add_new_blocks(manager, 1) + [*_, last_block] = add_new_blocks(manager, 1) self.simulator.run(60) + tx = gen_new_tx(manager, address, 6400*60) + tx.weight = 30 + tx.update_hash() + assert manager.propagate_tx(tx, fails_silently=False) result = self._get_result(web_client) assert result == dict( block_height=60, @@ -283,13 +366,25 @@ def test_feature(self) -> None: assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 60 assert get_ancestor_iteratively_mock.call_count == 0 + expected_states = {Feature.NOP_FEATURE_1: FeatureState.LOCKED_IN} + assert feature_service.get_feature_states(vertex=last_block) == expected_states + assert feature_service.get_feature_states(vertex=tx) == expected_states + assert tx.static_metadata.closest_ancestor_block == last_block.hash + + assert artifacts.bit_signaling_service.get_support_features() == [Feature.NOP_FEATURE_1] + assert artifacts.bit_signaling_service.get_not_support_features() == [] + # we add one block before resetting the mock, just to make sure block 60 gets a chance to be saved add_new_blocks(manager, 1) calculate_new_state_mock.reset_mock() # at block 71, the feature is LOCKED_IN, just before becoming ACTIVE: - add_new_blocks(manager, 10) + [*_, last_block] = add_new_blocks(manager, 10) self.simulator.run(60) + tx = 
gen_new_tx(manager, address, 6400*71) + tx.weight = 30 + tx.update_hash() + assert manager.propagate_tx(tx, fails_silently=False) result = self._get_result(web_client) assert result == dict( block_height=71, @@ -311,9 +406,21 @@ def test_feature(self) -> None: assert get_ancestor_iteratively_mock.call_count == 0 calculate_new_state_mock.reset_mock() + expected_states = {Feature.NOP_FEATURE_1: FeatureState.LOCKED_IN} + assert feature_service.get_feature_states(vertex=last_block) == expected_states + assert feature_service.get_feature_states(vertex=tx) == expected_states + assert tx.static_metadata.closest_ancestor_block == last_block.hash + + assert artifacts.bit_signaling_service.get_support_features() == [Feature.NOP_FEATURE_1] + assert artifacts.bit_signaling_service.get_not_support_features() == [] + # at block 72, the feature becomes ACTIVE, forever: - add_new_blocks(manager, 1) + [*_, last_block] = add_new_blocks(manager, 1) self.simulator.run(60) + tx = gen_new_tx(manager, address, 6400*72) + tx.weight = 30 + tx.update_hash() + assert manager.propagate_tx(tx, fails_silently=False) result = self._get_result(web_client) assert result == dict( block_height=72, @@ -335,6 +442,14 @@ def test_feature(self) -> None: assert get_ancestor_iteratively_mock.call_count == 0 calculate_new_state_mock.reset_mock() + expected_states = {Feature.NOP_FEATURE_1: FeatureState.ACTIVE} + assert feature_service.get_feature_states(vertex=last_block) == expected_states + assert feature_service.get_feature_states(vertex=tx) == expected_states + assert tx.static_metadata.closest_ancestor_block == last_block.hash + + assert artifacts.bit_signaling_service.get_support_features() == [] + assert artifacts.bit_signaling_service.get_not_support_features() == [] + def test_reorg(self) -> None: feature_settings = FeatureSettings( evaluation_interval=4, @@ -364,6 +479,9 @@ def test_reorg(self) -> None: ) web_client = StubSite(feature_resource) + assert artifacts.bit_signaling_service.get_support_features() == [] + assert artifacts.bit_signaling_service.get_not_support_features() == [] + # at the beginning, the feature is DEFINED: self.simulator.run(60) result = self._get_result(web_client) @@ -384,6 +502,9 @@ def test_reorg(self) -> None: ] ) + assert artifacts.bit_signaling_service.get_support_features() == [] + assert artifacts.bit_signaling_service.get_not_support_features() == [] + # at block 4, the feature becomes STARTED with 0% acceptance add_new_blocks(manager, 4) self.simulator.run(60) @@ -405,6 +526,9 @@ def test_reorg(self) -> None: ] ) + assert artifacts.bit_signaling_service.get_support_features() == [] + assert artifacts.bit_signaling_service.get_not_support_features() == [Feature.NOP_FEATURE_1] + # at block 7, acceptance is 25% (we're signaling 1 block out of 4) add_new_blocks(manager, 2) add_new_blocks(manager, 1, signal_bits=0b10) @@ -427,6 +551,9 @@ def test_reorg(self) -> None: ] ) + assert artifacts.bit_signaling_service.get_support_features() == [] + assert artifacts.bit_signaling_service.get_not_support_features() == [Feature.NOP_FEATURE_1] + # at block 11, acceptance is 75% (we're signaling 3 blocks out of 4), # so the feature will be locked-in in the next block add_new_blocks(manager, 1) @@ -450,6 +577,9 @@ def test_reorg(self) -> None: ] ) + assert artifacts.bit_signaling_service.get_support_features() == [] + assert artifacts.bit_signaling_service.get_not_support_features() == [Feature.NOP_FEATURE_1] + # at block 12, the feature is locked-in add_new_blocks(manager, 1) self.simulator.run(60) @@ 
-471,6 +601,9 @@ def test_reorg(self) -> None: ] ) + assert artifacts.bit_signaling_service.get_support_features() == [] + assert artifacts.bit_signaling_service.get_not_support_features() == [Feature.NOP_FEATURE_1] + # at block 16, the feature is activated add_new_blocks(manager, 4) self.simulator.run(60) @@ -492,6 +625,9 @@ def test_reorg(self) -> None: ] ) + assert artifacts.bit_signaling_service.get_support_features() == [] + assert artifacts.bit_signaling_service.get_not_support_features() == [] + # We then create a new manager with one more block (17 vs 16), so its blockchain wins when # both managers are connected. This causes a reorg and the feature goes back to the STARTED state. builder2 = self.get_simulator_builder().set_settings(settings) @@ -523,6 +659,9 @@ def test_reorg(self) -> None: ] ) + assert artifacts.bit_signaling_service.get_support_features() == [] + assert artifacts.bit_signaling_service.get_not_support_features() == [Feature.NOP_FEATURE_1] + class BaseMemoryStorageFeatureSimulationTest(BaseFeatureSimulationTest): def get_simulator_builder(self) -> Builder: diff --git a/tests/resources/transaction/test_tx.py b/tests/resources/transaction/test_tx.py index 01cac3f8c..52f776cf0 100644 --- a/tests/resources/transaction/test_tx.py +++ b/tests/resources/transaction/test_tx.py @@ -88,7 +88,7 @@ def test_get_one_known_tx(self): '0248b9e7d6a626f45dec86975b00f4dd53f84f1f0091125250b044e49023fbbd0f74f6093cdd2226fdff3e09a1000002be') tx = Transaction.create_from_struct(bytes.fromhex(tx_hex), self.manager.tx_storage) tx.get_metadata().validation = ValidationState.FULL - tx.set_static_metadata(TransactionStaticMetadata(min_height=0)) + tx.set_static_metadata(TransactionStaticMetadata(min_height=0, closest_ancestor_block=b'')) self.manager.tx_storage.save_transaction(tx) tx_parent1_hex = ('0001010102001c382847d8440d05da95420bee2ebeb32bc437f82a9ae47b0745c8a29a7b0d001c382847d844' @@ -101,7 +101,7 @@ def test_get_one_known_tx(self): '8fb080f53a0c9c57ddb000000120') tx_parent1 = Transaction.create_from_struct(bytes.fromhex(tx_parent1_hex), self.manager.tx_storage) tx_parent1.get_metadata().validation = ValidationState.FULL - tx_parent1.set_static_metadata(TransactionStaticMetadata(min_height=0)) + tx_parent1.set_static_metadata(TransactionStaticMetadata(min_height=0, closest_ancestor_block=b'')) self.manager.tx_storage.save_transaction(tx_parent1) tx_parent2_hex = ('0001000103001f16fe62e3433bcc74b262c11a1fa94fcb38484f4d8fb080f53a0c9c57ddb001006946304402' @@ -114,7 +114,7 @@ def test_get_one_known_tx(self): 'd57709926b76e64763bf19c3f13eeac30000016d') tx_parent2 = Transaction.create_from_struct(bytes.fromhex(tx_parent2_hex), self.manager.tx_storage) tx_parent2.get_metadata().validation = ValidationState.FULL - tx_parent2.set_static_metadata(TransactionStaticMetadata(min_height=0)) + tx_parent2.set_static_metadata(TransactionStaticMetadata(min_height=0, closest_ancestor_block=b'')) self.manager.tx_storage.save_transaction(tx_parent2) tx_input_hex = ('0001010203007231eee3cb6160d95172a409d634d0866eafc8775f5729fff6a61e7850aba500b3ab76c5337b55' @@ -130,7 +130,7 @@ def test_get_one_known_tx(self): 'cfaf6e7ceb2ba91c9c84009c8174d4a46ebcc789d1989e3dec5b68cffeef239fd8cf86ef62728e2eacee000001b6') tx_input = Transaction.create_from_struct(bytes.fromhex(tx_input_hex), self.manager.tx_storage) tx_input.get_metadata().validation = ValidationState.FULL - tx_input.set_static_metadata(TransactionStaticMetadata(min_height=0)) + tx_input.set_static_metadata(TransactionStaticMetadata(min_height=0, 
closest_ancestor_block=b'')) self.manager.tx_storage.save_transaction(tx_input) # XXX: this is completely dependant on MemoryTokensIndex implementation, hence use_memory_storage=True @@ -198,7 +198,7 @@ def test_get_one_known_tx_with_authority(self): '5114256caacfb8f6dd13db33000020393') tx = Transaction.create_from_struct(bytes.fromhex(tx_hex), self.manager.tx_storage) tx.get_metadata().validation = ValidationState.FULL - tx.set_static_metadata(TransactionStaticMetadata(min_height=0)) + tx.set_static_metadata(TransactionStaticMetadata(min_height=0, closest_ancestor_block=b'')) self.manager.tx_storage.save_transaction(tx) tx_parent1_hex = ('0001010203000023b318c91dcfd4b967b205dc938f9f5e2fd5114256caacfb8f6dd13db330000023b318c91dcfd' @@ -214,7 +214,7 @@ def test_get_one_known_tx_with_authority(self): 'd13db3300038c3d3b69ce90bb88c0c4d6a87b9f0c349e5b10c9b7ce6714f996e512ac16400021261') tx_parent1 = Transaction.create_from_struct(bytes.fromhex(tx_parent1_hex), self.manager.tx_storage) tx_parent1.get_metadata().validation = ValidationState.FULL - tx_parent1.set_static_metadata(TransactionStaticMetadata(min_height=0)) + tx_parent1.set_static_metadata(TransactionStaticMetadata(min_height=0, closest_ancestor_block=b'')) self.manager.tx_storage.save_transaction(tx_parent1) tx_parent2_hex = ('000201040000476810205cb3625d62897fcdad620e01d66649869329640f5504d77e960d01006a473045022100c' @@ -229,7 +229,7 @@ def test_get_one_known_tx_with_authority(self): tx_parent2_bytes = bytes.fromhex(tx_parent2_hex) tx_parent2 = TokenCreationTransaction.create_from_struct(tx_parent2_bytes, self.manager.tx_storage) tx_parent2.get_metadata().validation = ValidationState.FULL - tx_parent2.set_static_metadata(TransactionStaticMetadata(min_height=0)) + tx_parent2.set_static_metadata(TransactionStaticMetadata(min_height=0, closest_ancestor_block=b'')) self.manager.tx_storage.save_transaction(tx_parent2) # Both inputs are the same as the last parent, so no need to manually add them @@ -522,7 +522,7 @@ def test_partially_validated_not_found(self): '0248b9e7d6a626f45dec86975b00f4dd53f84f1f0091125250b044e49023fbbd0f74f6093cdd2226fdff3e09a1000002be') tx = Transaction.create_from_struct(bytes.fromhex(tx_hex), self.manager.tx_storage) tx.set_validation(ValidationState.BASIC) - tx.set_static_metadata(TransactionStaticMetadata(min_height=0)) + tx.set_static_metadata(TransactionStaticMetadata(min_height=0, closest_ancestor_block=b'')) with self.manager.tx_storage.allow_partially_validated_context(): self.manager.tx_storage.save_transaction(tx) diff --git a/tests/tx/test_cache_storage.py b/tests/tx/test_cache_storage.py index bf6e9670a..f6457cd01 100644 --- a/tests/tx/test_cache_storage.py +++ b/tests/tx/test_cache_storage.py @@ -34,7 +34,7 @@ def tearDown(self): def _get_new_tx(self, nonce): from hathor.transaction.validation_state import ValidationState - tx = Transaction(nonce=nonce, storage=self.cache_storage) + tx = Transaction(nonce=nonce, storage=self.cache_storage, parents=[self._settings.GENESIS_TX1_HASH]) tx.update_hash() tx.init_static_metadata_from_storage(self._settings, self.cache_storage) meta = TransactionMetadata(hash=tx.hash) diff --git a/tests/tx/test_static_metadata.py b/tests/tx/test_static_metadata.py new file mode 100644 index 000000000..4f5464f77 --- /dev/null +++ b/tests/tx/test_static_metadata.py @@ -0,0 +1,86 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +from _pytest.fixtures import fixture + +from hathor.conf.get_settings import get_global_settings +from hathor.conf.settings import HathorSettings +from hathor.transaction import Block, Transaction, TxInput, Vertex +from hathor.transaction.static_metadata import BlockStaticMetadata, TransactionStaticMetadata +from hathor.types import VertexId + + +@fixture +def settings() -> HathorSettings: + return get_global_settings() + + +def create_block(*, vertex_id: VertexId, height: int) -> Block: + block = Block(hash=vertex_id) + block.set_static_metadata(BlockStaticMetadata( + min_height=0, + height=height, + feature_activation_bit_counts=[], + feature_states={}, + )) + return block + + +def create_tx(*, vertex_id: VertexId, closest_ancestor_block: VertexId) -> Transaction: + tx = Transaction(hash=vertex_id) + tx.set_static_metadata(TransactionStaticMetadata( + min_height=0, + closest_ancestor_block=closest_ancestor_block, + )) + return tx + + +@fixture +def tx_storage() -> dict[VertexId, Vertex]: + vertices = [ + create_block(vertex_id=b'b1', height=100), + create_block(vertex_id=b'b2', height=101), + create_block(vertex_id=b'b3', height=102), + create_block(vertex_id=b'b4', height=103), + create_tx(vertex_id=b'tx1', closest_ancestor_block=b'b1'), + create_tx(vertex_id=b'tx2', closest_ancestor_block=b'b2'), + create_tx(vertex_id=b'tx3', closest_ancestor_block=b'b4'), + ] + return {vertex.hash: vertex for vertex in vertices} + + +@pytest.mark.parametrize( + ['inputs', 'expected'], + [ + ([], b'b2'), + ([b'b1'], b'b2'), + ([b'b3'], b'b3'), + ([b'tx3'], b'b4'), + ([b'b1', b'b2', b'tx1', b'tx3'], b'b4'), + ], +) +def test_closest_ancestor_block( + settings: HathorSettings, + tx_storage: dict[VertexId, Vertex], + inputs: list[VertexId], + expected: VertexId, +) -> None: + tx = Transaction( + parents=[b'tx1', b'tx2'], + inputs=[TxInput(tx_id=vertex_id, index=0, data=b'') for vertex_id in inputs], + ) + static_metadata = TransactionStaticMetadata.create(tx, settings, lambda vertex_id: tx_storage[vertex_id]) + + assert static_metadata.closest_ancestor_block == expected diff --git a/tests/tx/test_tx.py b/tests/tx/test_tx.py index 48e1ad6e8..833d158d2 100644 --- a/tests/tx/test_tx.py +++ b/tests/tx/test_tx.py @@ -11,7 +11,7 @@ from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import FeatureService from hathor.simulator.utils import add_new_blocks -from hathor.transaction import MAX_OUTPUT_VALUE, Block, Transaction, TxInput, TxOutput +from hathor.transaction import MAX_OUTPUT_VALUE, Block, Transaction, TxInput, TxOutput, Vertex from hathor.transaction.exceptions import ( BlockWithInputs, ConflictingInputs, @@ -343,11 +343,11 @@ def test_merge_mined_long_merkle_path(self): patch_path = 'hathor.feature_activation.feature_service.FeatureService.is_feature_active' - def is_feature_active_false(self: FeatureService, *, block: Block, feature: Feature) -> bool: + def is_feature_active_false(self: FeatureService, *, vertex: Vertex, feature: Feature) -> bool: assert feature == Feature.INCREASE_MAX_MERKLE_PATH_LENGTH return False - def 
is_feature_active_true(self: FeatureService, *, block: Block, feature: Feature) -> bool: + def is_feature_active_true(self: FeatureService, *, vertex: Vertex, feature: Feature) -> bool: assert feature == Feature.INCREASE_MAX_MERKLE_PATH_LENGTH return True From 6ad70892fd36a39811e65870a0e8d7db38f06702 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Fri, 8 Nov 2024 16:53:08 -0300 Subject: [PATCH 46/61] chore(feature-activation): config NOP feature to test FA for transactions (#1177) --- hathor/conf/testnet.py | 14 ++++++++++++++ hathor/conf/testnet.yml | 13 +++++++++++++ 2 files changed, 27 insertions(+) diff --git a/hathor/conf/testnet.py b/hathor/conf/testnet.py index c956c48c5..4743e9f4e 100644 --- a/hathor/conf/testnet.py +++ b/hathor/conf/testnet.py @@ -68,6 +68,20 @@ lock_in_on_timeout=False, version='0.59.0', signal_support_by_default=True, + ), + + # NOP feature to test Feature Activation for Transactions + Feature.NOP_FEATURE_1: Criteria( + bit=0, + # N = 4_354_560 + # Expected to be reached around Sunday, 2024-11-17. + # Right now the best block is 4_326_600 on testnet (2024-11-07). + start_height=4_354_560, # N + timeout_height=4_394_880, # N + 2 * 20160 (2 weeks after the start) + minimum_activation_height=0, + lock_in_on_timeout=False, + version='0.63.0', + signal_support_by_default=True, ) } ) diff --git a/hathor/conf/testnet.yml b/hathor/conf/testnet.yml index 554df5247..8babd1f32 100644 --- a/hathor/conf/testnet.yml +++ b/hathor/conf/testnet.yml @@ -50,3 +50,16 @@ FEATURE_ACTIVATION: lock_in_on_timeout: false version: 0.59.0 signal_support_by_default: true + + # NOP feature to test Feature Activation for Transactions + NOP_FEATURE_1: + bit: 0 + # N = 4_354_560 + # Expected to be reached around Sunday, 2024-11-17. + # Right now the best block is 4_326_600 on testnet (2024-11-07). 
+ start_height: 4_354_560 # N + timeout_height: 4_394_880 # N + 2 * 20160 (2 weeks after the start) + minimum_activation_height: 0 + lock_in_on_timeout: false + version: 0.63.0 + signal_support_by_default: true From 759c1d7b6c769e67e1ca0e2f60d01c09e06fdb14 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Fri, 8 Nov 2024 20:21:34 -0300 Subject: [PATCH 47/61] chore: improve p2p protocol error logging (#1179) --- hathor/p2p/protocol.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hathor/p2p/protocol.py b/hathor/p2p/protocol.py index e05e63b55..cd90601e8 100644 --- a/hathor/p2p/protocol.py +++ b/hathor/p2p/protocol.py @@ -327,7 +327,7 @@ def recv_message(self, cmd: ProtocolMessages, payload: str) -> None: .addErrback(self._on_cmd_handler_error, cmd) def _on_cmd_handler_error(self, failure: Failure, cmd: ProtocolMessages) -> None: - self.log.warn('recv_message processing error', reason=failure.getErrorMessage(), exc_info=True) + self.log.error(f'recv_message processing error:\n{failure.getTraceback()}', reason=failure.getErrorMessage()) self.send_error_and_close_connection(f'Error processing "{cmd.value}" command') def send_error(self, msg: str) -> None: From bff1face36574573ed22783604478bb045a05510 Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Mon, 11 Nov 2024 09:56:14 -0600 Subject: [PATCH 48/61] feat(dag-builder): Add a tool to generate vertices from a DAG description --- hathor/conf/unittests.py | 6 +- hathor/conf/unittests.yml | 6 +- hathor/dag_builder/__init__.py | 17 + hathor/dag_builder/artifacts.py | 40 +++ hathor/dag_builder/builder.py | 210 +++++++++++++ hathor/dag_builder/cli.py | 66 ++++ hathor/dag_builder/default_filler.py | 270 ++++++++++++++++ hathor/dag_builder/tokenizer.py | 172 ++++++++++ hathor/dag_builder/types.py | 63 ++++ hathor/dag_builder/vertex_exporter.py | 293 ++++++++++++++++++ .../storage/transaction_storage.py | 3 +- tests/dag_builder/__init__.py | 0 tests/dag_builder/test_dag_builter.py | 208 +++++++++++++ tests/tx/test_genesis.py | 2 +- tests/unittest.py | 14 + tests/utils.py | 19 +- 16 files changed, 1375 insertions(+), 14 deletions(-) create mode 100644 hathor/dag_builder/__init__.py create mode 100644 hathor/dag_builder/artifacts.py create mode 100644 hathor/dag_builder/builder.py create mode 100644 hathor/dag_builder/cli.py create mode 100644 hathor/dag_builder/default_filler.py create mode 100644 hathor/dag_builder/tokenizer.py create mode 100644 hathor/dag_builder/types.py create mode 100644 hathor/dag_builder/vertex_exporter.py create mode 100644 tests/dag_builder/__init__.py create mode 100644 tests/dag_builder/test_dag_builter.py diff --git a/hathor/conf/unittests.py b/hathor/conf/unittests.py index 39e0b67e0..afd06e266 100644 --- a/hathor/conf/unittests.py +++ b/hathor/conf/unittests.py @@ -25,9 +25,9 @@ MIN_SHARE_WEIGHT=2, MAX_TX_WEIGHT_DIFF=25.0, BLOCK_DIFFICULTY_N_BLOCKS=20, - GENESIS_OUTPUT_SCRIPT=bytes.fromhex('76a914fd05059b6006249543b82f36876a17c73fd2267b88ac'), - GENESIS_BLOCK_NONCE=0, - GENESIS_BLOCK_HASH=bytes.fromhex('339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792'), + GENESIS_OUTPUT_SCRIPT=bytes.fromhex('76a914d07bc82d6e0d1bb116614076645e9b87c8c83b4188ac'), + GENESIS_BLOCK_NONCE=5, + GENESIS_BLOCK_HASH=bytes.fromhex('2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488'), GENESIS_TX1_NONCE=6, GENESIS_TX1_HASH=bytes.fromhex('16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952'), GENESIS_TX2_NONCE=2, diff --git a/hathor/conf/unittests.yml 
b/hathor/conf/unittests.yml index abab9ae90..fdcc5e261 100644 --- a/hathor/conf/unittests.yml +++ b/hathor/conf/unittests.yml @@ -7,9 +7,9 @@ MIN_TX_WEIGHT: 2 MIN_SHARE_WEIGHT: 2 MAX_TX_WEIGHT_DIFF: 25.0 BLOCK_DIFFICULTY_N_BLOCKS: 20 -GENESIS_OUTPUT_SCRIPT: 76a914fd05059b6006249543b82f36876a17c73fd2267b88ac -GENESIS_BLOCK_NONCE: 0 -GENESIS_BLOCK_HASH: 339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792 +GENESIS_OUTPUT_SCRIPT: 76a914d07bc82d6e0d1bb116614076645e9b87c8c83b4188ac +GENESIS_BLOCK_NONCE: 5 +GENESIS_BLOCK_HASH: 2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488 GENESIS_TX1_NONCE: 6 GENESIS_TX1_HASH: 16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952 GENESIS_TX2_NONCE: 2 diff --git a/hathor/dag_builder/__init__.py b/hathor/dag_builder/__init__.py new file mode 100644 index 000000000..3bcdb794e --- /dev/null +++ b/hathor/dag_builder/__init__.py @@ -0,0 +1,17 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.dag_builder.builder import DAGBuilder + +__all__ = ['DAGBuilder'] diff --git a/hathor/dag_builder/artifacts.py b/hathor/dag_builder/artifacts.py new file mode 100644 index 000000000..8137951ca --- /dev/null +++ b/hathor/dag_builder/artifacts.py @@ -0,0 +1,40 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from typing import TYPE_CHECKING, Iterator, NamedTuple + +from hathor.dag_builder.types import DAGNode + +if TYPE_CHECKING: + from hathor.transaction import BaseTransaction + + +class _Pair(NamedTuple): + node: DAGNode + vertex: BaseTransaction + + +class DAGArtifacts: + def __init__(self, items: Iterator[tuple[DAGNode, BaseTransaction]]) -> None: + self.by_name: dict[str, _Pair] = {} + + v: list[_Pair] = [] + for node, vertex in items: + p = _Pair(node, vertex) + v.append(p) + self.by_name[node.name] = p + + self.list: tuple[_Pair, ...] = tuple(v) diff --git a/hathor/dag_builder/builder.py b/hathor/dag_builder/builder.py new file mode 100644 index 000000000..e28a6fdfd --- /dev/null +++ b/hathor/dag_builder/builder.py @@ -0,0 +1,210 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from collections import defaultdict +from typing import Iterator + +from structlog import get_logger +from typing_extensions import Self + +from hathor.conf.settings import HathorSettings +from hathor.daa import DifficultyAdjustmentAlgorithm +from hathor.dag_builder.artifacts import DAGArtifacts +from hathor.dag_builder.tokenizer import Token, TokenType +from hathor.dag_builder.types import ( + AttributeType, + DAGInput, + DAGNode, + DAGNodeType, + DAGOutput, + VertexResolverType, + WalletFactoryType, +) +from hathor.wallet import BaseWallet + +logger = get_logger() + + +class DAGBuilder: + def __init__( + self, + settings: HathorSettings, + daa: DifficultyAdjustmentAlgorithm, + genesis_wallet: BaseWallet, + wallet_factory: WalletFactoryType, + vertex_resolver: VertexResolverType, + ) -> None: + from hathor.dag_builder.default_filler import DefaultFiller + from hathor.dag_builder.tokenizer import tokenize + from hathor.dag_builder.vertex_exporter import VertexExporter + + self.log = logger.new() + + self._nodes: dict[str, DAGNode] = {} + self._tokenize = tokenize + self._filler = DefaultFiller(self, settings, daa) + self._exporter = VertexExporter( + builder=self, + settings=settings, + daa=daa, + genesis_wallet=genesis_wallet, + wallet_factory=wallet_factory, + vertex_resolver=vertex_resolver, + ) + + def parse_tokens(self, tokens: Iterator[Token]) -> None: + """Parse tokens and update the DAG accordingly.""" + for parts in tokens: + match parts: + case (TokenType.PARENT, (_from, _to)): + self.add_parent_edge(_from, _to) + + case (TokenType.SPEND, (_from, _to, _txout_index)): + self.add_spending_edge(_from, _to, _txout_index) + + case (TokenType.ATTRIBUTE, (name, key, value)): + self.add_attribute(name, key, value) + + case (TokenType.ORDER_BEFORE, (_from, _to)): + self.add_deps(_from, _to) + + case (TokenType.OUTPUT, (name, index, amount, token, attrs)): + self.set_output(name, index, amount, token, attrs) + + case (TokenType.BLOCKCHAIN, (name, first_parent, begin_index, end_index)): + self.add_blockchain(name, first_parent, begin_index, end_index) + + case _: + raise NotImplementedError(parts) + + def _get_node(self, name: str) -> DAGNode: + """Return a node.""" + return self._nodes[name] + + def _get_or_create_node(self, name: str, *, default_type: DAGNodeType = DAGNodeType.Unknown) -> DAGNode: + """Return a node, creating one if needed.""" + if name not in self._nodes: + node = DAGNode(name=name, type=default_type) + self._nodes[name] = node + else: + node = self._nodes[name] + if node.type is DAGNodeType.Unknown: + node.type = default_type + else: + if default_type != DAGNodeType.Unknown: + assert node.type is default_type, f'{node.type} != {default_type}' + return node + + def add_deps(self, _from: str, _to: str) -> Self: + """Add a dependency between two nodes. 
For clarity, `_to` has to be created before `_from`.""" + from_node = self._get_or_create_node(_from) + self._get_or_create_node(_to) + from_node.deps.add(_to) + return self + + def add_blockchain(self, prefix: str, first_parent: str | None, first_index: int, last_index: int) -> Self: + """Add a sequence of nodes representing a chain of blocks.""" + prev = first_parent + for i in range(first_index, last_index + 1): + name = f'{prefix}{i}' + self._get_or_create_node(name, default_type=DAGNodeType.Block) + if prev is not None: + self.add_parent_edge(name, prev) + prev = name + return self + + def add_parent_edge(self, _from: str, _to: str) -> Self: + """Add a parent edge between two nodes. For clarity, `_to` has to be created befre `_from`.""" + self._get_or_create_node(_to) + from_node = self._get_or_create_node(_from) + from_node.parents.add(_to) + return self + + def add_spending_edge(self, _from: str, _to: str, _txout_index: int) -> Self: + """Add a spending edge between two nodes. For clarity, `_to` has to be created before `_from`.""" + to_node = self._get_or_create_node(_to) + if len(to_node.outputs) <= _txout_index: + to_node.outputs.extend([None] * (_txout_index - len(to_node.outputs) + 1)) + to_node.outputs[_txout_index] = DAGOutput(0, '', {}) + from_node = self._get_or_create_node(_from) + from_node.inputs.add(DAGInput(_to, _txout_index)) + return self + + def set_output(self, name: str, index: int, amount: int, token: str, attrs: AttributeType) -> Self: + """Set information about an output.""" + node = self._get_or_create_node(name) + if len(node.outputs) <= index: + node.outputs.extend([None] * (index - len(node.outputs) + 1)) + node.outputs[index] = DAGOutput(amount, token, attrs) + if token != 'HTR': + self._get_or_create_node(token, default_type=DAGNodeType.Token) + node.deps.add(token) + return self + + def add_attribute(self, name: str, key: str, value: str) -> Self: + """Add an attribute to a node.""" + node = self._get_or_create_node(name) + if key == 'type': + node.type = DAGNodeType(value) + else: + node.attrs[key] = value + return self + + def topological_sorting(self) -> Iterator[DAGNode]: + """Run a topological sort on the DAG, yielding nodes in an order that respects all dependency constraints.""" + direct_deps: dict[str, set[str]] = {} + rev_deps: dict[str, set[str]] = defaultdict(set) + seen: set[str] = set() + candidates: list[str] = [] + for name, node in self._nodes.items(): + assert name == node.name + deps = set(node.get_all_dependencies()) + assert name not in direct_deps + direct_deps[name] = deps + for x in deps: + rev_deps[x].add(name) + if len(deps) == 0: + candidates.append(name) + + for _ in range(len(self._nodes)): + if len(candidates) == 0: + self.log('fail because there is at least one cycle in the dependencies', + direct_deps=direct_deps, + rev_deps=rev_deps, + seen=seen, + not_seen=set(self._nodes.keys()) - seen, + nodes=self._nodes) + raise RuntimeError('there is at least one cycle') + name = candidates.pop() + assert name not in seen + seen.add(name) + for d in rev_deps[name]: + direct_deps[d].remove(name) + if len(direct_deps[d]) == 0: + candidates.append(d) + del direct_deps[d] + node = self._get_node(name) + yield node + + def build(self) -> DAGArtifacts: + """Build all the transactions based on the DAG.""" + self._filler.run() + return DAGArtifacts(self._exporter.export()) + + def build_from_str(self, content: str) -> DAGArtifacts: + """Run build() after creating an initial DAG from a string.""" + self.parse_tokens(self._tokenize(content)) + 
return self.build() diff --git a/hathor/dag_builder/cli.py b/hathor/dag_builder/cli.py new file mode 100644 index 000000000..ff6184fb4 --- /dev/null +++ b/hathor/dag_builder/cli.py @@ -0,0 +1,66 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.dag_builder.builder import DAGBuilder + + +def main(filename: str, genesis_seed: str) -> None: + from hathor.reactor import initialize_global_reactor + + # reactor + _ = initialize_global_reactor(use_asyncio_reactor=False) + + from hathor.conf.get_settings import get_global_settings + from hathor.daa import DifficultyAdjustmentAlgorithm + from hathor.wallet import HDWallet + settings = get_global_settings() + + def wallet_factory(words=None): + if words is None: + words = ('bind daring above film health blush during tiny neck slight clown salmon ' + 'wine brown good setup later omit jaguar tourist rescue flip pet salute') + hd = HDWallet(words=words) + hd._manually_initialize() + return hd + + genesis_wallet = wallet_factory(genesis_seed) + daa = DifficultyAdjustmentAlgorithm(settings=settings) + + builder = DAGBuilder( + settings=settings, + daa=daa, + genesis_wallet=genesis_wallet, + wallet_factory=wallet_factory, + vertex_resolver=lambda x: None, + ) + + fp = open(filename, 'r') + content = fp.read() + artifacts = builder.build_from_str(content) + + for node, vertex in artifacts.list: + print('//', node) + print('//', repr(vertex)) + print('//', node.name) + print(bytes(vertex).hex()) + print() + + +if __name__ == '__main__': + import os + import sys + if 'HATHOR_CONFIG_YAML' not in os.environ: + os.environ['HATHOR_CONFIG_YAML'] = './hathor/conf/testnet.yml' + genesis_seed = os.environ['GENESIS_SEED'] + main(sys.argv[1], genesis_seed) diff --git a/hathor/dag_builder/default_filler.py b/hathor/dag_builder/default_filler.py new file mode 100644 index 000000000..95026e2cc --- /dev/null +++ b/hathor/dag_builder/default_filler.py @@ -0,0 +1,270 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from collections import defaultdict +from math import ceil + +from hathor.conf.settings import HathorSettings +from hathor.daa import DifficultyAdjustmentAlgorithm +from hathor.dag_builder.builder import DAGBuilder, DAGInput, DAGNode, DAGNodeType, DAGOutput + + +class DefaultFiller: + """This filler applies a strategy to complete a DAG. 
+ + The strategy is to create a dummy transaction that spends from the genesis block + and has one output for each transaction that needs HTR tokens. + + For custom tokens, it creates an output on the TokenCreationTransaction of the token + for each transaction that needs that custom token. + """ + + def __init__(self, builder: DAGBuilder, settings: HathorSettings, daa: DifficultyAdjustmentAlgorithm) -> None: + self._builder = builder + self._settings = settings + self._daa = daa + + # create the dummy and genesis nodes before builder.build() is called + genesis_block = self._get_or_create_node('genesis_block', default_type=DAGNodeType.Genesis) + if len(genesis_block.outputs) == 0: + genesis_block.outputs.append(DAGOutput(self._settings.GENESIS_TOKENS, 'HTR', {})) + self._get_or_create_node('genesis_1', default_type=DAGNodeType.Genesis) + self._get_or_create_node('genesis_2', default_type=DAGNodeType.Genesis) + self._get_or_create_node('dummy', default_type=DAGNodeType.Transaction) + + def _get_node(self, name: str) -> DAGNode: + """Get a node.""" + return self._builder._get_node(name) + + def _get_or_create_node(self, name: str, *, default_type: DAGNodeType = DAGNodeType.Unknown) -> DAGNode: + """Get a node.""" + return self._builder._get_or_create_node(name, default_type=default_type) + + @staticmethod + def get_next_index(outputs: list[DAGOutput | None]) -> int: + """Return the next index to place a new output. + + If all slots are full, it creates a new slot at the end.""" + for i, txout in enumerate(outputs): + if txout is None: + return i + outputs.append(None) + return len(outputs) - 1 + + def fill_parents(self, node: DAGNode, *, target: int = 2, candidates: list[str] | None = []) -> None: + """Fill parents of a vertex. + + Note: We shouldn't use the DAG transactions because it would confirm them, violating the DAG description.""" + # What's the best way to fill the parents? + # Should we use dummy transactions so it is unrelated to the other transactions? 
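+        # Genesis vertices are left untouched. For everything else, when no candidates are
+        # given we fall back to the two genesis transactions, so filling parents does not
+        # confirm any transaction described in the DAG.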
+ if node.type is DAGNodeType.Genesis: + return + if len(node.parents) >= target: + return + + if not candidates: + candidates = [ + 'genesis_1', + 'genesis_2', + ] + for pi in candidates: + if len(node.parents) >= target: + break + node.parents.add(pi) + + def find_txin(self, amount: int, token: str) -> DAGInput: + """Create a DAGInput for an amount of tokens.""" + if token == 'HTR': + dummy = self._get_node('dummy') + dummy.inputs.add(DAGInput('genesis_block', 0)) + self.fill_parents(dummy) + + # TODO no more than 255 inputs + index = self.get_next_index(dummy.outputs) + dummy.outputs[index] = DAGOutput(amount, token, {'_origin': 'f1'}) + return DAGInput('dummy', index) + + else: + token_node = self._get_or_create_node(token) + index = self.get_next_index(token_node.outputs) + token_node.outputs[index] = DAGOutput(amount, token, {'_origin': 'f2'}) + return DAGInput(token, index) + + def calculate_balance(self, node: DAGNode) -> dict[str, int]: + """Calculate the balance for each token in a node.""" + ins: defaultdict[str, int] = defaultdict(int) + for tx_name, index in node.inputs: + node2 = self._get_or_create_node(tx_name) + txout = node2.outputs[index] + assert txout is not None + ins[txout.token] += txout.amount + + outs: defaultdict[str, int] = defaultdict(int) + for txout in node.outputs: + assert txout is not None + outs[txout.token] += txout.amount + + keys = set(ins.keys()) | set(outs.keys()) + balance = {} + for key in keys: + balance[key] = outs.get(key, 0) - ins.get(key, 0) + + return balance + + def balance_node_inputs_and_outputs(self, node: DAGNode) -> None: + """Balance the inputs and outputs of a node.""" + balance = self.calculate_balance(node) + + for key, diff in balance.items(): + # =0 balance + # <0 need output + # >0 need input + if diff < 0: + index = self.get_next_index(node.outputs) + node.outputs[index] = DAGOutput(abs(diff), key, {'_origin': 'f3'}) + elif diff > 0: + txin = self.find_txin(diff, key) + node.inputs.add(txin) + + def run(self) -> None: + """Run the filler.""" + for node in self._builder._nodes.values(): + if node.type is DAGNodeType.Unknown: + node.type = DAGNodeType.Transaction + + for node in self._builder._nodes.values(): + if node.type is DAGNodeType.Genesis: + continue + if node.name == 'dummy': + continue + if not node.inputs and not node.outputs: + if node.type is DAGNodeType.Block: + continue + node.outputs.append(DAGOutput(1, 'HTR', {'_origin': 'f4'})) + for i in range(len(node.outputs)): + txout = node.outputs[i] + if txout is None: + node.outputs[i] = DAGOutput(1, 'HTR', {'_origin': 'f5'}) + elif txout.amount == 0: + assert not txout.token + assert not txout.attrs + node.outputs[i] = DAGOutput(1, 'HTR', {'_origin': 'f6'}) + + tokens = [] + for node in list(self._builder.topological_sorting()): + match node.type: + case DAGNodeType.Genesis: + # do nothing + pass + + case DAGNodeType.Block: + if len(node.inputs) > 0: + raise ValueError + + if len(node.outputs) > 1: + raise ValueError + + blk_count = 0 + txs_count = 0 + parent_blk: DAGNode | None = None + for pi in node.parents: + pi_node = self._get_or_create_node(pi) + if pi_node.type is DAGNodeType.Block: + blk_count += 1 + assert parent_blk is None + parent_blk = pi_node + else: + txs_count += 1 + + candidates: list[str] = [] + if blk_count == 0: + node.parents.add('genesis_block') + else: + assert parent_blk is not None + candidates = [ + x + for x in parent_blk.parents + if x != 'genesis_block' and self._get_node(x).type is not DAGNodeType.Block + ] + + self.fill_parents(node, 
target=3, candidates=candidates) + assert len(node.parents) == 3 + + balance = self.calculate_balance(node) + assert set(balance.keys()).issubset({'HTR'}) + diff = balance.get('HTR', 0) + + target = self._daa.get_tokens_issued_per_block(1) # TODO Use the actual height. + assert diff >= 0 + assert diff <= target + + if diff < target: + node.outputs.append(DAGOutput(target - diff, 'HTR', {'_origin': 'f7'})) + + case DAGNodeType.Transaction: + if node.name == 'dummy': + continue + + self.fill_parents(node) + self.balance_node_inputs_and_outputs(node) + + case DAGNodeType.Token: + tokens.append(node.name) + self.fill_parents(node) + + case _: + raise NotImplementedError(node.type) + + for token in tokens: + node = self._get_or_create_node(token) + + balance = self.calculate_balance(node) + assert set(balance.keys()).issubset({'HTR', token}) + + htr_minimum = ceil(balance[token] / 100) + htr_balance = -balance.get('HTR', 0) + + if htr_balance > htr_minimum: + index = self.get_next_index(node.outputs) + node.outputs[index] = DAGOutput(htr_balance - htr_minimum, 'HTR', {'_origin': 'f8'}) + + elif htr_balance < htr_minimum: + txin = self.find_txin(htr_minimum - htr_balance, 'HTR') + node.inputs.add(txin) + + if 'dummy' in self._builder._nodes: + node = self._get_node('dummy') + balance = self.calculate_balance(node) + if not balance: + del self._builder._nodes['dummy'] + else: + assert set(balance.keys()) == {'HTR'} + diff = balance.get('HTR', 0) + + assert diff <= 0 + + if diff < 0: + index = self.get_next_index(node.outputs) + node.outputs[index] = DAGOutput(-diff, 'HTR', {}) + + for node in self._builder._nodes.values(): + if node.type is DAGNodeType.Block: + continue + if node.type is DAGNodeType.Genesis: + continue + if node.name == 'dummy': + continue + self._builder.add_deps(node.name, 'dummy') diff --git a/hathor/dag_builder/tokenizer.py b/hathor/dag_builder/tokenizer.py new file mode 100644 index 000000000..041eac32b --- /dev/null +++ b/hathor/dag_builder/tokenizer.py @@ -0,0 +1,172 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import re +from enum import Enum, auto +from typing import Any, Iterator + +""" +A domain specific language to describe DAGs. 
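+Each statement occupies its own line; `#` starts a comment and blank lines are ignored.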
+ +Syntax: + + blockchain genesis a[2..5] # create blocks a2, a3, a4, and a5 where a2's parent is the genesis block + blockchain pi a[5..7] # create blocks a5, a6, and a7 where a5's parent is pi + a <-- b <-- c # a is a parent of b which is a parent of c + a --> b --> c # c is a parent of b which is a parent of a + a.out[i] <<< b c d # b, c, and d spend the i-th output of a + a < b < c # a must be created before b and b must be created before c + a > b > c # a must be created after b and b must be creater after c + a.attr = value # set value of attribute attr to a + +Special attributes: + a.out[i] = 100 HTR # set that the i-th output of a holds 100 HTR + a.out[i] = 100 TOKEN # set that the i-th output of a holds 100 TOKEN where TOKEN is a custom token + a.weight = 50 # set vertex weight + + +Example: + + blockchain genesis a[0..300] + blockchain a300 b[0..20] + blockchain b4 c[0..10] + + # reward lock + a300 < dummy + + b11 --> tx1 + b11 --> tx2 + + b14 --> tx1 + b14 --> tx3 + + c3 --> tx1 + c3 --> tx2 + + tx1 <-- tx2 <-- tx3 + + tx3 --> tx5 --> tx6 + + tx1.out[0] <<< tx2 tx3 + tx1.out[0] <<< tx4 + + a0.out[0] <<< tx1 + + tx1.out[0] = 100 HTR [wallet1] + tx1.out[1] = 50 TK1 [wallet2] + tx2.out[0] = 75 USDC [wallet1] + + USDC.out[0] = 100000 HTR + + b5 < c0 < c10 < b20 + b6 < tx3 + b16 < tx4 +""" + + +class TokenType(Enum): + BLOCKCHAIN = auto() + ATTRIBUTE = auto() + PARENT = auto() + SPEND = auto() + OUTPUT = auto() + ORDER_BEFORE = auto() + + +Token = tuple[TokenType, tuple[Any, ...]] + + +def collect_pairs(parts: list[str], expected_sep: str) -> Iterator[tuple[str, str]]: + """Pair all parts two by two checking the separator.""" + n = len(parts) + if n < 3: + raise SyntaxError + if n % 2 == 0: + raise SyntaxError + + k = (n - 1) // 2 + for i in range(k): + first = parts[2 * i] + sep = parts[2 * i + 1] + second = parts[2 * i + 2] + if parts[2 * i + 1] != expected_sep: + raise SyntaxError(f'inconsistent separator; got {sep} but expecting {expected_sep}') + yield (first, second) + + +def tokenize(content: str) -> Iterator[Token]: + """Parse content and generate tokens. 
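+
+    Each token is a (TokenType, payload) tuple; for example, the line `a <-- b` yields
+    (TokenType.PARENT, ('b', 'a')), meaning that a is a parent of b.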
+ """ + blockchain_re = re.compile(r'^([a-zA-Z][a-zA-Z0-9-_]*)\[([0-9]+)..([0-9]+)\]$') + first_parent: str | None + for line in content.split('\n'): + line, _, _ = line.partition('#') + line = line.strip() + if not line: + continue + + # split() trims on both sides and remove empty parts + parts = line.split() + + if parts[0] == 'blockchain': + if len(parts) != 3: + raise SyntaxError + first_parent = parts[1] + if first_parent == 'genesis': + first_parent = None + match = blockchain_re.match(parts[2]) + if not match: + raise SyntaxError(f'invalid blockchain format: {line}') + name, begin, end = match.groups() + yield (TokenType.BLOCKCHAIN, (name, first_parent, int(begin), int(end))) + + elif parts[1] == '=': + name, key = parts[0].split('.', 1) + if key.startswith('out[') and key[-1] == ']': + index = int(key[4:-1]) + amount = int(parts[2]) + token = parts[3] + attrs = parts[4:] + yield (TokenType.OUTPUT, (name, index, amount, token, attrs)) + else: + yield (TokenType.ATTRIBUTE, (name, key, ' '.join(parts[2:]))) + + elif parts[1] == '<--': + for _to, _from in collect_pairs(parts, '<--'): + yield (TokenType.PARENT, (_from, _to)) + + elif parts[1] == '-->': + for _from, _to in collect_pairs(parts, '-->'): + yield (TokenType.PARENT, (_from, _to)) + + elif parts[1] == '<<<': + _to, _out = parts[0].split('.', 1) + if not _out.startswith('out['): + raise SyntaxError + if _out[-1] != ']': + raise SyntaxError + _txout_index = int(_out[4:-1]) + for _from in parts[2:]: + yield (TokenType.SPEND, (_from, _to, _txout_index)) + + elif parts[1] == '<': + for _a, _b in collect_pairs(parts, '<'): + yield (TokenType.ORDER_BEFORE, (_b, _a)) + + elif parts[1] == '>': + for _a, _b in collect_pairs(parts, '>'): + yield (TokenType.ORDER_BEFORE, (_a, _b)) + + else: + raise SyntaxError(line) diff --git a/hathor/dag_builder/types.py b/hathor/dag_builder/types.py new file mode 100644 index 000000000..46d5af170 --- /dev/null +++ b/hathor/dag_builder/types.py @@ -0,0 +1,63 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
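+
+"""Shared type definitions for the DAG builder: node and edge descriptions (DAGNode,
+DAGInput, DAGOutput), node types, and the callable aliases used for wallet factories
+and vertex resolution."""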
+ +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass, field +from enum import Enum +from typing import Any, Iterator, NamedTuple, TypeAlias + +from hathor.transaction import BaseTransaction +from hathor.wallet import BaseWallet + +AttributeType: TypeAlias = dict[str, str | int] +VertexResolverType: TypeAlias = Callable[[BaseTransaction], Any] +WalletFactoryType: TypeAlias = Callable[[], BaseWallet] + + +class DAGNodeType(Enum): + Unknown = 'unknown' + Block = 'block' + Transaction = 'transaction' + Token = 'token' + Genesis = 'genesis' + + +@dataclass +class DAGNode: + name: str + type: DAGNodeType + + attrs: dict[str, str] = field(default_factory=dict) + inputs: set[DAGInput] = field(default_factory=set) + outputs: list[DAGOutput | None] = field(default_factory=list) + parents: set[str] = field(default_factory=set) + deps: set[str] = field(default_factory=set) + + def get_all_dependencies(self) -> Iterator[str]: + yield from self.parents + yield from (name for name, _ in self.inputs) + yield from self.deps + + +class DAGInput(NamedTuple): + node_name: str + txout_index: int + + +class DAGOutput(NamedTuple): + amount: int + token: str + attrs: AttributeType diff --git a/hathor/dag_builder/vertex_exporter.py b/hathor/dag_builder/vertex_exporter.py new file mode 100644 index 000000000..4daa17f39 --- /dev/null +++ b/hathor/dag_builder/vertex_exporter.py @@ -0,0 +1,293 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Iterator + +from hathor.conf.settings import HathorSettings +from hathor.crypto.util import decode_address +from hathor.daa import DifficultyAdjustmentAlgorithm +from hathor.dag_builder.builder import DAGBuilder, DAGNode +from hathor.dag_builder.types import DAGNodeType, VertexResolverType, WalletFactoryType +from hathor.transaction import BaseTransaction, Block, Transaction +from hathor.transaction.base_transaction import TxInput, TxOutput +from hathor.transaction.scripts.p2pkh import P2PKH +from hathor.transaction.token_creation_tx import TokenCreationTransaction +from hathor.wallet import BaseWallet + + +class VertexExporter: + """Transform a complete DAG into vertices. 
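+
+    Nodes are visited in topological order and each one becomes a Block, Transaction,
+    or TokenCreationTransaction, with parents, inputs, outputs, weight, and input
+    signatures filled in from the node description.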
+ """ + def __init__( + self, + *, + builder: DAGBuilder, + settings: HathorSettings, + daa: DifficultyAdjustmentAlgorithm, + genesis_wallet: BaseWallet, + wallet_factory: WalletFactoryType, + vertex_resolver: VertexResolverType, + ) -> None: + self._builder = builder + self._vertices: dict[str, BaseTransaction] = {} + self._wallets: dict[str, BaseWallet] = {} + self._vertice_per_id: dict[bytes, BaseTransaction] = {} + self._block_height: dict[bytes, int] = {} + + self._settings = settings + self._daa = daa + self._wallet_factory = wallet_factory + self._vertex_resolver = vertex_resolver + + self._wallets['genesis'] = genesis_wallet + self._wallets['main'] = self._wallet_factory() + + def _get_node(self, name: str) -> DAGNode: + """Get node.""" + return self._builder._get_node(name) + + def get_vertex_id(self, name: str) -> bytes: + """Get the vertex id given its node name.""" + return self._vertices[name].hash + + def get_parent_block(self, block: Block) -> Block: + """Get the parent block of a block.""" + if block.parents[0] == self._settings.GENESIS_BLOCK_HASH: + genesis_block = Block( + timestamp=self._settings.GENESIS_BLOCK_TIMESTAMP, + weight=self._settings.MIN_BLOCK_WEIGHT, + ) + genesis_block.get_height = lambda: 0 # type: ignore[method-assign] + return genesis_block + parent = self._vertice_per_id[block.parents[0]] + assert isinstance(parent, Block) + return parent + + def _create_vertex_parents(self, node: DAGNode) -> tuple[list[bytes], list[bytes]]: + """Convert node parents to vertex parents, splitted into blocks and transactions.""" + block_parents = [] + txs_parents = [] + for pi in node.parents: + pi_node = self._get_node(pi) + if pi_node.type is DAGNodeType.Block or pi_node.name == 'genesis_block': + block_parents.append(self.get_vertex_id(pi)) + else: + txs_parents.append(self.get_vertex_id(pi)) + return block_parents, txs_parents + + def _create_vertex_txin(self, node: DAGNode) -> list[TxInput]: + """Create TxInput objects for a node.""" + inputs = [] + for tx_name, index in node.inputs: + txin = TxInput(tx_id=self.get_vertex_id(tx_name), index=index, data=b'') + inputs.append(txin) + return inputs + + def _create_vertex_txout( + self, + node: DAGNode, + *, + token_creation: bool = False + ) -> tuple[list[bytes], list[TxOutput]]: + """Create TxOutput objects for a node.""" + tokens: list[bytes] = [] + outputs: list[TxOutput] = [] + + for txout in node.outputs: + assert txout is not None + amount, token_name, attrs = txout + if token_name == 'HTR': + index = 0 + elif token_creation: + index = 1 + else: + token_uid = self.get_vertex_id(token_name) + try: + index = tokens.index(token_uid) + 1 + except ValueError: + tokens.append(token_uid) + index = len(tokens) + + script = self.get_next_p2pkh_script() + outputs.append(TxOutput(value=amount, token_data=index, script=script)) + + return tokens, outputs + + def get_next_p2pkh_script(self) -> bytes: + """Return next p2pkh script to be used in outputs.""" + address_b58 = self._wallets['main'].get_unused_address() + return P2PKH.create_output_script(decode_address(address_b58)) + + def get_min_timestamp(self, node: DAGNode) -> int: + """Return the minimum timestamp where a node is valid.""" + # update timestamp + deps = list(node.get_all_dependencies()) + assert deps + timestamp = 1 + max(self._vertices[name].timestamp for name in deps) + return timestamp + + def update_vertex_hash(self, vertex: BaseTransaction) -> None: + """Resolve vertex and update its hash.""" + self._vertex_resolver(vertex) + vertex.update_hash() + + def 
sign_all_inputs(self, node: DAGNode, vertex: Transaction) -> None: + """Sign all inputs of a vertex.""" + data_to_sign = vertex.get_sighash_all() + for txin in vertex.inputs: + pi = self._vertice_per_id[txin.tx_id] + txout = pi.outputs[txin.index] + p2pkh = P2PKH.parse_script(txout.script) + assert p2pkh is not None + + for wallet_name, wallet in self._wallets.items(): + try: + private_key = wallet.get_private_key(p2pkh.address) + break + except KeyError: + pass + + public_key_bytes, signature = wallet.get_input_aux_data(data_to_sign, private_key) + txin.data = P2PKH.create_input_data(public_key_bytes, signature) + + def create_vertex_token(self, node: DAGNode) -> TokenCreationTransaction: + """Create a token given a node.""" + block_parents, txs_parents = self._create_vertex_parents(node) + inputs = self._create_vertex_txin(node) + tokens, outputs = self._create_vertex_txout(node, token_creation=True) + + assert len(block_parents) == 0 + assert len(tokens) == 0 + assert node.name != 'HTR' + + vertex = TokenCreationTransaction(parents=txs_parents, inputs=inputs, outputs=outputs) + vertex.token_name = node.name + vertex.token_symbol = node.name + vertex.timestamp = self.get_min_timestamp(node) + self.sign_all_inputs(node, vertex) + if 'weight' in node.attrs: + vertex.weight = float(node.attrs['weight']) + else: + vertex.weight = self._daa.minimum_tx_weight(vertex) + self.update_vertex_hash(vertex) + return vertex + + def create_vertex_block(self, node: DAGNode) -> Block: + """Create a Block given a node.""" + block_parents, txs_parents = self._create_vertex_parents(node) + inputs = self._create_vertex_txin(node) + tokens, outputs = self._create_vertex_txout(node) + + assert len(inputs) == 0 + assert len(block_parents) == 1 + assert len(txs_parents) == 2 + + height = 1 + self._block_height[block_parents[0]] + + parents = block_parents + txs_parents + + blk = Block(parents=parents, outputs=outputs) + blk.timestamp = self.get_min_timestamp(node) + self._settings.AVG_TIME_BETWEEN_BLOCKS + blk.get_height = lambda: height # type: ignore[method-assign] + blk.update_hash() # the next call fails is blk.hash is None + if 'weight' in node.attrs: + blk.weight = float(node.attrs['weight']) + else: + blk.weight = self._daa.calculate_block_difficulty(blk, self.get_parent_block) + self.update_vertex_hash(blk) + self._block_height[blk.hash] = height + return blk + + def create_vertex_transaction(self, node: DAGNode) -> Transaction: + """Create a Transaction given a node.""" + block_parents, txs_parents = self._create_vertex_parents(node) + inputs = self._create_vertex_txin(node) + tokens, outputs = self._create_vertex_txout(node) + + assert len(block_parents) == 0 + tx = Transaction(parents=txs_parents, inputs=inputs, outputs=outputs, tokens=tokens) + tx.timestamp = self.get_min_timestamp(node) + self.sign_all_inputs(node, tx) + if 'weight' in node.attrs: + tx.weight = float(node.attrs['weight']) + else: + tx.weight = self._daa.minimum_tx_weight(tx) + self.update_vertex_hash(tx) + return tx + + def create_genesis_vertex(self, node: DAGNode) -> BaseTransaction: + """Create a genesis vertex given a node.""" + vertex: BaseTransaction + + if node.name == 'genesis_block': + vertex = Block() + vertex.hash = self._settings.GENESIS_BLOCK_HASH + vertex.timestamp = self._settings.GENESIS_BLOCK_TIMESTAMP + txout = TxOutput( + value=self._settings.GENESIS_TOKENS, + token_data=0, + script=self._settings.GENESIS_OUTPUT_SCRIPT + ) + vertex.outputs.append(txout) + + elif node.name == 'genesis_1': + vertex = Transaction() + 
vertex.hash = self._settings.GENESIS_TX1_HASH + vertex.timestamp = self._settings.GENESIS_TX1_TIMESTAMP + + elif node.name == 'genesis_2': + vertex = Transaction() + vertex.hash = self._settings.GENESIS_TX2_HASH + vertex.timestamp = self._settings.GENESIS_TX2_TIMESTAMP + + else: + raise NotImplementedError(node.name) + + return vertex + + def create_vertex(self, node: DAGNode) -> BaseTransaction: + """Create a vertex.""" + vertex: BaseTransaction + + match node.type: + case DAGNodeType.Block: + vertex = self.create_vertex_block(node) + + case DAGNodeType.Token: + vertex = self.create_vertex_token(node) + + case DAGNodeType.Transaction: + vertex = self.create_vertex_transaction(node) + + case DAGNodeType.Genesis: + vertex = self.create_genesis_vertex(node) + + case _: + raise NotImplementedError(node.type) + + assert vertex is not None + self._vertice_per_id[vertex.hash] = vertex + self._vertices[node.name] = vertex + return vertex + + def export(self) -> Iterator[tuple[DAGNode, BaseTransaction]]: + """Yield all pairs (node, vertex).""" + self._block_height[self._settings.GENESIS_BLOCK_HASH] = 0 + + vertex: BaseTransaction | None + + for node in self._builder.topological_sorting(): + vertex = self.create_vertex(node) + if node.type is not DAGNodeType.Genesis: + yield node, vertex diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index a6ee50aa9..26e226a7d 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -1069,7 +1069,8 @@ def _construct_genesis_block(self) -> Block: ) block.update_hash() - assert block.hash == self._settings.GENESIS_BLOCK_HASH + assert block.hash == self._settings.GENESIS_BLOCK_HASH, \ + f'{block.hash.hex()} != {self._settings.GENESIS_BLOCK_HASH.hex()}' return block def _construct_genesis_tx1(self) -> Transaction: diff --git a/tests/dag_builder/__init__.py b/tests/dag_builder/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/dag_builder/test_dag_builter.py b/tests/dag_builder/test_dag_builter.py new file mode 100644 index 000000000..b059f6e4b --- /dev/null +++ b/tests/dag_builder/test_dag_builter.py @@ -0,0 +1,208 @@ +from hathor.transaction.token_creation_tx import TokenCreationTransaction +from tests import unittest + + +class DAGCreatorTestCase(unittest.TestCase): + _enable_sync_v1 = False + _enable_sync_v2 = True + + def setUp(self): + super().setUp() + + from hathor.simulator.patches import SimulatorCpuMiningService + from hathor.simulator.simulator import _build_vertex_verifiers + + cpu_mining_service = SimulatorCpuMiningService() + + builder = self.get_builder() \ + .set_vertex_verifiers_builder(_build_vertex_verifiers) \ + .set_cpu_mining_service(cpu_mining_service) + + self.manager = self.create_peer_from_builder(builder) + self.dag_builder = self.get_dag_builder(self.manager) + + def test_one_tx(self) -> None: + artifacts = self.dag_builder.build_from_str(""" + blockchain genesis b[1..50] + b1.out[0] <<< tx1 + b30 < tx1 # reward lock + b40 --> tx1 + """) + + for node, vertex in artifacts.list: + self.manager.on_new_tx(vertex, fails_silently=False) + + v_order = [node.name for node, _ in artifacts.list] + + tx1 = artifacts.by_name['tx1'].vertex + b1 = artifacts.by_name['b1'].vertex + b40 = artifacts.by_name['b40'].vertex + + # blockchain genesis b[1..50] + self.assertEqual(b1.parents[0], self._settings.GENESIS_BLOCK_HASH) + for i in range(2, 51): + prev = artifacts.by_name[f'b{i - 1}'].vertex + cur = 
artifacts.by_name[f'b{i}'].vertex + self.assertEqual(cur.parents[0], prev.hash) + + # b30 < tx1 + self.assertGreater(v_order.index('tx1'), v_order.index('b30')) + + # b1.out[0] <<< tx1 + self.assertEqual(tx1.inputs[0].tx_id, b1.hash) + + # b40 --> tx1 + self.assertEqual(tx1.get_metadata().first_block, b40.hash) + + def test_weight(self) -> None: + artifacts = self.dag_builder.build_from_str(""" + blockchain genesis b[1..50] + blockchain b37 c[1..1] + b30 < dummy + b50 < c1 + + tx1.out[0] = 1 TKA + + TKA.weight = 31.8 + tx1.weight = 25.2 + c1.weight = 80.6 + """) + + for node, vertex in artifacts.list: + self.manager.on_new_tx(vertex, fails_silently=False) + + tx1 = artifacts.by_name['tx1'].vertex + tka = artifacts.by_name['TKA'].vertex + c1 = artifacts.by_name['c1'].vertex + b38 = artifacts.by_name['b38'].vertex + + self.assertAlmostEqual(tka.weight, 31.8) + self.assertAlmostEqual(tx1.weight, 25.2) + self.assertAlmostEqual(c1.weight, 80.6) + self.assertIsNotNone(b38.get_metadata().voided_by, b38) + + def test_spend_unspecified_utxo(self) -> None: + artifacts = self.dag_builder.build_from_str(""" + blockchain genesis b[1..50] + b30 < dummy + tx1.out[0] <<< tx2 + """) + + for node, vertex in artifacts.list: + self.manager.on_new_tx(vertex, fails_silently=False) + + tx1 = artifacts.by_name['tx1'].vertex + self.assertEqual(len(tx1.outputs), 1) + # the default filler fills unspecified utxos with 1 HTR + self.assertEqual(tx1.outputs[0].value, 1) + self.assertEqual(tx1.outputs[0].token_data, 0) + + def test_block_parents(self) -> None: + artifacts = self.dag_builder.build_from_str(""" + blockchain genesis b[1..50] + b30 < dummy + + b32 --> tx1 + + b34 --> tx2 + + b36 --> tx3 + b36 --> tx4 + """) + + for node, vertex in artifacts.list: + self.manager.on_new_tx(vertex, fails_silently=False) + + b0 = artifacts.by_name['b30'].vertex + b1 = artifacts.by_name['b31'].vertex + b2 = artifacts.by_name['b32'].vertex + b3 = artifacts.by_name['b33'].vertex + b4 = artifacts.by_name['b34'].vertex + b5 = artifacts.by_name['b35'].vertex + b6 = artifacts.by_name['b36'].vertex + b7 = artifacts.by_name['b37'].vertex + + tx1 = artifacts.by_name['tx1'].vertex + tx2 = artifacts.by_name['tx2'].vertex + tx3 = artifacts.by_name['tx3'].vertex + tx4 = artifacts.by_name['tx4'].vertex + + self.assertEqual(b2.parents[0], b1.hash) + self.assertEqual(b3.parents[0], b2.hash) + self.assertEqual(b4.parents[0], b3.hash) + self.assertEqual(b5.parents[0], b4.hash) + self.assertEqual(b6.parents[0], b5.hash) + + self.assertEqual(set(b1.parents[1:]), set(b0.parents[1:])) + self.assertEqual(set(b3.parents[1:]), set(b2.parents[1:])) + self.assertEqual(set(b5.parents[1:]), set(b4.parents[1:])) + self.assertEqual(set(b7.parents[1:]), set(b6.parents[1:])) + + self.assertTrue(set(b2.parents[1:]).issubset([tx1.hash] + b1.parents[1:])) + self.assertTrue(set(b4.parents[1:]).issubset([tx2.hash] + b3.parents[1:])) + self.assertEqual(set(b6.parents[1:]), {tx3.hash, tx4.hash}) + + def test_custom_token(self) -> None: + artifacts = self.dag_builder.build_from_str(""" + blockchain genesis b[1..50] + b1.out[0] <<< tx1 + tx1.out[1] = 100 TKA + b30 < tx1 # reward lock + b30 < dummy # reward lock + b40 --> tx1 + """) + + for node, vertex in artifacts.list: + self.manager.on_new_tx(vertex, fails_silently=False) + + tka = artifacts.by_name['TKA'].vertex + tx1 = artifacts.by_name['tx1'].vertex + + # TKA token creation transaction + self.assertIsInstance(tka, TokenCreationTransaction) + self.assertEqual(tka.token_name, 'TKA') + 
self.assertEqual(tka.token_symbol, 'TKA') + + # tx1.out[1] = 100 TKA + self.assertEqual(tx1.outputs[1].value, 100) + self.assertEqual(tx1.get_token_uid(tx1.outputs[1].token_data), tka.hash) + + def test_big_dag(self) -> None: + artifacts = self.dag_builder.build_from_str(""" + blockchain genesis a[0..30] + blockchain a30 b[0..20] + blockchain b4 c[0..10] + + a30 < dummy + + b11 --> tx1 + b11 --> tx2 + + b14 --> tx1 + b14 --> tx3 + + c3 --> tx1 + c3 --> tx2 + + tx1 <-- tx2 <-- tx3 + + tx3 --> tx5 --> tx6 + + tx1.out[0] <<< tx2 tx3 + tx1.out[0] <<< tx4 + + a0.out[0] <<< tx1 + + tx1.out[0] = 100 HTR [wallet1] + tx1.out[1] = 50 TK1 [wallet2] + tx2.out[0] = 75 USDC [wallet1] + + USDC.out[0] = 100000 HTR + + b5 < c0 < c10 < b20 + b6 < tx3 + b16 < tx4 + """) + + for node, vertex in artifacts.list: + self.manager.on_new_tx(vertex, fails_silently=False) diff --git a/tests/tx/test_genesis.py b/tests/tx/test_genesis.py index f2839bf6b..dbb96b8f7 100644 --- a/tests/tx/test_genesis.py +++ b/tests/tx/test_genesis.py @@ -20,7 +20,7 @@ def get_genesis_output(): elif settings.NETWORK_NAME.startswith('testnet'): address = 'WdmDUMp8KvzhWB7KLgguA2wBiKsh4Ha8eX' elif settings.NETWORK_NAME == 'unittests': - address = 'HVayMofEDh4XGsaQJeRJKhutYxYodYNop6' + address = 'HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ' else: raise ValueError('Network unknown.') diff --git a/tests/unittest.py b/tests/unittest.py index afb11c1b0..94cef1c34 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -14,6 +14,7 @@ from hathor.conf.get_settings import get_global_settings from hathor.conf.settings import HathorSettings from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode +from hathor.dag_builder import DAGBuilder from hathor.event import EventManager from hathor.event.storage import EventStorage from hathor.manager import HathorManager @@ -30,6 +31,7 @@ from hathor.util import Random, not_none from hathor.wallet import BaseWallet, HDWallet, Wallet from tests.test_memory_reactor_clock import TestMemoryReactorClock +from tests.utils import GENESIS_SEED logger = get_logger() main = ut_main @@ -170,6 +172,18 @@ def _create_test_wallet(self, unlocked: bool = False) -> Wallet: wallet.lock() return wallet + def get_dag_builder(self, manager: HathorManager) -> DAGBuilder: + genesis_wallet = HDWallet(words=GENESIS_SEED) + genesis_wallet._manually_initialize() + + return DAGBuilder( + settings=manager._settings, + daa=manager.daa, + genesis_wallet=genesis_wallet, + wallet_factory=self.get_wallet, + vertex_resolver=lambda x: manager.cpu_mining_service.resolve(x), + ) + def get_builder(self) -> TestBuilder: builder = TestBuilder() builder.set_rng(self.rng) \ diff --git a/tests/utils.py b/tests/utils.py index a2566a3ca..67d98587b 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -8,12 +8,13 @@ from typing import Any, Optional import requests +from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import ec from hathorlib.scripts import DataScript from twisted.internet.task import Clock from hathor.conf import HathorSettings -from hathor.crypto.util import decode_address, get_address_b58_from_public_key, get_private_key_from_bytes +from hathor.crypto.util import decode_address, get_address_b58_from_public_key from hathor.event.model.base_event import BaseEvent from hathor.event.model.event_data import TxData, TxMetadata from hathor.event.model.event_type import EventType @@ -349,14 +350,20 @@ def execute_tx_gen( execute(args) -def get_genesis_key() -> 
ec.EllipticCurvePrivateKeyWithSerialization: - private_key_bytes = base64.b64decode( - 'MIGEAgEAMBAGByqGSM49AgEGBSuBBAAKBG0wawIBAQQgOCgCddzDZsfKgiMJLOt97eov9RLwHeePyBIK2WPF8MChRA' - 'NCAAQ/XSOK+qniIY0F3X+lDrb55VQx5jWeBLhhzZnH6IzGVTtlAj9Ki73DVBm5+VXK400Idd6ddzS7FahBYYC7IaTl' +def get_genesis_key() -> ec.EllipticCurvePrivateKey: + from hathor.wallet import HDWallet + wallet = HDWallet(words=GENESIS_SEED) + wallet._manually_initialize() + key = wallet.get_key_at_index(0) + return ec.derive_private_key( + int.from_bytes(key.secret_exponent().to_bytes(32, 'big'), 'big'), + ec.SECP256K1(), + backend=default_backend() ) - return get_private_key_from_bytes(private_key_bytes) +GENESIS_SEED = ('coral light army gather adapt blossom school alcohol coral light army gather ' + 'adapt blossom school alcohol coral light army gather adapt blossom school awesome') GENESIS_PRIVATE_KEY = get_genesis_key() GENESIS_PUBLIC_KEY = GENESIS_PRIVATE_KEY.public_key() GENESIS_ADDRESS_B58 = get_address_b58_from_public_key(GENESIS_PUBLIC_KEY) From 3a3819495eb72cee9b4788d060addb65ca48fc4c Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Thu, 14 Nov 2024 13:07:14 -0600 Subject: [PATCH 49/61] test(events): Improve comparison between response and expected response --- .../event/test_event_simulation_scenarios.py | 51 ++++++++++--------- 1 file changed, 27 insertions(+), 24 deletions(-) diff --git a/tests/event/test_event_simulation_scenarios.py b/tests/event/test_event_simulation_scenarios.py index 2b7e413b1..0b8e0ed3b 100644 --- a/tests/event/test_event_simulation_scenarios.py +++ b/tests/event/test_event_simulation_scenarios.py @@ -43,6 +43,25 @@ class BaseEventSimulationScenariosTest(BaseEventSimulationTester): seed_config = 6946502462188444706 + def assert_response_equal(self, responses: list[EventResponse], expected: list[EventResponse]) -> None: + """Compare responses and expected responses. 
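As an aside on the tests/utils.py hunk above: ec.derive_private_key rebuilds a private key object from a raw secret exponent, which is how the genesis key is now recovered from the HD wallet. A minimal standalone sketch of that API (the exponent below is an arbitrary placeholder, not the genesis secret):

from cryptography.hazmat.primitives.asymmetric import ec

# Any integer in [1, curve_order - 1] is a valid secret exponent for SECP256K1.
secret_exponent = 0x1F2E3D4C5B6A7980
private_key = ec.derive_private_key(secret_exponent, ec.SECP256K1())
public_key = private_key.public_key()
# The public point can then be serialized or turned into an address as usual.
print(public_key.public_numbers().x, public_key.public_numbers().y)
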
+ """ + self.assertEqual(len(responses), len(expected)) + + for a, b in zip(responses, expected): + self.assertEqual(type(a), type(b)) + self.assertEqual(a.__fields__, b.__fields__) + self.assertEqual(a.event.__fields__, b.event.__fields__) + self.assertEqual(a.event.data.__fields__, b.event.data.__fields__) + + for field in ['type', 'peer_id', 'network', 'latest_event_id', 'stream_id']: + self.assertEqual(getattr(a, field), getattr(b, field)) + + for field in ['id', 'type', 'group_id']: + self.assertEqual(getattr(a.event, field), getattr(b.event, field)) + + self.assertEqual(type(a.event.data), type(b.event.data)) + def test_only_load(self) -> None: stream_id = self.manager._event_manager._stream_id assert stream_id is not None @@ -62,9 +81,7 @@ def test_only_load(self) -> None: EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=4, stream_id=stream_id) # noqa: E501 ] - responses = _remove_timestamp(responses) - expected = _remove_timestamp(expected) - assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + self.assert_response_equal(responses, expected) def test_single_chain_one_block(self) -> None: stream_id = self.manager._event_manager._stream_id @@ -91,9 +108,7 @@ def test_single_chain_one_block(self) -> None: EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id) # noqa: E501 ] - responses = _remove_timestamp(responses) - expected = _remove_timestamp(expected) - assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + self.assert_response_equal(responses, expected) def test_single_chain_blocks_and_transactions(self) -> None: stream_id = self.manager._event_manager._stream_id @@ -157,9 +172,7 @@ def test_single_chain_blocks_and_transactions(self) -> None: EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=1578879091.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], 
parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.576585834390443, accumulated_weight_raw="256", score_raw="781879", first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id) # noqa: E501 ] - responses = _remove_timestamp(responses) - expected = _remove_timestamp(expected) - assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + self.assert_response_equal(responses, expected) def test_reorg(self) -> None: stream_id = self.manager._event_manager._stream_id @@ -206,9 +219,7 @@ def test_reorg(self) -> None: EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=1578879064.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id) # noqa: E501 ] - responses = _remove_timestamp(responses) - expected = _remove_timestamp(expected) - assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + self.assert_response_equal(responses, expected) def test_unvoided_transaction(self) -> None: stream_id = self.manager._event_manager._stream_id @@ -274,9 +285,7 @@ def test_unvoided_transaction(self) -> None: EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, 
score=19.000858282039708, accumulated_weight_raw="256", score_raw="524600", first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 ] - responses = _remove_timestamp(responses) - expected = _remove_timestamp(expected) - assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + self.assert_response_equal(responses, expected) def test_invalid_mempool(self) -> None: stream_id = self.manager._event_manager._stream_id @@ -347,9 +356,7 @@ def test_invalid_mempool(self) -> None: EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=41, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', nonce=0, timestamp=1578879030, signal_bits=0, version=0, weight=10.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='', decoded=None)], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=10.0, score=10.066089190457772, accumulated_weight_raw="1024", score_raw="1072", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id) # noqa: E501 ] - responses = _remove_timestamp(responses) - expected = _remove_timestamp(expected) - assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + self.assert_response_equal(responses, expected) def test_empty_script(self) -> None: stream_id = self.manager._event_manager._stream_id @@ -412,9 +419,7 @@ def test_empty_script(self) -> None: # One NEW_VERTEX_ACCEPTED for a new block EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', 'ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.691576556156242, accumulated_weight_raw="256", score_raw="423375", first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id)] # noqa: E501 - responses = _remove_timestamp(responses) - expected = _remove_timestamp(expected) - assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + self.assert_response_equal(responses, expected) def test_custom_script(self) -> None: stream_id = self.manager._event_manager._stream_id @@ -478,9 +483,7 @@ def 
test_custom_script(self) -> None: EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', '3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', 'cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.79789262729119, accumulated_weight_raw="256", score_raw="455753", first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id) # noqa: E501 ] - responses = _remove_timestamp(responses) - expected = _remove_timestamp(expected) - assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + self.assert_response_equal(responses, expected) def _start_stream(self) -> None: start_stream = StartStreamRequest(type='START_STREAM', window_size=1_000_000, last_ack_event_id=None) From 9822bdb46b94b1a6abcd1e07779053aa29a527cb Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Mon, 6 Nov 2023 21:07:33 -0600 Subject: [PATCH 50/61] feat(consensus): Use DAG of funds to set first_block Co-authored-by: Jan Segre --- hathor/consensus/block_consensus.py | 9 ++- .../include_funds_for_first_block.py | 37 ++++++++++ .../storage/transaction_storage.py | 10 ++- tests/consensus/test_first_block.py | 71 +++++++++++++++++++ 4 files changed, 123 insertions(+), 4 deletions(-) create mode 100644 hathor/transaction/storage/migrations/include_funds_for_first_block.py create mode 100644 tests/consensus/test_first_block.py diff --git a/hathor/consensus/block_consensus.py b/hathor/consensus/block_consensus.py index 419a66268..d77dee210 100644 --- a/hathor/consensus/block_consensus.py +++ b/hathor/consensus/block_consensus.py @@ -432,7 +432,7 @@ def remove_first_block_markers(self, block: Block) -> None: storage = block.storage from hathor.transaction.storage.traversal import BFSTimestampWalk - bfs = BFSTimestampWalk(storage, is_dag_verifications=True, is_left_to_right=False) + bfs = BFSTimestampWalk(storage, is_dag_verifications=True, is_dag_funds=True, is_left_to_right=False) for tx in bfs.run(block, skip_root=True): if tx.is_block: bfs.skip_neighbors(tx) @@ -469,9 +469,12 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes], else: from hathor.transaction.storage.traversal import BFSTimestampWalk - bfs = BFSTimestampWalk(storage, is_dag_verifications=True, is_left_to_right=False) + bfs = BFSTimestampWalk(storage, is_dag_verifications=True, is_dag_funds=True, is_left_to_right=False) for tx in bfs.run(parent, skip_root=False): - assert not tx.is_block + assert tx.hash is not None + if tx.is_block: + bfs.skip_neighbors(tx) + continue if tx.hash in used: bfs.skip_neighbors(tx) diff --git a/hathor/transaction/storage/migrations/include_funds_for_first_block.py b/hathor/transaction/storage/migrations/include_funds_for_first_block.py 
new file mode 100644 index 000000000..0dddb4c8a --- /dev/null +++ b/hathor/transaction/storage/migrations/include_funds_for_first_block.py @@ -0,0 +1,37 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import TYPE_CHECKING + +from structlog import get_logger + +from hathor.transaction.storage.migrations import BaseMigration + +if TYPE_CHECKING: + from hathor.transaction.storage import TransactionStorage + +logger = get_logger() + + +class Migration(BaseMigration): + def skip_empty_db(self) -> bool: + return True + + def get_db_name(self) -> str: + return 'include_funds_for_first_block' + + def run(self, storage: 'TransactionStorage') -> None: + raise Exception('Cannot migrate your database due to an incompatible change in the metadata. ' + 'Please, delete your data folder and use the latest available snapshot or sync ' + 'from beginning.') diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index 26e226a7d..ab06157f5 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -37,7 +37,13 @@ TransactionIsNotABlock, TransactionNotInAllowedScopeError, ) -from hathor.transaction.storage.migrations import BaseMigration, MigrationState, change_score_acc_weight_metadata +from hathor.transaction.storage.migrations import ( + BaseMigration, + MigrationState, + add_closest_ancestor_block, + change_score_acc_weight_metadata, + include_funds_for_first_block, +) from hathor.transaction.storage.tx_allow_scope import TxAllowScope, tx_allow_context from hathor.transaction.transaction import Transaction from hathor.transaction.transaction_metadata import TransactionMetadata @@ -88,6 +94,8 @@ class TransactionStorage(ABC): # history of migrations that have to be applied in the order defined here _migration_factories: list[type[BaseMigration]] = [ change_score_acc_weight_metadata.Migration, + add_closest_ancestor_block.Migration, + include_funds_for_first_block.Migration, ] _migrations: list[BaseMigration] diff --git a/tests/consensus/test_first_block.py b/tests/consensus/test_first_block.py new file mode 100644 index 000000000..3544b2e7a --- /dev/null +++ b/tests/consensus/test_first_block.py @@ -0,0 +1,71 @@ +from tests import unittest + + +class FirstBlockTestCase(unittest.TestCase): + _enable_sync_v1 = True + _enable_sync_v2 = True + + def setUp(self) -> None: + super().setUp() + + from hathor.simulator.patches import SimulatorCpuMiningService + from hathor.simulator.simulator import _build_vertex_verifiers + + cpu_mining_service = SimulatorCpuMiningService() + + builder = self.get_builder() \ + .set_vertex_verifiers_builder(_build_vertex_verifiers) \ + .set_cpu_mining_service(cpu_mining_service) + + self.manager = self.create_peer_from_builder(builder) + self.dag_builder = self.get_dag_builder(self.manager) + + def test_first_block(self) -> None: + artifacts = self.dag_builder.build_from_str(""" + blockchain genesis b[1..50] + + b30 < 
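The block_consensus.py hunks above widen the BFS walks to follow both verification edges (parents) and funds edges (spent transactions), so that first_block markers and scores account for the whole funds DAG. In generic terms, a breadth-first walk over two edge types looks like the sketch below (names are illustrative, not the project's BFSTimestampWalk API):

from collections import deque
from typing import Callable, Hashable, Iterable

def bfs_two_edge_types(
    start: Hashable,
    verification_edges: Callable[[Hashable], Iterable[Hashable]],
    funds_edges: Callable[[Hashable], Iterable[Hashable]],
) -> list[Hashable]:
    # Visit every vertex reachable through either edge type exactly once.
    seen = {start}
    queue = deque([start])
    order = []
    while queue:
        vertex = queue.popleft()
        order.append(vertex)
        for neighbor in (*verification_edges(vertex), *funds_edges(vertex)):
            if neighbor not in seen:
                seen.add(neighbor)
                queue.append(neighbor)
    return order
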
dummy + + tx10.out[0] <<< tx50 + tx20.out[0] <<< tx50 + tx30 <-- tx50 + tx40 <-- tx50 + + tx41.out[0] <<< tx40 + tx42 <-- tx40 + tx43 <-- tx40 + + b31 --> tx10 + + b32 --> tx30 + b32 --> tx43 + + b33 --> tx50 + """) + + for node, vertex in artifacts.list: + self.manager.on_new_tx(vertex, fails_silently=False) + + b31 = artifacts.by_name['b31'].vertex + b32 = artifacts.by_name['b32'].vertex + b33 = artifacts.by_name['b33'].vertex + + tx10 = artifacts.by_name['tx10'].vertex + tx20 = artifacts.by_name['tx20'].vertex + tx30 = artifacts.by_name['tx30'].vertex + tx40 = artifacts.by_name['tx40'].vertex + tx41 = artifacts.by_name['tx41'].vertex + tx42 = artifacts.by_name['tx42'].vertex + tx43 = artifacts.by_name['tx43'].vertex + tx50 = artifacts.by_name['tx50'].vertex + + self.assertEqual(tx10.get_metadata().first_block, b31.hash) + + self.assertEqual(tx30.get_metadata().first_block, b32.hash) + self.assertEqual(tx43.get_metadata().first_block, b32.hash) + + self.assertEqual(tx50.get_metadata().first_block, b33.hash) + self.assertEqual(tx20.get_metadata().first_block, b33.hash) + self.assertEqual(tx40.get_metadata().first_block, b33.hash) + self.assertEqual(tx41.get_metadata().first_block, b33.hash) + self.assertEqual(tx42.get_metadata().first_block, b33.hash) From dc3c7cab1dc01f887d2530a02da3b3e149c96cda Mon Sep 17 00:00:00 2001 From: Luis Helder Date: Mon, 18 Nov 2024 18:38:14 -0300 Subject: [PATCH 51/61] ci: use macos-13 on Github Actions as macos-12 will be dropped (#1185) --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index fd0c39948..12c57b0fa 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -25,7 +25,7 @@ jobs: full_matrix = { 'python': ['3.10', '3.11', '3.12'], # available OS's: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idruns-on - 'os': ['ubuntu-22.04', 'macos-12'], + 'os': ['ubuntu-22.04', 'macos-13'], } # this is the fastest one: reduced_matrix = { From d91513e69d22c028fd291bb4007a39d235cef54b Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Wed, 13 Nov 2024 15:00:46 -0600 Subject: [PATCH 52/61] feat(p2p): Change sync-v2 main loop interval to one second --- hathor/p2p/sync_v2/agent.py | 4 +- tests/poa/test_poa_simulation.py | 77 +++++++++++++++++++++----------- 2 files changed, 53 insertions(+), 28 deletions(-) diff --git a/hathor/p2p/sync_v2/agent.py b/hathor/p2p/sync_v2/agent.py index 5393080b4..d96fd91e4 100644 --- a/hathor/p2p/sync_v2/agent.py +++ b/hathor/p2p/sync_v2/agent.py @@ -55,6 +55,8 @@ MAX_GET_TRANSACTIONS_BFS_LEN: int = 8 MAX_MEMPOOL_STATUS_TIPS: int = 20 +RUN_SYNC_MAIN_LOOP_INTERVAL = 1 # second(s) + class _HeightInfo(NamedTuple): height: int @@ -232,7 +234,7 @@ def start(self) -> None: if self._started: raise Exception('NodeSyncBlock is already running') self._started = True - self._lc_run.start(5) + self._lc_run.start(RUN_SYNC_MAIN_LOOP_INTERVAL) def stop(self) -> None: if not self._started: diff --git a/tests/poa/test_poa_simulation.py b/tests/poa/test_poa_simulation.py index 9941dabf0..b0b787f6e 100644 --- a/tests/poa/test_poa_simulation.py +++ b/tests/poa/test_poa_simulation.py @@ -26,6 +26,7 @@ from hathor.consensus import poa from hathor.consensus.consensus_settings import PoaSettings, PoaSignerSettings from hathor.consensus.poa import PoaSigner +from hathor.consensus.poa.poa_signer import PoaSignerId from hathor.crypto.util import get_address_b58_from_public_key_bytes, 
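The agent.py hunk above only changes how often the sync main loop fires: the LoopingCall that previously ran every 5 seconds now runs every second. For reference, a Twisted LoopingCall behaves like this small sketch (the callback is a stand-in for the agent's real per-iteration work):

from twisted.internet import reactor
from twisted.internet.task import LoopingCall

def run_sync_iteration() -> None:
    # Stand-in for the work done on each sync main-loop tick.
    print('sync tick')

loop = LoopingCall(run_sync_iteration)
loop.start(1)  # interval in seconds; by default the first call fires immediately
reactor.run()
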
get_public_key_bytes_compressed from hathor.manager import HathorManager from hathor.simulator import FakeConnection @@ -61,7 +62,7 @@ def _assert_block_in_turn(block: PoaBlock, signer: PoaSigner) -> None: def _assert_height_weight_signer_id( vertices: Iterator[BaseTransaction], - expected: list[tuple[int, float, bytes]] + expected: list[tuple[int, float, PoaSignerId]] ) -> None: non_voided_blocks: list[tuple[int, float, bytes]] = [] @@ -248,58 +249,80 @@ def test_producer_leave_and_comeback(self) -> None: signer_id1, signer_id2 = signer1._signer_id, signer2._signer_id self.simulator.settings = get_settings(signer1, signer2, time_between_blocks=10) + expected = [ + # Before manager2 joins, only manager1 produces blocks + (1, poa.BLOCK_WEIGHT_OUT_OF_TURN, signer_id1), + (2, poa.BLOCK_WEIGHT_IN_TURN, signer_id1), + (3, poa.BLOCK_WEIGHT_OUT_OF_TURN, signer_id1), + # When manager2 joins, both of them start taking turns + # But manager2 must sync first. + (4, poa.BLOCK_WEIGHT_IN_TURN, signer_id1), + # Here manager2 has already synced. + (5, poa.BLOCK_WEIGHT_IN_TURN, signer_id2), + (6, poa.BLOCK_WEIGHT_IN_TURN, signer_id1), + (7, poa.BLOCK_WEIGHT_IN_TURN, signer_id2), + (8, poa.BLOCK_WEIGHT_IN_TURN, signer_id1), + (9, poa.BLOCK_WEIGHT_IN_TURN, signer_id2), + (10, poa.BLOCK_WEIGHT_IN_TURN, signer_id1), + (11, poa.BLOCK_WEIGHT_IN_TURN, signer_id2), + (12, poa.BLOCK_WEIGHT_IN_TURN, signer_id1), + # manager2 leaves so manager1 produces all the next blocks + (13, poa.BLOCK_WEIGHT_OUT_OF_TURN, signer_id1), + (14, poa.BLOCK_WEIGHT_IN_TURN, signer_id1), + (15, poa.BLOCK_WEIGHT_OUT_OF_TURN, signer_id1), + # manager2 comes back again, so both of them take turns again + (16, poa.BLOCK_WEIGHT_IN_TURN, signer_id1), + (17, poa.BLOCK_WEIGHT_IN_TURN, signer_id2), + (18, poa.BLOCK_WEIGHT_IN_TURN, signer_id1), + ] + # here we create a situation with an intermittent producer, testing that the other producer produces blocks # out of turn manager1 = self._get_manager(signer1) manager1.allow_mining_without_peers() self.simulator.run(50) + assert manager1.tx_storage.get_block_count() == 4 + _assert_height_weight_signer_id( + manager1.tx_storage.get_all_transactions(), + expected[:3], + ) + manager2 = self._get_manager(signer2) connection = FakeConnection(manager1, manager2) self.simulator.add_connection(connection) self.simulator.run(80) + assert manager1.tx_storage.get_block_count() == 14 + _assert_height_weight_signer_id( + manager1.tx_storage.get_all_transactions(), + expected[:12], + ) + manager2.stop() connection.disconnect(Failure(Exception('testing'))) self.simulator.remove_connection(connection) self.simulator.run(70) + assert manager1.tx_storage.get_block_count() == 17 + _assert_height_weight_signer_id( + manager1.tx_storage.get_all_transactions(), + expected[:15], + ) + assert not manager2.can_start_mining() self.simulator.add_connection(connection) connection.reconnect() manager2.start() self.simulator.run(30) - assert manager1.tx_storage.get_block_count() == 19 - assert manager2.tx_storage.get_block_count() == 19 + assert manager1.tx_storage.get_block_count() == 20 + assert manager2.tx_storage.get_block_count() == 20 assert manager1.tx_storage.get_best_block_tips() == manager2.tx_storage.get_best_block_tips() _assert_height_weight_signer_id( manager1.tx_storage.get_all_transactions(), - [ - # Before manager2 joins, only manager1 produces blocks - (1, poa.BLOCK_WEIGHT_OUT_OF_TURN, signer_id1), - (2, poa.BLOCK_WEIGHT_IN_TURN, signer_id1), - (3, poa.BLOCK_WEIGHT_OUT_OF_TURN, signer_id1), - (4, 
poa.BLOCK_WEIGHT_IN_TURN, signer_id1), - (5, poa.BLOCK_WEIGHT_OUT_OF_TURN, signer_id1), - (6, poa.BLOCK_WEIGHT_IN_TURN, signer_id1), - # When manager2 joins, both of them start taking turns - (7, poa.BLOCK_WEIGHT_IN_TURN, signer_id2), - (8, poa.BLOCK_WEIGHT_IN_TURN, signer_id1), - (9, poa.BLOCK_WEIGHT_IN_TURN, signer_id2), - (10, poa.BLOCK_WEIGHT_IN_TURN, signer_id1), - (11, poa.BLOCK_WEIGHT_IN_TURN, signer_id2), - (12, poa.BLOCK_WEIGHT_IN_TURN, signer_id1), - # manager2 leaves so manager1 produces all the next blocks - (13, poa.BLOCK_WEIGHT_OUT_OF_TURN, signer_id1), - (14, poa.BLOCK_WEIGHT_IN_TURN, signer_id1), - (15, poa.BLOCK_WEIGHT_OUT_OF_TURN, signer_id1), - # manager2 comes back again, so both of them take turns again - (16, poa.BLOCK_WEIGHT_IN_TURN, signer_id1), - (17, poa.BLOCK_WEIGHT_IN_TURN, signer_id2), - (18, poa.BLOCK_WEIGHT_IN_TURN, signer_id1), - ] + expected, ) @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') From 4da0fb9186793ee1116eab39915441909fcf1c28 Mon Sep 17 00:00:00 2001 From: Luis Helder Date: Thu, 21 Nov 2024 16:21:21 -0300 Subject: [PATCH 53/61] feat(p2p): support IPv6 (#1144) --- hathor/builder/builder.py | 15 ++++++ hathor/builder/cli_builder.py | 2 + hathor/cli/nginx_config.py | 3 +- hathor/cli/run_node.py | 29 +++++++++- hathor/cli/run_node_args.py | 4 ++ hathor/conf/settings.py | 1 + hathor/manager.py | 3 +- hathor/p2p/manager.py | 32 ++++++++++- hathor/p2p/peer.py | 21 +++++++- hathor/p2p/peer_endpoint.py | 61 ++++++++++++++++++--- hathor/p2p/states/peer_id.py | 14 ++++- hathor/p2p/states/ready.py | 15 +++++- hathor/simulator/fake_connection.py | 8 +-- tests/cli/test_run_node.py | 29 ++++++++++ tests/p2p/test_bootstrap.py | 26 ++++++++- tests/p2p/test_connections.py | 66 +++++++++++++++++++++++ tests/p2p/test_entrypoint.py | 42 +++++++++++++++ tests/p2p/test_peer_id.py | 8 +++ tests/p2p/test_protocol.py | 84 ++++++++++++++++++++++++++++- tests/unittest.py | 13 ++++- 20 files changed, 451 insertions(+), 25 deletions(-) create mode 100644 tests/p2p/test_entrypoint.py diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index ea3afd8f7..b2d7e2890 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -202,6 +202,9 @@ def __init__(self) -> None: self._poa_signer: PoaSigner | None = None self._poa_block_producer: PoaBlockProducer | None = None + self._enable_ipv6: bool = False + self._disable_ipv4: bool = False + def build(self) -> BuildArtifacts: if self.artifacts is not None: raise ValueError('cannot call build twice') @@ -426,6 +429,8 @@ def _get_or_create_p2p_manager(self) -> ConnectionsManager: ssl=enable_ssl, whitelist_only=False, rng=self._rng, + enable_ipv6=self._enable_ipv6, + disable_ipv4=self._disable_ipv4, ) SyncSupportLevel.add_factories( self._get_or_create_settings(), @@ -812,6 +817,16 @@ def disable_full_verification(self) -> 'Builder': self._full_verification = False return self + def enable_ipv6(self) -> 'Builder': + self.check_if_can_modify() + self._enable_ipv6 = True + return self + + def disable_ipv4(self) -> 'Builder': + self.check_if_can_modify() + self._disable_ipv4 = True + return self + def set_soft_voided_tx_ids(self, soft_voided_tx_ids: set[bytes]) -> 'Builder': self.check_if_can_modify() self._soft_voided_tx_ids = soft_voided_tx_ids diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index 464d9b319..a5724de98 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -326,6 +326,8 @@ def create_manager(self, reactor: Reactor) -> 
HathorManager: ssl=True, whitelist_only=False, rng=Random(), + enable_ipv6=self._args.x_enable_ipv6, + disable_ipv4=self._args.x_disable_ipv4, ) vertex_handler = VertexHandler( diff --git a/hathor/cli/nginx_config.py b/hathor/cli/nginx_config.py index 5c6f2a874..9f8684f0a 100644 --- a/hathor/cli/nginx_config.py +++ b/hathor/cli/nginx_config.py @@ -240,11 +240,12 @@ def generate_nginx_config(openapi: dict[str, Any], *, out_file: TextIO, rate_k: server_open = f''' upstream backend {{ - server fullnode:8080; + server 127.0.0.1:8080; }} server {{ listen 80; + listen [::]:80; server_name localhost; # Look for client IP in the X-Forwarded-For header diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 9498194ab..58cface89 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -93,7 +93,9 @@ def create_parser(cls) -> ArgumentParser: help='Address to listen for new connections (eg: tcp:8000)') parser.add_argument('--bootstrap', action='append', help='Address to connect to (eg: tcp:127.0.0.1:8000') parser.add_argument('--status', type=int, help='Port to run status server') + parser.add_argument('--x-status-ipv6-interface', help='IPv6 interface to bind the status server') parser.add_argument('--stratum', type=int, help='Port to run stratum server') + parser.add_argument('--x-stratum-ipv6-interface', help='IPv6 interface to bind the stratum server') parser.add_argument('--data', help='Data directory') storage = parser.add_mutually_exclusive_group() storage.add_argument('--rocksdb-storage', action='store_true', help='Use RocksDB storage backend (default)') @@ -162,6 +164,10 @@ def create_parser(cls) -> ArgumentParser: help='Log tx bytes for debugging') parser.add_argument('--disable-ws-history-streaming', action='store_true', help='Disable websocket history streaming API') + parser.add_argument('--x-enable-ipv6', action='store_true', + help='Enables listening on IPv6 interface and connecting to IPv6 peers') + parser.add_argument('--x-disable-ipv4', action='store_true', + help='Disables connecting to IPv4 peers') return parser def prepare(self, *, register_resources: bool = True) -> None: @@ -181,6 +187,7 @@ def prepare(self, *, register_resources: bool = True) -> None: print('Maximum number of open file descriptors is too low. Minimum required is 256.') sys.exit(-2) + self.validate_args() self.check_unsafe_arguments() self.check_python_version() @@ -202,7 +209,15 @@ def prepare(self, *, register_resources: bool = True) -> None: if self._args.stratum: assert self.manager.stratum_factory is not None - self.reactor.listenTCP(self._args.stratum, self.manager.stratum_factory) + + if self._args.x_enable_ipv6: + interface = self._args.x_stratum_ipv6_interface or '::0' + # Linux by default will map IPv4 to IPv6, so listening only in the IPv6 interface will be + # enough to handle IPv4 connections. 
There is a kernel parameter that controls this behavior: + # https://sysctl-explorer.net/net/ipv6/bindv6only/ + self.reactor.listenTCP(self._args.stratum, self.manager.stratum_factory, interface=interface) + else: + self.reactor.listenTCP(self._args.stratum, self.manager.stratum_factory) from hathor.conf.get_settings import get_global_settings settings = get_global_settings() @@ -217,7 +232,12 @@ def prepare(self, *, register_resources: bool = True) -> None: status_server = resources_builder.build() if self._args.status: assert status_server is not None - self.reactor.listenTCP(self._args.status, status_server) + + if self._args.x_enable_ipv6: + interface = self._args.x_status_ipv6_interface or '::0' + self.reactor.listenTCP(self._args.status, status_server, interface=interface) + else: + self.reactor.listenTCP(self._args.status, status_server) self.start_manager() @@ -351,6 +371,11 @@ def run_sysctl_from_signal(self) -> None: except SysctlRunnerException as e: self.log.warn('[USR2] Error', errmsg=str(e)) + def validate_args(self) -> None: + if self._args.x_disable_ipv4 and not self._args.x_enable_ipv6: + self.log.critical('You must enable IPv6 if you disable IPv4.') + sys.exit(-1) + def check_unsafe_arguments(self) -> None: unsafe_args_found = [] for arg_cmdline, arg_test_fn in self.UNSAFE_ARGUMENTS: diff --git a/hathor/cli/run_node_args.py b/hathor/cli/run_node_args.py index f493a7d33..dca87ed16 100644 --- a/hathor/cli/run_node_args.py +++ b/hathor/cli/run_node_args.py @@ -36,7 +36,9 @@ class RunNodeArgs(BaseModel, extra=Extra.allow): listen: list[str] bootstrap: Optional[list[str]] status: Optional[int] + x_status_ipv6_interface: Optional[str] stratum: Optional[int] + x_stratum_ipv6_interface: Optional[str] data: Optional[str] rocksdb_storage: bool memory_storage: bool @@ -83,3 +85,5 @@ class RunNodeArgs(BaseModel, extra=Extra.allow): nano_testnet: bool log_vertex_bytes: bool disable_ws_history_streaming: bool + x_enable_ipv6: bool + x_disable_ipv4: bool diff --git a/hathor/conf/settings.py b/hathor/conf/settings.py index db235f2b7..3edb664a7 100644 --- a/hathor/conf/settings.py +++ b/hathor/conf/settings.py @@ -364,6 +364,7 @@ def GENESIS_TX2_TIMESTAMP(self) -> int: CAPABILITY_WHITELIST: str = 'whitelist' CAPABILITY_SYNC_VERSION: str = 'sync-version' CAPABILITY_GET_BEST_BLOCKCHAIN: str = 'get-best-blockchain' + CAPABILITY_IPV6: str = 'ipv6' # peers announcing this capability will be relayed ipv6 entrypoints from other peers # Where to download whitelist from WHITELIST_URL: Optional[str] = None diff --git a/hathor/manager.py b/hathor/manager.py index cc86dd9dc..8751e5427 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -251,7 +251,8 @@ def get_default_capabilities(self) -> list[str]: return [ self._settings.CAPABILITY_WHITELIST, self._settings.CAPABILITY_SYNC_VERSION, - self._settings.CAPABILITY_GET_BEST_BLOCKCHAIN + self._settings.CAPABILITY_GET_BEST_BLOCKCHAIN, + self._settings.CAPABILITY_IPV6, ] def start(self) -> None: diff --git a/hathor/p2p/manager.py b/hathor/p2p/manager.py index d53c7be83..d7e7045c9 100644 --- a/hathor/p2p/manager.py +++ b/hathor/p2p/manager.py @@ -100,6 +100,8 @@ def __init__( ssl: bool, rng: Random, whitelist_only: bool, + enable_ipv6: bool, + disable_ipv4: bool, ) -> None: self.log = logger.new() self._settings = settings @@ -190,6 +192,12 @@ def __init__( # Parameter to explicitly enable whitelist-only mode, when False it will still check the whitelist for sync-v1 self.whitelist_only = whitelist_only + # Parameter to enable IPv6 connections + 
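The run_node.py hunk above relies on the fact that, on Linux with net.ipv6.bindv6only=0 (the default), a socket bound to the IPv6 wildcard also accepts IPv4 connections as v4-mapped addresses. A minimal standalone illustration with Twisted (the resource and port are placeholders, not the node's real status server):

from twisted.internet import reactor
from twisted.web.resource import Resource
from twisted.web.server import Site

class Ping(Resource):
    isLeaf = True

    def render_GET(self, request):
        return b'pong'

# Binding to '::0' listens on IPv6; with bindv6only=0 the same socket also
# serves IPv4 clients, so no separate IPv4 listener is needed.
reactor.listenTCP(8080, Site(Ping()), interface='::0')
reactor.run()
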
self.enable_ipv6 = enable_ipv6 + + # Parameter to disable IPv4 connections + self.disable_ipv4 = disable_ipv4 + # Timestamp when the last discovery ran self._last_discovery: float = 0. @@ -577,7 +585,11 @@ def _update_whitelist_cb(self, body: bytes) -> None: def connect_to_if_not_connected(self, peer: UnverifiedPeer | PublicPeer, now: int) -> None: """ Attempts to connect if it is not connected to the peer. """ - if not peer.info.entrypoints: + if not peer.info.entrypoints or ( + not self.enable_ipv6 and not peer.info.get_ipv4_only_entrypoints() + ) or ( + self.disable_ipv4 and not peer.info.get_ipv6_only_entrypoints() + ): # It makes no sense to keep storing peers that have disconnected and have no entrypoints # We will never be able to connect to them anymore and they will only keep spending memory # and other resources when used in APIs, so we are removing them here @@ -589,7 +601,15 @@ def connect_to_if_not_connected(self, peer: UnverifiedPeer | PublicPeer, now: in assert peer.id is not None if peer.info.can_retry(now): - addr = self.rng.choice(peer.info.entrypoints) + if self.enable_ipv6 and not self.disable_ipv4: + addr = self.rng.choice(peer.info.entrypoints) + elif self.enable_ipv6 and self.disable_ipv4: + addr = self.rng.choice(peer.info.get_ipv6_only_entrypoints()) + elif not self.enable_ipv6 and not self.disable_ipv4: + addr = self.rng.choice(peer.info.get_ipv4_only_entrypoints()) + else: + raise ValueError('IPv4 is disabled and IPv6 is not enabled') + self.connect_to(addr.with_id(peer.id), peer) def _connect_to_callback( @@ -636,6 +656,14 @@ def connect_to( self.log.debug('skip because of simple localhost check', entrypoint=str(entrypoint)) return + if not self.enable_ipv6 and entrypoint.addr.is_ipv6(): + self.log.info('skip because IPv6 is disabled', entrypoint=entrypoint) + return + + if self.disable_ipv4 and entrypoint.addr.is_ipv4(): + self.log.info('skip because IPv4 is disabled', entrypoint=entrypoint) + return + if use_ssl is None: use_ssl = self.use_ssl diff --git a/hathor/p2p/peer.py b/hathor/p2p/peer.py index 53f43369d..8bc963b93 100644 --- a/hathor/p2p/peer.py +++ b/hathor/p2p/peer.py @@ -114,6 +114,18 @@ class PeerInfo: flags: set[str] = field(default_factory=set) _settings: HathorSettings = field(default_factory=get_global_settings, repr=False) + def get_ipv4_only_entrypoints(self) -> list[PeerAddress]: + return list(filter(lambda e: not e.is_ipv6(), self.entrypoints)) + + def get_ipv6_only_entrypoints(self) -> list[PeerAddress]: + return list(filter(lambda e: e.is_ipv6(), self.entrypoints)) + + def ipv4_entrypoints_as_str(self) -> list[str]: + return list(map(str, self.get_ipv4_only_entrypoints())) + + def ipv6_entrypoints_as_str(self) -> list[str]: + return list(map(str, self.get_ipv6_only_entrypoints())) + def entrypoints_as_str(self) -> list[str]: """Return a list of entrypoints serialized as str""" return list(map(str, self.entrypoints)) @@ -203,14 +215,19 @@ class UnverifiedPeer: id: PeerId info: PeerInfo = field(default_factory=PeerInfo) - def to_json(self) -> dict[str, Any]: + def to_json(self, only_ipv4_entrypoints: bool = True) -> dict[str, Any]: """ Return a JSON serialization of the object. This format is compatible with libp2p. 
""" + if only_ipv4_entrypoints: + entrypoints_as_str = self.info.ipv4_entrypoints_as_str() + else: + entrypoints_as_str = self.info.entrypoints_as_str() + return { 'id': str(self.id), - 'entrypoints': self.info.entrypoints_as_str(), + 'entrypoints': entrypoints_as_str, } @classmethod diff --git a/hathor/p2p/peer_endpoint.py b/hathor/p2p/peer_endpoint.py index c7cafce20..62e4624a2 100644 --- a/hathor/p2p/peer_endpoint.py +++ b/hathor/p2p/peer_endpoint.py @@ -14,13 +14,14 @@ from __future__ import annotations +import re from dataclasses import dataclass from enum import Enum from typing import Any from urllib.parse import parse_qs, urlparse from twisted.internet.address import IPv4Address, IPv6Address -from twisted.internet.endpoints import TCP4ClientEndpoint +from twisted.internet.endpoints import TCP4ClientEndpoint, TCP6ClientEndpoint from twisted.internet.interfaces import IAddress, IStreamClientEndpoint from typing_extensions import Self @@ -32,6 +33,37 @@ 'instead, compare the addr attribute explicitly, and if relevant, the peer_id too.' ) +""" +This Regex will match any valid IPv6 address. + +Some examples that will match: + '::' + '::1' + '2001:0db8:85a3:0000:0000:8a2e:0370:7334' + '2001:db8:85a3:0:0:8a2e:370:7334' + '2001:db8::8a2e:370:7334' + '2001:db8:0:0:0:0:2:1' + '1234::5678' + 'fe80::' + '::abcd:abcd:abcd:abcd:abcd:abcd' + '0:0:0:0:0:0:0:1' + '0:0:0:0:0:0:0:0' + +Some examples that won't match: + '127.0.0.1' --> # IPv4 + '1200::AB00:1234::2552:7777:1313' --> # double '::' + '2001:db8::g123' --> # invalid character + '2001:db8::85a3::7334' --> # double '::' + '2001:db8:85a3:0000:0000:8a2e:0370:7334:1234' --> # too many groups + '12345::abcd' --> # too many characters in a group + '2001:db8:85a3:8a2e:0370' --> # too few groups + '2001:db8:85a3::8a2e:3707334' --> # too many characters in a group + '1234:56789::abcd' --> # too many characters in a group + ':2001:db8::1' --> # invalid start + '2001:db8::1:' --> # invalid end +""" +IPV6_REGEX = re.compile(r'''^(([0-9a-fA-F]{1,4}:){7}([0-9a-fA-F]{1,4}|:)|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:))$''') # noqa: E501 + class Protocol(Enum): TCP = 'tcp' @@ -46,7 +78,11 @@ class PeerAddress: port: int def __str__(self) -> str: - return f'{self.protocol.value}://{self.host}:{self.port}' + host = self.host + if self.is_ipv6(): + host = f'[{self.host}]' + + return f'{self.protocol.value}://{host}:{self.port}' def __eq__(self, other: Any) -> bool: """ @@ -138,9 +174,11 @@ def from_address(cls, address: IAddress) -> Self: def to_client_endpoint(self, reactor: Reactor) -> IStreamClientEndpoint: """This method generates a twisted client endpoint that has a .connect() method.""" - # XXX: currently we don't support IPv6, but when we do we have to decide between TCP4ClientEndpoint and - # TCP6ClientEndpoint, when the host is an IP address that is easy, but when it is a DNS hostname, we will not - # know which to use until we know which resource records it holds (A or AAAA) + # XXX: currently we only support IPv6 IPs, not hosts resolving to AAAA records. + # To support them we would have to perform DNS queries to resolve + # the host and check which record it holds (A or AAAA). 
+ if self.is_ipv6(): + return TCP6ClientEndpoint(reactor, self.host, self.port) return TCP4ClientEndpoint(reactor, self.host, self.port) def is_localhost(self) -> bool: @@ -157,7 +195,18 @@ def is_localhost(self) -> bool: >>> PeerAddress.parse('tcp://foo.bar:444').is_localhost() False """ - return self.host in ('127.0.0.1', 'localhost') + return self.host in ('127.0.0.1', 'localhost', '::1') + + def is_ipv6(self) -> bool: + """Used to determine if the entrypoint host is an IPv6 address. + """ + # XXX: This means we don't currently consider DNS names that resolve to IPv6 addresses as IPv6. + return IPV6_REGEX.fullmatch(self.host) is not None + + def is_ipv4(self) -> bool: + """Used to determine if the entrypoint host is an IPv4 address. + """ + return not self.is_ipv6() def with_id(self, peer_id: PeerId | None = None) -> PeerEndpoint: """Create a PeerEndpoint instance with self as the address and with the provided peer_id, or None.""" diff --git a/hathor/p2p/states/peer_id.py b/hathor/p2p/states/peer_id.py index 77e8a051e..e46e62ce9 100644 --- a/hathor/p2p/states/peer_id.py +++ b/hathor/p2p/states/peer_id.py @@ -42,6 +42,12 @@ def __init__(self, protocol: 'HathorProtocol', settings: HathorSettings) -> None self.my_peer_ready = False self.other_peer_ready = False + # Common capabilities between the two peers + common_capabilities = protocol.capabilities & set(protocol.node.capabilities) + + # whether to relay IPV6 entrypoints + self.should_relay_ipv6_entrypoints: bool = self._settings.CAPABILITY_IPV6 in common_capabilities + def on_enter(self) -> None: self.send_peer_id() @@ -65,10 +71,16 @@ def handle_ready(self, payload: str) -> None: def _get_peer_id_data(self) -> dict[str, Any]: my_peer = self.protocol.my_peer + + if not self.should_relay_ipv6_entrypoints: + entrypoints_as_str = my_peer.info.ipv4_entrypoints_as_str() + else: + entrypoints_as_str = my_peer.info.entrypoints_as_str() + return dict( id=str(my_peer.id), pubKey=my_peer.get_public_key(), - entrypoints=my_peer.info.entrypoints_as_str(), + entrypoints=entrypoints_as_str, ) def send_peer_id(self) -> None: diff --git a/hathor/p2p/states/ready.py b/hathor/p2p/states/ready.py index 1bed1c745..fe1924347 100644 --- a/hathor/p2p/states/ready.py +++ b/hathor/p2p/states/ready.py @@ -96,6 +96,9 @@ def __init__(self, protocol: 'HathorProtocol', settings: HathorSettings) -> None ProtocolMessages.BEST_BLOCKCHAIN: self.handle_best_blockchain, }) + # whether to relay IPV6 entrypoints + self.should_relay_ipv6_entrypoints: bool = self._settings.CAPABILITY_IPV6 in common_capabilities + # Initialize sync manager and add its commands to the list of available commands. 
connections = self.protocol.connections assert connections is not None @@ -163,8 +166,16 @@ def send_peers(self, peer_list: Iterable[PublicPeer]) -> None: """ data = [] for peer in peer_list: - if peer.info.entrypoints: - data.append(peer.to_unverified_peer().to_json()) + if self.should_relay_ipv6_entrypoints and not peer.info.entrypoints: + self.log.debug('no entrypoints to relay', peer=str(peer.id)) + continue + + if not self.should_relay_ipv6_entrypoints and not peer.info.get_ipv4_only_entrypoints(): + self.log.debug('no ipv4 entrypoints to relay', peer=str(peer.id)) + continue + + data.append(peer.to_unverified_peer().to_json( + only_ipv4_entrypoints=not self.should_relay_ipv6_entrypoints)) self.send_message(ProtocolMessages.PEERS, json_dumps(data)) self.log.debug('send peers', peers=data) diff --git a/hathor/simulator/fake_connection.py b/hathor/simulator/fake_connection.py index b3a29afc9..3c030c901 100644 --- a/hathor/simulator/fake_connection.py +++ b/hathor/simulator/fake_connection.py @@ -19,7 +19,7 @@ from OpenSSL.crypto import X509 from structlog import get_logger -from twisted.internet.address import IPv4Address +from twisted.internet.address import IPv4Address, IPv6Address from twisted.internet.testing import StringTransport from hathor.p2p.peer import PrivatePeer @@ -34,7 +34,7 @@ class HathorStringTransport(StringTransport): - def __init__(self, peer: PrivatePeer, *, peer_address: IPv4Address): + def __init__(self, peer: PrivatePeer, *, peer_address: IPv4Address | IPv6Address): super().__init__(peerAddress=peer_address) self._peer = peer @@ -58,8 +58,8 @@ def __init__( *, latency: float = 0, autoreconnect: bool = False, - addr1: IPv4Address | None = None, - addr2: IPv4Address | None = None, + addr1: IPv4Address | IPv6Address | None = None, + addr2: IPv4Address | IPv6Address | None = None, fake_bootstrap_id: PeerId | None | Literal[False] = False, ): """ diff --git a/tests/cli/test_run_node.py b/tests/cli/test_run_node.py index 3b72a2592..84d73d2ef 100644 --- a/tests/cli/test_run_node.py +++ b/tests/cli/test_run_node.py @@ -20,6 +20,7 @@ def register_signal_handlers(self) -> None: @patch('twisted.internet.reactor.listenTCP') def test_listen_tcp_ipv4(self, mock_listenTCP): + """Should call listenTCP with no interface defined when using only IPv4""" class CustomRunNode(RunNode): def start_manager(self) -> None: pass @@ -31,3 +32,31 @@ def register_signal_handlers(self) -> None: self.assertTrue(run_node is not None) mock_listenTCP.assert_called_with(1234, ANY) + + @patch('twisted.internet.reactor.listenTCP') + def test_listen_tcp_ipv6(self, mock_listenTCP): + """Should call listenTCP with interface='::0' when enabling IPv6""" + class CustomRunNode(RunNode): + def start_manager(self) -> None: + pass + + def register_signal_handlers(self) -> None: + pass + + run_node = CustomRunNode(argv=['--memory-storage', '--x-enable-ipv6', '--status', '1234']) + self.assertTrue(run_node is not None) + + mock_listenTCP.assert_called_with(1234, ANY, interface='::0') + + def test_validate_ipv4_or_ipv6(self): + """The program should exit if no IP version is enabled""" + class CustomRunNode(RunNode): + def start_manager(self) -> None: + pass + + def register_signal_handlers(self) -> None: + pass + + # Should call system exit + with self.assertRaises(SystemExit): + CustomRunNode(argv=['--memory-storage', '--x-disable-ipv4', '--status', '1234']) diff --git a/tests/p2p/test_bootstrap.py b/tests/p2p/test_bootstrap.py index 82aa932bb..9855a0fda 100644 --- a/tests/p2p/test_bootstrap.py +++ 
b/tests/p2p/test_bootstrap.py @@ -50,7 +50,18 @@ class BootstrapTestCase(unittest.TestCase): def test_mock_discovery(self) -> None: pubsub = PubSubManager(self.clock) peer = PrivatePeer.auto_generated() - connections = ConnectionsManager(self._settings, self.clock, peer, pubsub, True, self.rng, True) + connections = ConnectionsManager( + self._settings, + self.clock, + peer, + pubsub, + True, + self.rng, + True, + enable_ipv6=False, + disable_ipv4=False + ) + host_ports1 = [ ('foobar', 1234), ('127.0.0.99', 9999), @@ -74,7 +85,18 @@ def test_mock_discovery(self) -> None: def test_dns_discovery(self) -> None: pubsub = PubSubManager(self.clock) peer = PrivatePeer.auto_generated() - connections = ConnectionsManager(self._settings, self.clock, peer, pubsub, True, self.rng, True) + connections = ConnectionsManager( + self._settings, + self.clock, + peer, + pubsub, + True, + self.rng, + True, + enable_ipv6=False, + disable_ipv4=False + ) + bootstrap_a = [ '127.0.0.99', '127.0.0.88', diff --git a/tests/p2p/test_connections.py b/tests/p2p/test_connections.py index b27897ca4..db5a85f1e 100644 --- a/tests/p2p/test_connections.py +++ b/tests/p2p/test_connections.py @@ -23,3 +23,69 @@ def test_manager_connections(self) -> None: self.assertIn(endpoint, manager.connections.iter_not_ready_endpoints()) self.assertNotIn(endpoint, manager.connections.iter_ready_connections()) self.assertNotIn(endpoint, manager.connections.iter_all_connections()) + + def test_manager_disabled_ipv6(self) -> None: + """Should not try to connect to ipv6 peers if ipv6 is disabled""" + + manager = self.create_peer( + 'testnet', + enable_sync_v1=False, + enable_sync_v2=True, + enable_ipv6=False, + disable_ipv4=False + ) + + endpoint = PeerEndpoint.parse('tcp://[::1]:8005') + manager.connections.connect_to(endpoint, use_ssl=True) + + self.assertNotIn(endpoint, manager.connections.iter_not_ready_endpoints()) + self.assertNotIn(endpoint, manager.connections.iter_ready_connections()) + self.assertNotIn(endpoint, manager.connections.iter_all_connections()) + + def test_manager_enabled_ipv6_and_ipv4(self) -> None: + """Should connect to both ipv4 and ipv6 peers if both are enabled""" + + manager = self.create_peer( + 'testnet', + enable_sync_v1=False, + enable_sync_v2=True, + enable_ipv6=True, + disable_ipv4=False + ) + + endpoint_ipv6 = PeerEndpoint.parse('tcp://[::3:2:1]:8005') + manager.connections.connect_to(endpoint_ipv6, use_ssl=True) + + endpoint_ipv4 = PeerEndpoint.parse('tcp://1.2.3.4:8005') + manager.connections.connect_to(endpoint_ipv4, use_ssl=True) + + self.assertIn( + endpoint_ipv4.addr.host, + list(map(lambda x: x.addr.host, manager.connections.iter_not_ready_endpoints())) + ) + self.assertIn( + endpoint_ipv6.addr.host, + list(map(lambda x: x.addr.host, manager.connections.iter_not_ready_endpoints())) + ) + + self.assertEqual(2, len(list(manager.connections.iter_not_ready_endpoints()))) + self.assertEqual(0, len(list(manager.connections.iter_ready_connections()))) + self.assertEqual(0, len(list(manager.connections.iter_all_connections()))) + + def test_manager_disabled_ipv4(self) -> None: + """Should not try to connect to ipv4 peers if ipv4 is disabled""" + + manager = self.create_peer( + 'testnet', + enable_sync_v1=False, + enable_sync_v2=True, + enable_ipv6=True, + disable_ipv4=True, + ) + + endpoint = PeerEndpoint.parse('tcp://127.0.0.1:8005') + manager.connections.connect_to(endpoint, use_ssl=True) + + self.assertEqual(0, len(list(manager.connections.iter_not_ready_endpoints()))) + self.assertEqual(0, 
len(list(manager.connections.iter_ready_connections()))) + self.assertEqual(0, len(list(manager.connections.iter_all_connections()))) diff --git a/tests/p2p/test_entrypoint.py b/tests/p2p/test_entrypoint.py new file mode 100644 index 000000000..718ca4bdc --- /dev/null +++ b/tests/p2p/test_entrypoint.py @@ -0,0 +1,42 @@ +from hathor.p2p.peer_endpoint import PeerAddress, PeerEndpoint, Protocol +from tests import unittest + + +class EntrypointTestCase(unittest.TestCase): + def test_is_ipv6(self) -> None: + valid_addresses = [ + '::', + '::1', + '2001:0db8:85a3:0000:0000:8a2e:0370:7334', + '2001:db8:85a3:0:0:8a2e:370:7334', + '2001:db8::8a2e:370:7334', + '2001:db8:0:0:0:0:2:1', + '1234::5678', + 'fe80::', + '::abcd:abcd:abcd:abcd:abcd:abcd', + '0:0:0:0:0:0:0:1', + '0:0:0:0:0:0:0:0' + ] + + invalid_addresses = [ + '127.0.0.1', + '1200::AB00:1234::2552:7777:1313', + '2001:db8::g123', + '2001:db8::85a3::7334', + '2001:db8:85a3:0000:0000:8a2e:0370:7334:1234', + '12345::abcd', + '2001:db8:85a3:8a2e:0370', + '2001:db8:85a3::8a2e:3707334', + '1234:56789::abcd', + ':2001:db8::1', + '2001:db8::1:', + '2001::85a3::8a2e:370:7334' + ] + + for address in valid_addresses: + peer_address = PeerAddress(Protocol.TCP, address, 40403) + self.assertTrue(PeerEndpoint(peer_address).addr.is_ipv6()) + + for address in invalid_addresses: + peer_address = PeerAddress(Protocol.TCP, address, 40403) + self.assertFalse(PeerEndpoint(peer_address).addr.is_ipv6()) diff --git a/tests/p2p/test_peer_id.py b/tests/p2p/test_peer_id.py index 56dfaf79b..1f95cbd12 100644 --- a/tests/p2p/test_peer_id.py +++ b/tests/p2p/test_peer_id.py @@ -276,6 +276,10 @@ async def test_validate_entrypoint(self) -> None: peer.info.entrypoints = [PeerAddress.parse('tcp://uri_name:40403')] result = await peer.info.validate_entrypoint(protocol) self.assertTrue(result) + # if entrypoint is an IPv6 + peer.entrypoints = [PeerEndpoint.parse('tcp://[::1]:40403')] + result = await peer.info.validate_entrypoint(protocol) + self.assertTrue(result) # test invalid. 
DNS in test mode will resolve to '127.0.0.1:40403' protocol.entrypoint = PeerEndpoint.parse('tcp://45.45.45.45:40403') result = await peer.info.validate_entrypoint(protocol) @@ -298,6 +302,10 @@ def getPeer(self) -> DummyPeer: peer.info.entrypoints = [PeerAddress.parse('tcp://uri_name:40403')] result = await peer.info.validate_entrypoint(protocol) self.assertTrue(result) + # if entrypoint is an IPv6 + peer.entrypoints = [PeerEndpoint.parse('tcp://[2001:db8::ff00:42:8329]:40403')] + result = await peer.info.validate_entrypoint(protocol) + self.assertTrue(result) class SyncV1PeerIdTest(unittest.SyncV1Params, BasePeerIdTest): diff --git a/tests/p2p/test_protocol.py b/tests/p2p/test_protocol.py index 841a45929..708af1f0d 100644 --- a/tests/p2p/test_protocol.py +++ b/tests/p2p/test_protocol.py @@ -3,6 +3,7 @@ from unittest.mock import Mock, patch from twisted.internet import defer +from twisted.internet.address import IPv4Address from twisted.internet.protocol import Protocol from twisted.python.failure import Failure @@ -10,7 +11,7 @@ from hathor.p2p.manager import ConnectionsManager from hathor.p2p.messages import ProtocolMessages from hathor.p2p.peer import PrivatePeer -from hathor.p2p.peer_endpoint import PeerAddress +from hathor.p2p.peer_endpoint import PeerAddress, PeerEndpoint from hathor.p2p.protocol import HathorLineReceiver, HathorProtocol from hathor.simulator import FakeConnection from hathor.util import json_dumps, json_loadb @@ -201,6 +202,87 @@ def test_valid_hello(self) -> None: self.assertFalse(self.conn.tr1.disconnecting) self.assertFalse(self.conn.tr2.disconnecting) + def test_hello_without_ipv6_capability(self) -> None: + """Tests the connection between peers with and without the IPV6 capability. + Expected behavior: the entrypoint with IPV6 is not relayed. + """ + network = 'testnet' + manager1 = self.create_peer( + network, + peer=self.peer1, + capabilities=[self._settings.CAPABILITY_IPV6, self._settings.CAPABILITY_SYNC_VERSION] + ) + manager2 = self.create_peer( + network, + peer=self.peer2, + capabilities=[self._settings.CAPABILITY_SYNC_VERSION] + ) + + port1 = FakeConnection._get_port(manager1) + port2 = FakeConnection._get_port(manager2) + + addr1 = IPv4Address('TCP', '192.168.1.1', port1) + addr2 = IPv4Address('TCP', '192.168.1.1', port2) + + entrypoint_1_ipv6 = PeerEndpoint.parse('tcp://[::1]:54321') + entrypoint_1_ipv4 = PeerEndpoint.parse(f'tcp://192.168.1.1:{port1}') + entrypoint_2_ipv4 = PeerEndpoint.parse(f'tcp://192.168.1.1:{port2}') + + self.peer1.info.entrypoints.append(entrypoint_1_ipv6.addr) + self.peer1.info.entrypoints.append(entrypoint_1_ipv4.addr) + self.peer2.info.entrypoints.append(entrypoint_2_ipv4.addr) + + conn = FakeConnection(manager1, manager2, addr1=addr1, addr2=addr2) + + conn.run_one_step() # HELLO + conn.run_one_step() # PEER-ID + + self.assertEqual(len(conn.proto1.peer.info.entrypoints), 1) + self.assertEqual(len(conn.proto2.peer.info.entrypoints), 1) + self.assertEqual(conn.proto1.peer.info.entrypoints[0].host, '192.168.1.1') + self.assertEqual(conn.proto2.peer.info.entrypoints[0].host, '192.168.1.1') + + def test_hello_with_ipv6_capability(self) -> None: + """Tests the connection between peers with the IPV6 capability. + Expected behavior: the entrypoint with IPV6 is relayed. 
+ """ + network = 'testnet' + manager1 = self.create_peer( + network, + peer=self.peer1, + capabilities=[self._settings.CAPABILITY_IPV6, self._settings.CAPABILITY_SYNC_VERSION] + ) + manager2 = self.create_peer( + network, + peer=self.peer2, + capabilities=[self._settings.CAPABILITY_IPV6, self._settings.CAPABILITY_SYNC_VERSION] + ) + + port1 = FakeConnection._get_port(manager1) + port2 = FakeConnection._get_port(manager2) + + addr1 = IPv4Address('TCP', '192.168.1.1', port1) + addr2 = IPv4Address('TCP', '192.168.1.1', port2) + + entrypoint_1_ipv6 = PeerEndpoint.parse('tcp://[::1]:54321') + entrypoint_1_ipv4 = PeerEndpoint.parse(f'tcp://192.168.1.1:{port1}') + entrypoint_2_ipv4 = PeerEndpoint.parse(f'tcp://192.168.1.1:{port2}') + + self.peer1.info.entrypoints.append(entrypoint_1_ipv6.addr) + self.peer1.info.entrypoints.append(entrypoint_1_ipv4.addr) + self.peer2.info.entrypoints.append(entrypoint_2_ipv4.addr) + + conn = FakeConnection(manager1, manager2, addr1=addr1, addr2=addr2) + + conn.run_one_step() # HELLO + conn.run_one_step() # PEER-ID + + self.assertEqual(len(conn.proto1.peer.info.entrypoints), 1) + self.assertEqual(len(conn.proto2.peer.info.entrypoints), 2) + self.assertTrue('::1' in map(lambda x: x.host, conn.proto2.peer.info.entrypoints)) + self.assertTrue('192.168.1.1' in map(lambda x: x.host, conn.proto2.peer.info.entrypoints)) + self.assertEqual(conn.proto1.peer.info.entrypoints[0].host, '192.168.1.1') + def test_invalid_same_peer_id(self) -> None: manager3 = self.create_peer(self.network, peer=self.peer1) conn = FakeConnection(self.manager1, manager3) diff --git a/tests/unittest.py b/tests/unittest.py index 94cef1c34..4a659f6bd 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -226,7 +226,9 @@ def create_peer( # type: ignore[no-untyped-def] pubsub: PubSubManager | None = None, event_storage: EventStorage | None = None, enable_event_queue: bool | None = None, - use_memory_storage: bool | None = None + use_memory_storage: bool | None = None, + enable_ipv6: bool = False, + disable_ipv4: bool = False, ): # TODO: Add -> HathorManager here. It breaks the lint in a lot of places. 
enable_sync_v1, enable_sync_v2 = self._syncVersionFlags(enable_sync_v1, enable_sync_v2) @@ -290,6 +292,15 @@ def create_peer( # type: ignore[no-untyped-def] if utxo_index: builder.enable_utxo_index() + if capabilities is not None: + builder.set_capabilities(capabilities) + + if enable_ipv6: + builder.enable_ipv6() + + if disable_ipv4: + builder.disable_ipv4() + daa = DifficultyAdjustmentAlgorithm(settings=self._settings, test_mode=TestMode.TEST_ALL_WEIGHT) builder.set_daa(daa) manager = self.create_peer_from_builder(builder, start_manager=start_manager) From cf3fa8b5f7aad262b443f04825cf0d3c23016f15 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Tue, 26 Nov 2024 14:58:00 -0300 Subject: [PATCH 54/61] chore(feature-activation): delay NOP feature (#1187) --- hathor/cli/events_simulator/events_simulator.py | 2 ++ hathor/conf/testnet.py | 10 +++++----- hathor/conf/testnet.yml | 10 +++++----- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/hathor/cli/events_simulator/events_simulator.py b/hathor/cli/events_simulator/events_simulator.py index 897c57bf3..23fe64418 100644 --- a/hathor/cli/events_simulator/events_simulator.py +++ b/hathor/cli/events_simulator/events_simulator.py @@ -88,6 +88,8 @@ def execute(args: Namespace, reactor: 'ReactorProtocol') -> None: forwarding_ws_factory.start(stream_id='simulator_stream_id') scenario.simulate(simulator, manager) + assert manager.wallet is not None + log.info('final result', balances=manager.wallet.get_balance_per_address(simulator.settings.HATHOR_TOKEN_UID)) reactor.listenTCP(args.port, site) reactor.run() diff --git a/hathor/conf/testnet.py b/hathor/conf/testnet.py index 4743e9f4e..2334a7d7e 100644 --- a/hathor/conf/testnet.py +++ b/hathor/conf/testnet.py @@ -73,11 +73,11 @@ # NOP feature to test Feature Activation for Transactions Feature.NOP_FEATURE_1: Criteria( bit=0, - # N = 4_354_560 - # Expected to be reached around Sunday, 2024-11-17. - # Right now the best block is 4_326_600 on testnet (2024-11-07). - start_height=4_354_560, # N - timeout_height=4_394_880, # N + 2 * 20160 (2 weeks after the start) + # N = 4_394_880 + # start_height expected to be reached around Sunday, 2024-12-01. + # Right now the best block is 4_377_375 on testnet (2024-11-25). + start_height=4_394_880, # N + timeout_height=4_475_520, # N + 4 * 20160 (4 weeks after the start) minimum_activation_height=0, lock_in_on_timeout=False, version='0.63.0', diff --git a/hathor/conf/testnet.yml b/hathor/conf/testnet.yml index 8babd1f32..336794a22 100644 --- a/hathor/conf/testnet.yml +++ b/hathor/conf/testnet.yml @@ -54,11 +54,11 @@ FEATURE_ACTIVATION: # NOP feature to test Feature Activation for Transactions NOP_FEATURE_1: bit: 0 - # N = 4_354_560 - # Expected to be reached around Sunday, 2024-11-17. - # Right now the best block is 4_326_600 on testnet (2024-11-07). - start_height: 4_354_560 # N - timeout_height: 4_394_880 # N + 2 * 20160 (2 weeks after the start) + # N = 4_394_880 + # start_height expected to be reached around Sunday, 2024-12-01. + # Right now the best block is 4_377_375 on testnet (2024-11-25). 
+ start_height: 4_394_880 # N + timeout_height: 4_475_520 # N + 4 * 20160 (4 weeks after the start) minimum_activation_height: 0 lock_in_on_timeout: false version: 0.63.0 From f2fb42a9646b2e6245e53ab3bc84e140e5ec1711 Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Wed, 27 Nov 2024 16:03:13 +0100 Subject: [PATCH 55/61] chore: update mainnet and testnet checkpoints and include helper script --- extras/update_checkpoints.py | 112 +++++++++++++++++++++++++++++++++++ hathor/conf/mainnet.py | 14 +++++ hathor/conf/mainnet.yml | 14 +++++ hathor/conf/testnet.py | 27 +++++++++ hathor/conf/testnet.yml | 27 +++++++++ 5 files changed, 194 insertions(+) create mode 100755 extras/update_checkpoints.py diff --git a/extras/update_checkpoints.py b/extras/update_checkpoints.py new file mode 100755 index 000000000..e83538b5c --- /dev/null +++ b/extras/update_checkpoints.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python +""" +Usage: update_checkpoints.py [-h] [-n NETWORK] + +Helper script to update the config checkpoint list. + +options: + -h, --help show this help message and exit + -n NETWORK, --network NETWORK + The network to update (default: mainnet) + +For example: + +$ ./extras/update_checkpoints.py +New checkpoints to add for mainnet: + + 4_800_000: 00000000000000000716b8d9e96591ba7cb2d02c3d2d1d98d514f41c240fdff7 + 4_900_000: 0000000000000000079b1c1ebf48d351a7d31dcc55c5b4cf79ade79089a20f5a + 5_000_000: 000000000000000006c9167db1cc7e93fcf1c3014da6c6221390d03d1640c9b3 + + cp(4_800_000, bytes.fromhex('00000000000000000716b8d9e96591ba7cb2d02c3d2d1d98d514f41c240fdff7')), + cp(4_900_000, bytes.fromhex('0000000000000000079b1c1ebf48d351a7d31dcc55c5b4cf79ade79089a20f5a')), + cp(5_000_000, bytes.fromhex('000000000000000006c9167db1cc7e93fcf1c3014da6c6221390d03d1640c9b3')), + +The output can then be copied and pasted into `hathor/conf/mainnet.yml` and `hathor/conf/mainnet.py` +""" + +import requests +import yaml +import argparse + +# Built-in network configurations +NETWORKS: dict[str, dict[str, str]] = { + 'mainnet': { + 'config_file': 'hathor/conf/mainnet.yml', + 'node_url': 'https://node1.mainnet.hathor.network/v1a', + }, + 'testnet': { + 'config_file': 'hathor/conf/testnet.yml', + 'node_url': 'https://node1.golf.testnet.hathor.network/v1a', + }, + # Add more networks as needed +} + +CHECKPOINT_INTERVAL: int = 100_000 + + +def get_latest_height(node_url: str) -> int: + """Fetch the latest block height.""" + response = requests.get(f'{node_url}/transaction?type=block&count=1') + response.raise_for_status() + return response.json()['transactions'][0]['height'] + + +def get_hash_for_height(node_url: str, height: int) -> str: + """Fetch the hash for a given block height.""" + response = requests.get(f'{node_url}/block_at_height?height={height}') + response.raise_for_status() + return response.json()['block']['tx_id'] + + +def load_checkpoints(config_file: str) -> dict[str, int]: + """Load the checkpoints from the specified YAML config file.""" + with open(config_file, 'r') as file: + data = yaml.safe_load(file) + return data.get('CHECKPOINTS', {}) + + +def print_new_checkpoints(network_name: str) -> None: + """Print new checkpoints for the specified network.""" + if network_name not in NETWORKS: + print(f'Error: Unknown network {network_name}. 
Available networks: {", ".join(NETWORKS.keys())}') + return + + # Get the network configuration + network_config = NETWORKS[network_name] + config_file = network_config['config_file'] + node_url = network_config['node_url'] + + # Load existing checkpoints from the YAML file + current_checkpoints = load_checkpoints(config_file) + + # Get the latest block height + latest_height = get_latest_height(node_url) + + # Determine missing checkpoints + new_checkpoints = {} + for height in range(CHECKPOINT_INTERVAL, latest_height + 1, CHECKPOINT_INTERVAL): + if height not in current_checkpoints: + block_hash = get_hash_for_height(node_url, height) + new_checkpoints[height] = block_hash + + # Print new checkpoints + if new_checkpoints: + print(f'New checkpoints to add for {network_name}:\n') + for height, block_hash in sorted(new_checkpoints.items()): + print(f' {height:_}: {block_hash}') + print() + for height, block_hash in sorted(new_checkpoints.items()): + print(f''' cp({height:_}, bytes.fromhex('{block_hash}')),''') + else: + print(f'No new checkpoints needed for {network_name}. All up to date.') + + +if __name__ == '__main__': + # Parse command-line arguments + parser = argparse.ArgumentParser(description='Helper script to update the config checkpoint list.') + parser.add_argument('-n', '--network', default='mainnet', help='The network to update (default: mainnet)') + args = parser.parse_args() + + # Print new checkpoints for the specified network + print_new_checkpoints(args.network) diff --git a/hathor/conf/mainnet.py b/hathor/conf/mainnet.py index f580f9481..c5614e34b 100644 --- a/hathor/conf/mainnet.py +++ b/hathor/conf/mainnet.py @@ -72,6 +72,20 @@ cp(3_400_000, bytes.fromhex('000000000000000077242c961a0c6f708bc671a8372eb8b095311f091fddc6c3')), cp(3_500_000, bytes.fromhex('000000000000000a34ba20552c3cae9549b9c5ca07f644cf005328c948aa54d8')), cp(3_600_000, bytes.fromhex('000000000000000011031d9ff030cd9e6fe8a3766bbeda6f6337c40dd30fa65f')), + cp(3_700_000, bytes.fromhex('0000000000000006c6e7295efcf0929173cc47ece41afc652410b72f36cbeeda')), + cp(3_800_000, bytes.fromhex('00000000000000122f57d59c7f6736a83483dcf71c34978102d7e04ce4dc9a5d')), + cp(3_900_000, bytes.fromhex('00000000000000069edf3300d6c41451485d7aabdbea34425a2411b880e8a976')), + cp(4_000_000, bytes.fromhex('00000000000000043b11a6c86c3cdaf773a5183737f136e196e816f862e1e3ba')), + cp(4_100_000, bytes.fromhex('0000000000000020822d529b6fcd8611f5a174b1f44a6c478a2fec64a80233ad')), + cp(4_200_000, bytes.fromhex('00000000000000052ffc34875fab4e545bc9dc76f1212c4fdafab3b6d7a026cd')), + cp(4_300_000, bytes.fromhex('000000000000000e1ea2af0e25087c0977e944dd0ffdae5fdff54dda85ed95be')), + cp(4_400_000, bytes.fromhex('0000000000000000020dab883c57e21829b590ef61ff5230f5fdc9d572300945')), + cp(4_500_000, bytes.fromhex('00000000000000034d5ddf802a8ac8fbf17cf50747041e433d28d9f2bcb6ef02')), + cp(4_600_000, bytes.fromhex('000000000000000055bb4e5b6d942da13cb631f318cfdc292793f28ef8a338ca')), + cp(4_700_000, bytes.fromhex('000000000000000002ae1d75811b1050fc98ee7ef30c48cde117ebbb42f47e22')), + cp(4_800_000, bytes.fromhex('00000000000000000716b8d9e96591ba7cb2d02c3d2d1d98d514f41c240fdff7')), + cp(4_900_000, bytes.fromhex('0000000000000000079b1c1ebf48d351a7d31dcc55c5b4cf79ade79089a20f5a')), + cp(5_000_000, bytes.fromhex('000000000000000006c9167db1cc7e93fcf1c3014da6c6221390d03d1640c9b3')), ], SOFT_VOIDED_TX_IDS=list(map(bytes.fromhex, [ '0000000012a922a6887497bed9c41e5ed7dc7213cae107db295602168266cd02', diff --git a/hathor/conf/mainnet.yml b/hathor/conf/mainnet.yml index 
d32845449..d05a07e08 100644 --- a/hathor/conf/mainnet.yml +++ b/hathor/conf/mainnet.yml @@ -53,6 +53,20 @@ CHECKPOINTS: 3_400_000: 000000000000000077242c961a0c6f708bc671a8372eb8b095311f091fddc6c3 3_500_000: 000000000000000a34ba20552c3cae9549b9c5ca07f644cf005328c948aa54d8 3_600_000: 000000000000000011031d9ff030cd9e6fe8a3766bbeda6f6337c40dd30fa65f + 3_700_000: 0000000000000006c6e7295efcf0929173cc47ece41afc652410b72f36cbeeda + 3_800_000: 00000000000000122f57d59c7f6736a83483dcf71c34978102d7e04ce4dc9a5d + 3_900_000: 00000000000000069edf3300d6c41451485d7aabdbea34425a2411b880e8a976 + 4_000_000: 00000000000000043b11a6c86c3cdaf773a5183737f136e196e816f862e1e3ba + 4_100_000: 0000000000000020822d529b6fcd8611f5a174b1f44a6c478a2fec64a80233ad + 4_200_000: 00000000000000052ffc34875fab4e545bc9dc76f1212c4fdafab3b6d7a026cd + 4_300_000: 000000000000000e1ea2af0e25087c0977e944dd0ffdae5fdff54dda85ed95be + 4_400_000: 0000000000000000020dab883c57e21829b590ef61ff5230f5fdc9d572300945 + 4_500_000: 00000000000000034d5ddf802a8ac8fbf17cf50747041e433d28d9f2bcb6ef02 + 4_600_000: 000000000000000055bb4e5b6d942da13cb631f318cfdc292793f28ef8a338ca + 4_700_000: 000000000000000002ae1d75811b1050fc98ee7ef30c48cde117ebbb42f47e22 + 4_800_000: 00000000000000000716b8d9e96591ba7cb2d02c3d2d1d98d514f41c240fdff7 + 4_900_000: 0000000000000000079b1c1ebf48d351a7d31dcc55c5b4cf79ade79089a20f5a + 5_000_000: 000000000000000006c9167db1cc7e93fcf1c3014da6c6221390d03d1640c9b3 SOFT_VOIDED_TX_IDS: - 0000000012a922a6887497bed9c41e5ed7dc7213cae107db295602168266cd02 diff --git a/hathor/conf/testnet.py b/hathor/conf/testnet.py index 2334a7d7e..f2d322489 100644 --- a/hathor/conf/testnet.py +++ b/hathor/conf/testnet.py @@ -53,6 +53,33 @@ cp(1_400_000, bytes.fromhex('000000000df9cb786c68a643a52a67c22ab54e8b8e41cbe9b761133f6c8abbfe')), cp(1_500_000, bytes.fromhex('000000000c3591805f4748480b59ac1788f754fc004930985a487580e2b5de8f')), cp(1_600_000, bytes.fromhex('00000000060adfdfd7d488d4d510b5779cf35a3c50df7bcff941fbb6957be4d2')), + cp(1_700_000, bytes.fromhex('0000000007afc04aebad15b14fcd93c1b5193dc503b190433f55be8c218b6d12')), + cp(1_800_000, bytes.fromhex('00000000126f16af2ba934a60cf8f2da32d3ed2688c56ce8ff477e483a3ffc42')), + cp(1_900_000, bytes.fromhex('0000000005d2a2ba2231663187b460396189af0ffca7b2e93fccc85cde04cbdc')), + cp(2_000_000, bytes.fromhex('000000000009a8451ff2d5ec54951d717da2766aedb3131485466cc993879ee1')), + cp(2_100_000, bytes.fromhex('0000000009f961804cd7f43da05f08a94a2fa09f82c7d605afc5982ab242a7e4')), + cp(2_200_000, bytes.fromhex('0000000002e260b970846a89c23e754a763e7c5f1578b6ec4e67bdb94c667997')), + cp(2_300_000, bytes.fromhex('0000000006e0894c8f7fd029fe446a42433875647759183ba3fbb0ff0b7ceb64')), + cp(2_400_000, bytes.fromhex('0000000011ab28f3be17e3a098307fa73750cc8d74f1f60cfb44b524a60c94ec')), + cp(2_500_000, bytes.fromhex('00000000045d2bcc10c896bfc7d1f28788e3530a81f50ee096f386eec772634f')), + cp(2_600_000, bytes.fromhex('000000000766b9ac25e2ece5685effa834e61284e38f368c841210606bb1fdfc')), + cp(2_700_000, bytes.fromhex('0000000005d0ee31d0f47f6ff9aa570b9f25b9d44a8a59cea0e0f8a1729b9c90')), + cp(2_800_000, bytes.fromhex('000000000a5bd4f266fa13d2c0594cabf6465758f7f5814bde626032706b81e5')), + cp(2_900_000, bytes.fromhex('000000000b11b0a09ff0d7c2cfd9228f31c53008e700532e439d3a3d9c63fb8e')), + cp(3_000_000, bytes.fromhex('00000000013289569569cd51580183a2c870dfe5a395adaa00ae66fefe51af3d')), + cp(3_100_000, bytes.fromhex('00000000170c55e6ec207400bfc42786c1e0c32fe045a1d815f930daf2bf3020')), + cp(3_200_000, 
bytes.fromhex('00000000149986cb99c202136bd388fb2a7fcba4bdfd6ac049069ac5e08a587f')), + cp(3_300_000, bytes.fromhex('000000000e16f87ac7133639cb52a99574944b8457939396e7faf1615fcfdb0f')), + cp(3_400_000, bytes.fromhex('000000000f551f6224a459904436072f5ff10fd3db17f2d7e25b1ef9b149c121')), + cp(3_500_000, bytes.fromhex('0000000006572b8cf41130e88776adf8583e970905df2afe593ca31c91ab0c4c')), + cp(3_600_000, bytes.fromhex('000000000215fcc7018cc31bbfb943ca43c6297529fa008bf34665f3ac64d340')), + cp(3_700_000, bytes.fromhex('000000000dbf5e8ab4f90f2187db6db429c9d0cb8169051ce8a9e79b810509d7')), + cp(3_800_000, bytes.fromhex('00000000030411ec36c7f5386a94e147460d86592f85459e0eadd5cd0e3da7b4')), + cp(3_900_000, bytes.fromhex('000000000bc2c7078a3c59d878196f1491aad45a0df9d312909d85482ac8d714')), + cp(4_000_000, bytes.fromhex('000000000eba0dae3ec27cf5596ef49731744edebadb9fbae42160b6aa2e2461')), + cp(4_100_000, bytes.fromhex('00000000052aa77fd8db71d5306257f9fe068c3401d95b17fcedcccfc9b76c82')), + cp(4_200_000, bytes.fromhex('00000000010a8dae043c84fcb2cef6a2b42a28279b95af20ab5a098acf2a3565')), + cp(4_300_000, bytes.fromhex('000000000019da781ef75fa5f59c5537d8ed18b64c589c3e036109cfb1d84f7d')), ], FEATURE_ACTIVATION=FeatureActivationSettings( default_threshold=15_120, # 15120 = 75% of evaluation_interval (20160) diff --git a/hathor/conf/testnet.yml b/hathor/conf/testnet.yml index 336794a22..f8dcf5290 100644 --- a/hathor/conf/testnet.yml +++ b/hathor/conf/testnet.yml @@ -35,6 +35,33 @@ CHECKPOINTS: 1_400_000: 000000000df9cb786c68a643a52a67c22ab54e8b8e41cbe9b761133f6c8abbfe 1_500_000: 000000000c3591805f4748480b59ac1788f754fc004930985a487580e2b5de8f 1_600_000: 00000000060adfdfd7d488d4d510b5779cf35a3c50df7bcff941fbb6957be4d2 + 1_700_000: 0000000007afc04aebad15b14fcd93c1b5193dc503b190433f55be8c218b6d12 + 1_800_000: 00000000126f16af2ba934a60cf8f2da32d3ed2688c56ce8ff477e483a3ffc42 + 1_900_000: 0000000005d2a2ba2231663187b460396189af0ffca7b2e93fccc85cde04cbdc + 2_000_000: 000000000009a8451ff2d5ec54951d717da2766aedb3131485466cc993879ee1 + 2_100_000: 0000000009f961804cd7f43da05f08a94a2fa09f82c7d605afc5982ab242a7e4 + 2_200_000: 0000000002e260b970846a89c23e754a763e7c5f1578b6ec4e67bdb94c667997 + 2_300_000: 0000000006e0894c8f7fd029fe446a42433875647759183ba3fbb0ff0b7ceb64 + 2_400_000: 0000000011ab28f3be17e3a098307fa73750cc8d74f1f60cfb44b524a60c94ec + 2_500_000: 00000000045d2bcc10c896bfc7d1f28788e3530a81f50ee096f386eec772634f + 2_600_000: 000000000766b9ac25e2ece5685effa834e61284e38f368c841210606bb1fdfc + 2_700_000: 0000000005d0ee31d0f47f6ff9aa570b9f25b9d44a8a59cea0e0f8a1729b9c90 + 2_800_000: 000000000a5bd4f266fa13d2c0594cabf6465758f7f5814bde626032706b81e5 + 2_900_000: 000000000b11b0a09ff0d7c2cfd9228f31c53008e700532e439d3a3d9c63fb8e + 3_000_000: 00000000013289569569cd51580183a2c870dfe5a395adaa00ae66fefe51af3d + 3_100_000: 00000000170c55e6ec207400bfc42786c1e0c32fe045a1d815f930daf2bf3020 + 3_200_000: 00000000149986cb99c202136bd388fb2a7fcba4bdfd6ac049069ac5e08a587f + 3_300_000: 000000000e16f87ac7133639cb52a99574944b8457939396e7faf1615fcfdb0f + 3_400_000: 000000000f551f6224a459904436072f5ff10fd3db17f2d7e25b1ef9b149c121 + 3_500_000: 0000000006572b8cf41130e88776adf8583e970905df2afe593ca31c91ab0c4c + 3_600_000: 000000000215fcc7018cc31bbfb943ca43c6297529fa008bf34665f3ac64d340 + 3_700_000: 000000000dbf5e8ab4f90f2187db6db429c9d0cb8169051ce8a9e79b810509d7 + 3_800_000: 00000000030411ec36c7f5386a94e147460d86592f85459e0eadd5cd0e3da7b4 + 3_900_000: 000000000bc2c7078a3c59d878196f1491aad45a0df9d312909d85482ac8d714 + 4_000_000: 
000000000eba0dae3ec27cf5596ef49731744edebadb9fbae42160b6aa2e2461 + 4_100_000: 00000000052aa77fd8db71d5306257f9fe068c3401d95b17fcedcccfc9b76c82 + 4_200_000: 00000000010a8dae043c84fcb2cef6a2b42a28279b95af20ab5a098acf2a3565 + 4_300_000: 000000000019da781ef75fa5f59c5537d8ed18b64c589c3e036109cfb1d84f7d FEATURE_ACTIVATION: default_threshold: 15_120 # 15120 = 75% of evaluation_interval (20160) From 52a8fcbbe600864d3bd7693dc9b177f4431e0244 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Wed, 4 Dec 2024 17:55:47 -0300 Subject: [PATCH 56/61] chore(events): remove event queue from unsafe mode (#1190) --- hathor/builder/cli_builder.py | 13 ++++++++----- hathor/builder/resources_builder.py | 2 +- hathor/cli/run_node.py | 5 +++-- hathor/cli/run_node_args.py | 1 + tests/others/test_cli_builder.py | 2 +- 5 files changed, 14 insertions(+), 9 deletions(-) diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index a5724de98..059007190 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -242,6 +242,9 @@ def create_manager(self, reactor: Reactor) -> HathorManager: pubsub = PubSubManager(reactor) if self._args.x_enable_event_queue: + self.log.warn('--x-enable-event-queue is deprecated and will be removed, use --enable-event-queue instead') + + if self._args.x_enable_event_queue or self._args.enable_event_queue: self.event_ws_factory = EventWebsocketFactory( peer_id=str(peer.id), settings=settings, @@ -270,8 +273,8 @@ def create_manager(self, reactor: Reactor) -> HathorManager: full_verification = False if self._args.x_full_verification: self.check_or_raise( - not self._args.x_enable_event_queue, - '--x-full-verification cannot be used with --x-enable-event-queue' + not self._args.x_enable_event_queue and not self._args.enable_event_queue, + '--x-full-verification cannot be used with --enable-event-queue' ) full_verification = True @@ -282,8 +285,8 @@ def create_manager(self, reactor: Reactor) -> HathorManager: execution_manager=execution_manager ) - if self._args.x_enable_event_queue: - self.log.info('--x-enable-event-queue flag provided. ' + if self._args.x_enable_event_queue or self._args.enable_event_queue: + self.log.info('--enable-event-queue flag provided. 
' 'The events detected by the full node will be stored and can be retrieved by clients') self.feature_service = FeatureService(settings=settings, tx_storage=tx_storage) @@ -378,7 +381,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: checkpoints=settings.CHECKPOINTS, environment_info=get_environment_info(args=str(self._args), peer_id=str(peer.id)), full_verification=full_verification, - enable_event_queue=self._args.x_enable_event_queue, + enable_event_queue=self._args.x_enable_event_queue or self._args.enable_event_queue, bit_signaling_service=bit_signaling_service, verification_service=verification_service, cpu_mining_service=cpu_mining_service, diff --git a/hathor/builder/resources_builder.py b/hathor/builder/resources_builder.py index f067cc3d9..bfc47e7a7 100644 --- a/hathor/builder/resources_builder.py +++ b/hathor/builder/resources_builder.py @@ -307,7 +307,7 @@ def create_resources(self) -> server.Site: ws_factory.subscribe(self.manager.pubsub) # Event websocket resource - if self._args.x_enable_event_queue: + if self._args.x_enable_event_queue or self._args.enable_event_queue: root.putChild(b'event_ws', WebSocketResource(self.event_ws_factory)) root.putChild(b'event', EventResource(self.manager._event_manager)) diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 58cface89..ba4f40816 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -55,7 +55,6 @@ class RunNode: ('--x-sync-bridge', lambda args: bool(args.x_sync_bridge)), ('--x-sync-v1-only', lambda args: bool(args.x_sync_v1_only)), ('--x-sync-v2-only', lambda args: bool(args.x_sync_v2_only)), - ('--x-enable-event-queue', lambda args: bool(args.x_enable_event_queue)), ('--x-asyncio-reactor', lambda args: bool(args.x_asyncio_reactor)), ('--x-ipython-kernel', lambda args: bool(args.x_ipython_kernel)), ] @@ -146,7 +145,9 @@ def create_parser(cls) -> ArgumentParser: sync_args.add_argument('--x-sync-bridge', action='store_true', help='Enable running both sync protocols.') parser.add_argument('--x-localhost-only', action='store_true', help='Only connect to peers on localhost') parser.add_argument('--x-rocksdb-indexes', action='store_true', help=SUPPRESS) - parser.add_argument('--x-enable-event-queue', action='store_true', help='Enable event queue mechanism') + parser.add_argument('--x-enable-event-queue', action='store_true', + help='Deprecated: use --enable-event-queue instead.') + parser.add_argument('--enable-event-queue', action='store_true', help='Enable event queue mechanism') parser.add_argument('--peer-id-blacklist', action='extend', default=[], nargs='+', type=str, help='Peer IDs to forbid connection') parser.add_argument('--config-yaml', type=str, help='Configuration yaml filepath') diff --git a/hathor/cli/run_node_args.py b/hathor/cli/run_node_args.py index dca87ed16..6f076253f 100644 --- a/hathor/cli/run_node_args.py +++ b/hathor/cli/run_node_args.py @@ -76,6 +76,7 @@ class RunNodeArgs(BaseModel, extra=Extra.allow): x_localhost_only: bool x_rocksdb_indexes: bool x_enable_event_queue: bool + enable_event_queue: bool peer_id_blacklist: list[str] config_yaml: Optional[str] signal_support: set[Feature] diff --git a/tests/others/test_cli_builder.py b/tests/others/test_cli_builder.py index a83f00899..d1a95c6e6 100644 --- a/tests/others/test_cli_builder.py +++ b/tests/others/test_cli_builder.py @@ -192,4 +192,4 @@ def test_event_queue_with_memory_storage(self): def test_event_queue_with_full_verification(self): args = ['--x-enable-event-queue', '--memory-storage', 
'--x-full-verification'] - self._build_with_error(args, '--x-full-verification cannot be used with --x-enable-event-queue') + self._build_with_error(args, '--x-full-verification cannot be used with --enable-event-queue') From 2a12952bbd7658009e551781ed3d12807afc960b Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Tue, 20 Aug 2024 16:22:30 +0200 Subject: [PATCH 57/61] refactor(p2p): move received peers storage to protocol --- hathor/conf/settings.py | 9 ++ hathor/p2p/manager.py | 140 ++++++++++++++------ hathor/p2p/peer.py | 35 ++--- hathor/p2p/peer_discovery/bootstrap.py | 4 +- hathor/p2p/peer_discovery/dns.py | 4 +- hathor/p2p/peer_discovery/peer_discovery.py | 8 +- hathor/p2p/peer_endpoint.py | 13 ++ hathor/p2p/peer_storage.py | 24 +++- hathor/p2p/protocol.py | 22 +++ hathor/p2p/resources/add_peers.py | 2 +- hathor/p2p/states/ready.py | 2 +- hathor/simulator/fake_connection.py | 5 +- tests/others/test_metrics.py | 2 +- tests/p2p/test_bootstrap.py | 4 +- tests/p2p/test_connections.py | 10 +- tests/p2p/test_connectivity.py | 56 ++++++++ tests/p2p/test_peer_id.py | 22 +-- tests/p2p/test_peer_storage.py | 30 +++++ tests/p2p/test_protocol.py | 22 +-- tests/poa/test_poa_simulation.py | 5 +- tests/resources/p2p/test_status.py | 4 +- 21 files changed, 321 insertions(+), 102 deletions(-) create mode 100644 tests/p2p/test_connectivity.py create mode 100644 tests/p2p/test_peer_storage.py diff --git a/hathor/conf/settings.py b/hathor/conf/settings.py index 3edb664a7..9fd45bc3f 100644 --- a/hathor/conf/settings.py +++ b/hathor/conf/settings.py @@ -288,6 +288,9 @@ def GENESIS_TX2_TIMESTAMP(self) -> int: # Maximum period without receiving any messages from ther peer (in seconds). PEER_IDLE_TIMEOUT: int = 60 + # Maximum number of entrypoints that we accept that a peer broadcasts + PEER_MAX_ENTRYPOINTS: int = 30 + # Filepath of ca certificate file to generate connection certificates CA_FILEPATH: str = os.path.join(os.path.dirname(__file__), '../p2p/ca.crt') @@ -431,6 +434,12 @@ def GENESIS_TX2_TIMESTAMP(self) -> int: # more than enough for the forseeable future MAX_MEMPOOL_RECEIVING_TIPS: int = 1000 + # Max number of peers simultanously stored in the node + MAX_VERIFIED_PEERS: int = 10_000 + + # Max number of peers simultanously stored per-connection + MAX_UNVERIFIED_PEERS_PER_CONN: int = 100 + # Used to enable nano contracts. # # This should NEVER be enabled for mainnet and testnet, since both networks will diff --git a/hathor/p2p/manager.py b/hathor/p2p/manager.py index d7e7045c9..56371da69 100644 --- a/hathor/p2p/manager.py +++ b/hathor/p2p/manager.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from collections import deque from typing import TYPE_CHECKING, Any, Iterable, NamedTuple, Optional from structlog import get_logger @@ -30,7 +31,7 @@ from hathor.p2p.peer_discovery import PeerDiscovery from hathor.p2p.peer_endpoint import PeerAddress, PeerEndpoint from hathor.p2p.peer_id import PeerId -from hathor.p2p.peer_storage import UnverifiedPeerStorage, VerifiedPeerStorage +from hathor.p2p.peer_storage import VerifiedPeerStorage from hathor.p2p.protocol import HathorProtocol from hathor.p2p.rate_limiter import RateLimiter from hathor.p2p.states.ready import ReadyState @@ -81,10 +82,10 @@ class GlobalRateLimiter: manager: Optional['HathorManager'] connections: set[HathorProtocol] connected_peers: dict[PeerId, HathorProtocol] + new_connection_from_queue: deque[PeerId] connecting_peers: dict[IStreamClientEndpoint, _ConnectingPeer] handshaking_peers: set[HathorProtocol] whitelist_only: bool - unverified_peer_storage: UnverifiedPeerStorage verified_peer_storage: VerifiedPeerStorage _sync_factories: dict[SyncVersion, SyncAgentFactory] _enabled_sync_versions: set[SyncVersion] @@ -156,12 +157,12 @@ def __init__( # List of peers connected and ready to communicate. self.connected_peers = {} - # List of peers received from the network. - # We cannot trust their identity before we connect to them. - self.unverified_peer_storage = UnverifiedPeerStorage() + # Queue of ready peer-id's used by connect_to_peer_from_connection_queue to choose the next peer to pull a + # random new connection from + self.new_connection_from_queue = deque() # List of known peers. - self.verified_peer_storage = VerifiedPeerStorage() # dict[string (peer.id), PublicPeer] + self.verified_peer_storage = VerifiedPeerStorage(rng=self.rng, max_size=self._settings.MAX_VERIFIED_PEERS) # Maximum unseen time before removing a peer (seconds). self.max_peer_unseen_dt: float = 30 * 60 # 30-minutes @@ -181,6 +182,11 @@ def __init__( # Timestamp of the last time sync was updated. self._last_sync_rotate: float = 0. + # Connect to new peers in a timed loop, instead of as soon as possible + self.lc_connect = LoopingCall(self.connect_to_peer_from_connection_queue) + self.lc_connect.clock = self.reactor + self.lc_connect_interval = 0.2 # seconds + # A timer to try to reconnect to the disconnect known peers. if self._settings.ENABLE_PEER_WHITELIST: self.wl_reconnect = LoopingCall(self.update_whitelist) @@ -272,7 +278,7 @@ def do_discovery(self) -> None: Do a discovery and connect on all discovery strategies. """ for peer_discovery in self.peer_discoveries: - coro = peer_discovery.discover_and_connect(self.connect_to) + coro = peer_discovery.discover_and_connect(self.connect_to_endpoint) Deferred.fromCoroutine(coro) def disable_rate_limiter(self) -> None: @@ -293,6 +299,7 @@ def start(self) -> None: if self.manager is None: raise TypeError('Class was built incorrectly without a HathorManager.') + self._start_peer_connect_loop() self.lc_reconnect.start(5, now=False) self.lc_sync_update.start(self.lc_sync_update_interval, now=False) @@ -319,7 +326,28 @@ def _handle_whitelist_reconnect_err(self, *args: Any, **kwargs: Any) -> None: self.log.error('whitelist reconnect had an exception. 
Start looping call again.', args=args, kwargs=kwargs) self.reactor.callLater(30, self._start_whitelist_reconnect) + def _start_peer_connect_loop(self) -> None: + # The deferred returned by the LoopingCall start method + # executes when the looping call stops running + # https://docs.twistedmatrix.com/en/stable/api/twisted.internet.task.LoopingCall.html + d = self.lc_connect.start(self.lc_connect_interval, now=True) + d.addErrback(self._handle_peer_connect_err) + + def _handle_peer_connect_err(self, *args: Any, **kwargs: Any) -> None: + # This method will be called when an exception happens inside the peer connect loop + # and ends up stopping the looping call. + # We log the error and start the looping call again. + self.log.error( + 'connect_to_peer_from_connection_queue had an exception. Start looping call again.', + args=args, + kwargs=kwargs, + ) + self.reactor.callLater(self.lc_connect_interval, self._start_peer_connect_loop) + def stop(self) -> None: + if self.lc_connect.running: + self.lc_connect.stop() + if self.lc_reconnect.running: self.lc_reconnect.stop() @@ -406,10 +434,10 @@ def on_peer_ready(self, protocol: HathorProtocol) -> None: """Called when a peer is ready.""" assert protocol.peer is not None self.verified_peer_storage.add_or_replace(protocol.peer) - assert protocol.peer.id is not None self.handshaking_peers.remove(protocol) - self.unverified_peer_storage.pop(protocol.peer.id, None) + for conn in self.iter_all_connections(): + conn.unverified_peer_storage.remove(protocol.peer) # we emit the event even if it's a duplicate peer as a matching # NETWORK_PEER_DISCONNECTED will be emitted regardless @@ -419,7 +447,8 @@ def on_peer_ready(self, protocol: HathorProtocol) -> None: peers_count=self._get_peers_count() ) - if protocol.peer.id in self.connected_peers: + peer_id = protocol.peer.id + if peer_id in self.connected_peers: # connected twice to same peer self.log.warn('duplicate connection to peer', protocol=protocol) conn = self.get_connection_to_drop(protocol) @@ -428,7 +457,11 @@ def on_peer_ready(self, protocol: HathorProtocol) -> None: # the new connection is being dropped, so don't save it to connected_peers return - self.connected_peers[protocol.peer.id] = protocol + self.connected_peers[peer_id] = protocol + if peer_id not in self.new_connection_from_queue: + self.new_connection_from_queue.append(peer_id) + else: + self.log.warn('peer already in queue', peer=str(peer_id)) # In case it was a retry, we must reset the data only here, after it gets ready protocol.peer.info.reset_retry_timestamp() @@ -436,7 +469,7 @@ def on_peer_ready(self, protocol: HathorProtocol) -> None: if len(self.connected_peers) <= self.MAX_ENABLED_SYNC: protocol.enable_sync() - if protocol.peer.id in self.always_enable_sync: + if peer_id in self.always_enable_sync: protocol.enable_sync() # Notify other peers about this new peer connection. 
@@ -456,7 +489,8 @@ def on_peer_disconnect(self, protocol: HathorProtocol) -> None: if protocol in self.handshaking_peers: self.handshaking_peers.remove(protocol) if protocol._peer is not None: - existing_protocol = self.connected_peers.pop(protocol.peer.id, None) + peer_id = protocol.peer.id + existing_protocol = self.connected_peers.pop(peer_id, None) if existing_protocol is None: # in this case, the connection was closed before it got to READY state return @@ -466,7 +500,10 @@ def on_peer_disconnect(self, protocol: HathorProtocol) -> None: # A check for duplicate connections is done during PEER_ID state, but there's still a # chance it can happen if both connections start at the same time and none of them has # reached READY state while the other is on PEER_ID state - self.connected_peers[protocol.peer.id] = existing_protocol + self.connected_peers[peer_id] = existing_protocol + elif peer_id in self.new_connection_from_queue: + # now we're sure it can be removed from new_connection_from_queue + self.new_connection_from_queue.remove(peer_id) self.pubsub.publish( HathorEvents.NETWORK_PEER_DISCONNECTED, protocol=protocol, @@ -499,15 +536,6 @@ def is_peer_connected(self, peer_id: PeerId) -> bool: """ return peer_id in self.connected_peers - def on_receive_peer(self, peer: UnverifiedPeer, origin: Optional[ReadyState] = None) -> None: - """ Update a peer information in our storage, and instantly attempt to connect - to it if it is not connected yet. - """ - if peer.id == self.my_peer.id: - return - peer = self.unverified_peer_storage.add_or_merge(peer) - self.connect_to_if_not_connected(peer, int(self.reactor.seconds())) - def peers_cleanup(self) -> None: """Clean up aged peers.""" now = self.reactor.seconds() @@ -523,11 +551,45 @@ def peers_cleanup(self) -> None: for remove_peer in to_be_removed: self.verified_peer_storage.remove(remove_peer) - def reconnect_to_all(self) -> None: - """ It is called by the `lc_reconnect` timer and tries to connect to all known - peers. + def connect_to_peer_from_connection_queue(self) -> None: + """ It is called by the `lc_connect` looping call and tries to connect to a new peer. + """ + if not self.new_connection_from_queue: + self.log.debug('connection queue is empty') + return + assert self.manager is not None + self.log.debug('connect to peer from connection queue') + candidate_new_peers: list[UnverifiedPeer] + # we don't know if we will find a candidate, so we can't do `while True:` + for _ in range(len(self.new_connection_from_queue)): + # for a deque([1, 2, 3, 4]) this will get 1 and modify it to deque([2, 3, 4, 1]) + next_from_peer_id = self.new_connection_from_queue[0] + self.new_connection_from_queue.rotate(-1) + + protocol = self.connected_peers.get(next_from_peer_id) + if protocol is None: + self.log.error('expected protocol not found', peer_id=str(next_from_peer_id)) + assert self.new_connection_from_queue.pop() == next_from_peer_id + continue + candidate_new_peers = [ + candidate_peer + for candidate_peer_id, candidate_peer in protocol.unverified_peer_storage.items() + if candidate_peer_id not in self.connected_peers or candidate_peer_id not in self.connecting_peers + ] + if candidate_new_peers: + break + else: + self.log.debug('no new peers in the connection queue') + # this means we rotated through the whole queue and did not find any candidate + return - TODO(epnichols): Should we always connect to *all*? Should there be a max #? 
+ peer = self.rng.choice(candidate_new_peers) + self.log.debug('random peer chosen', peer=str(peer.id), entrypoints=peer.info.entrypoints_as_str()) + now = self.reactor.seconds() + self.connect_to_peer(peer, int(now)) + + def reconnect_to_all(self) -> None: + """ It is called by the `lc_reconnect` timer and tries to connect to all known peers. """ self.peers_cleanup() # when we have no connected peers left, run the discovery process again @@ -536,10 +598,10 @@ def reconnect_to_all(self) -> None: if now - self._last_discovery >= self.PEER_DISCOVERY_INTERVAL: self._last_discovery = now self.do_discovery() - # We need to use list() here because the dict might change inside connect_to_if_not_connected + # We need to use list() here because the dict might change inside connect_to_peer # when the peer is disconnected and without entrypoint for peer in list(self.verified_peer_storage.values()): - self.connect_to_if_not_connected(peer, int(now)) + self.connect_to_peer(peer, int(now)) def update_whitelist(self) -> Deferred[None]: from twisted.web.client import readBody @@ -582,7 +644,7 @@ def _update_whitelist_cb(self, body: bytes) -> None: for peer_id in peers_to_remove: self.manager.remove_peer_from_whitelist_and_disconnect(peer_id) - def connect_to_if_not_connected(self, peer: UnverifiedPeer | PublicPeer, now: int) -> None: + def connect_to_peer(self, peer: UnverifiedPeer | PublicPeer, now: int) -> None: """ Attempts to connect if it is not connected to the peer. """ if not peer.info.entrypoints or ( @@ -602,15 +664,16 @@ def connect_to_if_not_connected(self, peer: UnverifiedPeer | PublicPeer, now: in assert peer.id is not None if peer.info.can_retry(now): if self.enable_ipv6 and not self.disable_ipv4: - addr = self.rng.choice(peer.info.entrypoints) + addr = self.rng.choice(list(peer.info.entrypoints)) elif self.enable_ipv6 and self.disable_ipv4: addr = self.rng.choice(peer.info.get_ipv6_only_entrypoints()) elif not self.enable_ipv6 and not self.disable_ipv4: addr = self.rng.choice(peer.info.get_ipv4_only_entrypoints()) else: raise ValueError('IPv4 is disabled and IPv6 is not enabled') - - self.connect_to(addr.with_id(peer.id), peer) + self.connect_to_endpoint(addr.with_id(peer.id), peer) + else: + self.log.debug('connecting too often, skip retrying', peer=str(peer.id)) def _connect_to_callback( self, @@ -628,14 +691,17 @@ def _connect_to_callback( protocol.wrappedProtocol.on_outbound_connect(entrypoint, peer) self.connecting_peers.pop(endpoint) - def connect_to( + def connect_to_endpoint( self, entrypoint: PeerEndpoint, peer: UnverifiedPeer | PublicPeer | None = None, use_ssl: bool | None = None, ) -> None: - """ Attempt to connect to a peer, even if a connection already exists. - Usually you should call `connect_to_if_not_connected`. + """ Attempt to connect directly to an endpoint, prefer calling `connect_to_peer` when possible. + + This method does not take into account the peer's id (since we might not even know it, or have verified it even + if we know). But this method will check if there's already a connection open to the given endpoint and skip it + if there is one. If `use_ssl` is True, then the connection will be wraped by a TLS. 
""" @@ -747,7 +813,7 @@ def update_hostname_entrypoints(self, *, old_hostname: str | None, new_hostname: def _add_hostname_entrypoint(self, hostname: str, address: IPv4Address | IPv6Address) -> None: hostname_entrypoint = PeerAddress.from_hostname_address(hostname, address) - self.my_peer.info.entrypoints.append(hostname_entrypoint) + self.my_peer.info.entrypoints.add(hostname_entrypoint) def get_connection_to_drop(self, protocol: HathorProtocol) -> HathorProtocol: """ When there are duplicate connections, determine which one should be dropped. diff --git a/hathor/p2p/peer.py b/hathor/p2p/peer.py index 8bc963b93..f3e0dfa05 100644 --- a/hathor/p2p/peer.py +++ b/hathor/p2p/peer.py @@ -106,11 +106,11 @@ class PeerInfo: """ Stores entrypoint and connection attempts information. """ - entrypoints: list[PeerAddress] = field(default_factory=list) - retry_timestamp: int = 0 # should only try connecting to this peer after this timestamp - retry_interval: int = 5 # how long to wait for next connection retry. It will double for each failure - retry_attempts: int = 0 # how many retries were made - last_seen: float = inf # last time this peer was seen + entrypoints: set[PeerAddress] = field(default_factory=set) + retry_timestamp: int = 0 # should only try connecting to this peer after this timestamp + retry_interval: int = 5 # how long to wait for next connection retry. It will double for each failure + retry_attempts: int = 0 # how many retries were made + last_seen: float = inf # last time this peer was seen flags: set[str] = field(default_factory=set) _settings: HathorSettings = field(default_factory=get_global_settings, repr=False) @@ -121,21 +121,18 @@ def get_ipv6_only_entrypoints(self) -> list[PeerAddress]: return list(filter(lambda e: e.is_ipv6(), self.entrypoints)) def ipv4_entrypoints_as_str(self) -> list[str]: - return list(map(str, self.get_ipv4_only_entrypoints())) + return sorted(map(str, self.get_ipv4_only_entrypoints())) def ipv6_entrypoints_as_str(self) -> list[str]: - return list(map(str, self.get_ipv6_only_entrypoints())) + return sorted(map(str, self.get_ipv6_only_entrypoints())) def entrypoints_as_str(self) -> list[str]: """Return a list of entrypoints serialized as str""" - return list(map(str, self.entrypoints)) + return sorted(map(str, self.entrypoints)) def _merge(self, other: PeerInfo) -> None: """Actual merge execution, must only be made after verifications.""" - # Merge entrypoints. - for ep in other.entrypoints: - if ep not in self.entrypoints: - self.entrypoints.append(ep) + self.entrypoints.update(other.entrypoints) async def validate_entrypoint(self, protocol: HathorProtocol) -> bool: """ Validates if connection entrypoint is one of the peer entrypoints @@ -237,7 +234,7 @@ def create_from_json(cls, data: dict[str, Any]) -> Self: It is to create an UnverifiedPeer from a peer connection. 
""" peer_id = PeerId(data['id']) - endpoints = [] + endpoints = set() for endpoint_str in data.get('entrypoints', []): # We have to parse using PeerEndpoint to be able to support older peers that still @@ -245,12 +242,14 @@ def create_from_json(cls, data: dict[str, Any]) -> Self: endpoint = PeerEndpoint.parse(endpoint_str) if endpoint.peer_id is not None and endpoint.peer_id != peer_id: raise ValueError(f'conflicting peer_id: {endpoint.peer_id} != {peer_id}') - endpoints.append(endpoint.addr) + endpoints.add(endpoint.addr) - return cls( + obj = cls( id=peer_id, info=PeerInfo(entrypoints=endpoints), ) + obj.validate() + return obj def merge(self, other: UnverifiedPeer) -> None: """ Merge two UnverifiedPeer objects, checking that they have the same @@ -259,6 +258,12 @@ def merge(self, other: UnverifiedPeer) -> None: """ assert self.id == other.id self.info._merge(other.info) + self.validate() + + def validate(self) -> None: + """Check if there are too many entrypoints.""" + if len(self.info.entrypoints) > self.info._settings.PEER_MAX_ENTRYPOINTS: + raise InvalidPeerIdException('too many entrypoints') @dataclass(slots=True) diff --git a/hathor/p2p/peer_discovery/bootstrap.py b/hathor/p2p/peer_discovery/bootstrap.py index 55b5e9f16..23399e2ed 100644 --- a/hathor/p2p/peer_discovery/bootstrap.py +++ b/hathor/p2p/peer_discovery/bootstrap.py @@ -37,6 +37,6 @@ def __init__(self, entrypoints: list[PeerEndpoint]): self.entrypoints = entrypoints @override - async def discover_and_connect(self, connect_to: Callable[[PeerEndpoint], None]) -> None: + async def discover_and_connect(self, connect_to_endpoint: Callable[[PeerEndpoint], None]) -> None: for entrypoint in self.entrypoints: - connect_to(entrypoint) + connect_to_endpoint(entrypoint) diff --git a/hathor/p2p/peer_discovery/dns.py b/hathor/p2p/peer_discovery/dns.py index c5dfe74d6..9ef792a96 100644 --- a/hathor/p2p/peer_discovery/dns.py +++ b/hathor/p2p/peer_discovery/dns.py @@ -53,13 +53,13 @@ def do_lookup_text(self, host: str) -> Deferred[LookupResult]: return lookupText(host) @override - async def discover_and_connect(self, connect_to: Callable[[PeerEndpoint], None]) -> None: + async def discover_and_connect(self, connect_to_endpoint: Callable[[PeerEndpoint], None]) -> None: """ Run DNS lookup for host and connect to it This is executed when starting the DNS Peer Discovery and first connecting to the network """ for host in self.hosts: for entrypoint in (await self.dns_seed_lookup(host)): - connect_to(entrypoint) + connect_to_endpoint(entrypoint) async def dns_seed_lookup(self, host: str) -> set[PeerEndpoint]: """ Run a DNS lookup for TXT, A, and AAAA records and return a list of connection strings. diff --git a/hathor/p2p/peer_discovery/peer_discovery.py b/hathor/p2p/peer_discovery/peer_discovery.py index 7d040fae2..ae8ee626b 100644 --- a/hathor/p2p/peer_discovery/peer_discovery.py +++ b/hathor/p2p/peer_discovery/peer_discovery.py @@ -23,10 +23,10 @@ class PeerDiscovery(ABC): """ @abstractmethod - async def discover_and_connect(self, connect_to: Callable[[PeerEndpoint], None]) -> None: - """ This method must discover the peers and call `connect_to` for each of them. + async def discover_and_connect(self, connect_to_endpoint: Callable[[PeerEndpoint], None]) -> None: + """ This method must discover the peers and call `connect_to_endpoint` for each of them. - :param connect_to: Function which will be called for each discovered peer. - :type connect_to: function + :param connect_to_endpoint: Function which will be called for each discovered peer. 
+ :type connect_to_endpoint: function """ raise NotImplementedError diff --git a/hathor/p2p/peer_endpoint.py b/hathor/p2p/peer_endpoint.py index 62e4624a2..b98ec28fc 100644 --- a/hathor/p2p/peer_endpoint.py +++ b/hathor/p2p/peer_endpoint.py @@ -64,6 +64,9 @@ """ IPV6_REGEX = re.compile(r'''^(([0-9a-fA-F]{1,4}:){7}([0-9a-fA-F]{1,4}|:)|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:))$''') # noqa: E501 +# A host with length 64 and over would be rejected later by twisted +MAX_HOST_LEN = 63 + class Protocol(Enum): TCP = 'tcp' @@ -261,6 +264,14 @@ def parse(cls, description: str) -> PeerEndpoint: >>> str(PeerEndpoint.parse('tcp://foo.bar.baz:40403/')) 'tcp://foo.bar.baz:40403' + >>> str(PeerEndpoint.parse('tcp://foooooooooooooooooooo.baaaaaaaaaaaaaaaaaar.baaaaaaaaaaaaaaaaaaz:40403/')) + 'tcp://foooooooooooooooooooo.baaaaaaaaaaaaaaaaaar.baaaaaaaaaaaaaaaaaaz:40403' + + >>> PeerEndpoint.parse('tcp://foooooooooooooooooooo.baaaaaaaaaaaaaaaaaar.baaaaaaaaaaaaaaaaaazz:40403/') + Traceback (most recent call last): + ... + ValueError: hostname too long + >>> PeerEndpoint.parse('tcp://127.0.0.1:40403/?id=123') Traceback (most recent call last): ... @@ -317,6 +328,8 @@ def _parse_address_parts(description: str) -> tuple[Protocol, str, int, str]: host = url.hostname if host is None: raise ValueError(f'expected a host: "{description}"') + if len(host) > MAX_HOST_LEN: + raise ValueError('hostname too long') port = url.port if port is None: raise ValueError(f'expected a port: "{description}"') diff --git a/hathor/p2p/peer_storage.py b/hathor/p2p/peer_storage.py index b6a433077..6f3744439 100644 --- a/hathor/p2p/peer_storage.py +++ b/hathor/p2p/peer_storage.py @@ -18,6 +18,7 @@ from hathor.p2p.peer import PublicPeer, UnverifiedPeer from hathor.p2p.peer_id import PeerId +from hathor.util import Random class GenericPeer(Protocol): @@ -36,6 +37,18 @@ class _BasePeerStorage(dict[PeerId, PeerType]): """ Base class for VerifiedPeerStorage and UnverifiedPeerStorage, do not use directly. """ + def __init__(self, *, rng: Random, max_size: int) -> None: + self.rng = rng + self.max_size = max_size + + def _ensure_max_size(self) -> None: + to_remove_count = len(self) - self.max_size + if to_remove_count < 1: + return + to_remove = self.rng.choices(list(self.keys()), k=to_remove_count) + for k in to_remove: + self.pop(k) + def add(self, peer: PeerType) -> None: """ Add a new peer to the storage. @@ -45,6 +58,7 @@ def add(self, peer: PeerType) -> None: if peer.id in self: raise ValueError('Peer has already been added') self[peer.id] = peer + self._ensure_max_size() def add_or_merge(self, peer: PeerType) -> PeerType: """ Add a peer to the storage if it has not been added yet. Otherwise, merge it with the existing peer. @@ -76,14 +90,16 @@ def remove(self, peer: GenericPeer) -> None: class VerifiedPeerStorage(_BasePeerStorage[PublicPeer]): - """ VerifiedPeerStorage is used to store all peers that we have connected to and verified. + """ Used to store all peers that we have connected to and verified. - It is a dict of PublicPeer objects, and peers can be retrieved by their `peer.id`. + It is a dict of `PublicPeer` objects that should live in the `ConnectionsManager`, the keys are the `peer.id` of + the remote peers. 
""" class UnverifiedPeerStorage(_BasePeerStorage[UnverifiedPeer]): - """ UnverifiedPeerStorage is used to store all received peers, we haven't verified their ids/entrypoints yet. + """ Used to store all peers that we have connected to and verified. - It is a dict of Peer objects, and peers can be retrieved by their `peer.id`. + It is a dict of `UnverifiedPeer` objects that should live in a `HathorProtocol`, the keys are the `peer.id` of + the remote peers. """ diff --git a/hathor/p2p/protocol.py b/hathor/p2p/protocol.py index cd90601e8..b582fcb77 100644 --- a/hathor/p2p/protocol.py +++ b/hathor/p2p/protocol.py @@ -29,6 +29,7 @@ from hathor.p2p.peer import PrivatePeer, PublicPeer, UnverifiedPeer from hathor.p2p.peer_endpoint import PeerEndpoint from hathor.p2p.peer_id import PeerId +from hathor.p2p.peer_storage import UnverifiedPeerStorage from hathor.p2p.rate_limiter import RateLimiter from hathor.p2p.states import BaseState, HelloState, PeerIdState, ReadyState from hathor.p2p.sync_version import SyncVersion @@ -164,6 +165,13 @@ def __init__( self.use_ssl: bool = use_ssl + # List of peers received from the network. + # We cannot trust their identity before we connect to them. + self.unverified_peer_storage = UnverifiedPeerStorage( + rng=self.connections.rng, + max_size=self._settings.MAX_UNVERIFIED_PEERS_PER_CONN, + ) + # Protocol version is initially unset self.sync_version = None @@ -368,6 +376,20 @@ def disconnect(self, reason: str = '', *, force: bool = False) -> None: else: transport.abortConnection() + def on_receive_peer(self, peer: UnverifiedPeer) -> None: + """ Update a peer information in our storage, the manager's connection loop will pick it later. + """ + # ignore when the remote echo backs our own peer + if peer.id == self.my_peer.id: + return + # ignore peers we've already connected to + if peer.id in self.connections.verified_peer_storage: + return + # merge with known previous information received from this peer since we don't know what's right (a peer can + # change their entrypoints, but the old could still echo, since we haven't connected yet don't assume anything + # and just merge them) + self.unverified_peer_storage.add_or_merge(peer) + def handle_error(self, payload: str) -> None: """ Executed when an ERROR command is received. 
""" diff --git a/hathor/p2p/resources/add_peers.py b/hathor/p2p/resources/add_peers.py index c8faeb5dc..fcfe9732d 100644 --- a/hathor/p2p/resources/add_peers.py +++ b/hathor/p2p/resources/add_peers.py @@ -86,7 +86,7 @@ def already_connected(endpoint: PeerEndpoint) -> bool: pd = BootstrapPeerDiscovery(filtered_peers) # this fires and forget the coroutine, which is compatible with the original behavior - coro = pd.discover_and_connect(self.manager.connections.connect_to) + coro = pd.discover_and_connect(self.manager.connections.connect_to_endpoint) Deferred.fromCoroutine(coro) ret = {'success': True, 'peers': [str(p) for p in filtered_peers]} diff --git a/hathor/p2p/states/ready.py b/hathor/p2p/states/ready.py index fe1924347..7d10dcc98 100644 --- a/hathor/p2p/states/ready.py +++ b/hathor/p2p/states/ready.py @@ -187,7 +187,7 @@ def handle_peers(self, payload: str) -> None: for data in received_peers: peer = UnverifiedPeer.create_from_json(data) if self.protocol.connections: - self.protocol.connections.on_receive_peer(peer, origin=self) + self.protocol.on_receive_peer(peer) self.log.debug('received peers', payload=payload) def send_ping_if_necessary(self) -> None: diff --git a/hathor/simulator/fake_connection.py b/hathor/simulator/fake_connection.py index 3c030c901..4f569e818 100644 --- a/hathor/simulator/fake_connection.py +++ b/hathor/simulator/fake_connection.py @@ -275,9 +275,10 @@ def reconnect(self) -> None: self._proto1 = self.manager1.connections.server_factory.buildProtocol(self.addr2) self._proto2 = self.manager2.connections.client_factory.buildProtocol(self.addr1) - # When _fake_bootstrap_id is set we don't pass the peer because that's how bootstrap calls connect_to() + # When _fake_bootstrap_id is set we don't pass the peer because that's how bootstrap calls + # connect_to_endpoint() peer = self._proto1.my_peer.to_unverified_peer() if self._fake_bootstrap_id is False else None - self.manager2.connections.connect_to(self.entrypoint, peer) + self.manager2.connections.connect_to_endpoint(self.entrypoint, peer) connecting_peers = list(self.manager2.connections.connecting_peers.values()) for connecting_peer in connecting_peers: diff --git a/tests/others/test_metrics.py b/tests/others/test_metrics.py index b46f6985b..0fb201377 100644 --- a/tests/others/test_metrics.py +++ b/tests/others/test_metrics.py @@ -72,7 +72,7 @@ def test_connections_manager_integration(self): # Execution endpoint = PeerEndpoint.parse('tcp://127.0.0.1:8005') # This will trigger sending to the pubsub one of the network events - manager.connections.connect_to(endpoint, use_ssl=True) + manager.connections.connect_to_endpoint(endpoint, use_ssl=True) self.run_to_completion() diff --git a/tests/p2p/test_bootstrap.py b/tests/p2p/test_bootstrap.py index 9855a0fda..7ae668057 100644 --- a/tests/p2p/test_bootstrap.py +++ b/tests/p2p/test_bootstrap.py @@ -19,10 +19,10 @@ def __init__(self, mocked_host_ports: list[tuple[str, int]]): self.mocked_host_ports = mocked_host_ports @override - async def discover_and_connect(self, connect_to: Callable[[PeerEndpoint], None]) -> None: + async def discover_and_connect(self, connect_to_endpoint: Callable[[PeerEndpoint], None]) -> None: for host, port in self.mocked_host_ports: addr = PeerAddress(Protocol.TCP, host, port) - connect_to(addr.with_id()) + connect_to_endpoint(addr.with_id()) class MockDNSPeerDiscovery(DNSPeerDiscovery): diff --git a/tests/p2p/test_connections.py b/tests/p2p/test_connections.py index db5a85f1e..a0c910dda 100644 --- a/tests/p2p/test_connections.py +++ 
diff --git a/tests/p2p/test_connections.py b/tests/p2p/test_connections.py
index db5a85f1e..a0c910dda 100644
--- a/tests/p2p/test_connections.py
+++ b/tests/p2p/test_connections.py
@@ -18,7 +18,7 @@ def test_manager_connections(self) -> None:
         manager: HathorManager = self.create_peer('testnet', enable_sync_v1=True, enable_sync_v2=False)

         endpoint = PeerEndpoint.parse('tcp://127.0.0.1:8005')
-        manager.connections.connect_to(endpoint, use_ssl=True)
+        manager.connections.connect_to_endpoint(endpoint, use_ssl=True)

         self.assertIn(endpoint, manager.connections.iter_not_ready_endpoints())
         self.assertNotIn(endpoint, manager.connections.iter_ready_connections())
@@ -36,7 +36,7 @@ def test_manager_disabled_ipv6(self) -> None:
         )

         endpoint = PeerEndpoint.parse('tcp://[::1]:8005')
-        manager.connections.connect_to(endpoint, use_ssl=True)
+        manager.connections.connect_to_endpoint(endpoint, use_ssl=True)

         self.assertNotIn(endpoint, manager.connections.iter_not_ready_endpoints())
         self.assertNotIn(endpoint, manager.connections.iter_ready_connections())
@@ -54,10 +54,10 @@ def test_manager_enabled_ipv6_and_ipv4(self) -> None:
         )

         endpoint_ipv6 = PeerEndpoint.parse('tcp://[::3:2:1]:8005')
-        manager.connections.connect_to(endpoint_ipv6, use_ssl=True)
+        manager.connections.connect_to_endpoint(endpoint_ipv6, use_ssl=True)

         endpoint_ipv4 = PeerEndpoint.parse('tcp://1.2.3.4:8005')
-        manager.connections.connect_to(endpoint_ipv4, use_ssl=True)
+        manager.connections.connect_to_endpoint(endpoint_ipv4, use_ssl=True)

         self.assertIn(
             endpoint_ipv4.addr.host,
@@ -84,7 +84,7 @@ def test_manager_disabled_ipv4(self) -> None:
         )

         endpoint = PeerEndpoint.parse('tcp://127.0.0.1:8005')
-        manager.connections.connect_to(endpoint, use_ssl=True)
+        manager.connections.connect_to_endpoint(endpoint, use_ssl=True)

         self.assertEqual(0, len(list(manager.connections.iter_not_ready_endpoints())))
         self.assertEqual(0, len(list(manager.connections.iter_ready_connections())))
diff --git a/tests/p2p/test_connectivity.py b/tests/p2p/test_connectivity.py
new file mode 100644
index 000000000..328a17050
--- /dev/null
+++ b/tests/p2p/test_connectivity.py
@@ -0,0 +1,56 @@
+import time
+import urllib
+from contextlib import contextmanager
+from typing import Generator
+
+import requests
+
+from tests.utils import run_server
+
+
+@contextmanager
+def _run_servers_context(count: int) -> Generator[list[tuple[str, str]], None, None]:
+    """ Runs `count` number of `test.utils.run_server` that bootstrap in chain, yields a (endpoint, status_url) list.
+    """
+    if count > 80:
+        raise ValueError('cannot start more than 80 processes at once')
+    start_port = 8005
+    endpoint_and_status_urls = []
+    processes = []
+    try:
+        previous_endpoint: None | str = None
+        for listen_port in range(start_port, start_port + count):
+            status_port = listen_port + 80
+            endpoint = f'tcp://127.0.0.1:{listen_port}'
+            status_url = f'http://127.0.0.1:{status_port}'
+            # XXX: it's important for run_server to be inside the try because if it fails it will still terminate the
+            # ones that were previously started because they would have made it into the processes list
+            processes.append(run_server(listen=listen_port, status=status_port, bootstrap=previous_endpoint))
+            endpoint_and_status_urls.append((endpoint, status_url))
+            previous_endpoint = endpoint
+        yield endpoint_and_status_urls
+    finally:
+        for process in processes:
+            # XXX: this assumes process.terminate() will not fail
+            process.terminate()
+
+
+def test_manager_connection_transitivity() -> None:
+    """ Creates a chain of 4 peers that bootstrap to the previous one, they should all connect to each other.
+ """ + with _run_servers_context(4) as endpoint_status_pairs: + assert len(endpoint_status_pairs) == 4 + time.sleep(1) # 1 sec should be more than enough for the peers to connect to themselves + + statuses = [ + requests.get(urllib.parse.urljoin(status_url, '/v1a/status/')).json() + for _, status_url in endpoint_status_pairs + ] + + all_peer_ids = set(status['server']['id'] for status in statuses) + + for status in statuses: + peer_id = status['server']['id'] + all_other_peer_ids = all_peer_ids - {peer_id} + connected_peer_ids = {i['id'] for i in status['connections']['connected_peers']} + assert all_other_peer_ids == connected_peer_ids diff --git a/tests/p2p/test_peer_id.py b/tests/p2p/test_peer_id.py index 1f95cbd12..75030843b 100644 --- a/tests/p2p/test_peer_id.py +++ b/tests/p2p/test_peer_id.py @@ -72,7 +72,7 @@ def test_sign_verify_fail(self) -> None: def test_merge_peer(self) -> None: # Testing peer storage with merge of peers - peer_storage = VerifiedPeerStorage() + peer_storage = VerifiedPeerStorage(rng=self.rng, max_size=100) p1 = PrivatePeer.auto_generated() p2 = PrivatePeer.auto_generated() @@ -86,19 +86,19 @@ def test_merge_peer(self) -> None: peer = peer_storage[p1.id] self.assertEqual(peer.id, p1.id) self.assertEqual(peer.public_key, p1.public_key) - self.assertEqual(peer.info.entrypoints, []) + self.assertEqual(peer.info.entrypoints, set()) ep1 = PeerAddress.parse('tcp://127.0.0.1:1001') ep2 = PeerAddress.parse('tcp://127.0.0.1:1002') ep3 = PeerAddress.parse('tcp://127.0.0.1:1003') p3 = PrivatePeer.auto_generated().to_public_peer() - p3.info.entrypoints.append(ep1) - p3.info.entrypoints.append(ep2) + p3.info.entrypoints.add(ep1) + p3.info.entrypoints.add(ep2) p4 = PublicPeer(UnverifiedPeer(id=p3.id), public_key=p3.public_key) - p4.info.entrypoints.append(ep2) - p4.info.entrypoints.append(ep3) + p4.info.entrypoints.add(ep2) + p4.info.entrypoints.add(ep3) peer_storage.add_or_merge(p4) self.assertEqual(len(peer_storage), 2) @@ -213,16 +213,16 @@ def test_unverified_peer_to_json_roundtrip(self) -> None: peer_json_simple = dict( id=str(peer_id), - entrypoints=[addr1, addr2, addr3] + entrypoints=sorted({addr1, addr2, addr3}) ) result = UnverifiedPeer.create_from_json(peer_json_simple) assert result.id == peer_id - assert result.info.entrypoints == [ + assert result.info.entrypoints == { PeerAddress.parse(addr1), PeerAddress.parse(addr2), PeerAddress.parse(addr3), - ] + } assert result.to_json() == peer_json_simple # We support this for compatibility with old peers that may send ids in the URLs @@ -237,11 +237,11 @@ def test_unverified_peer_to_json_roundtrip(self) -> None: result = UnverifiedPeer.create_from_json(peer_json_with_ids) assert result.id == peer_id - assert result.info.entrypoints == [ + assert result.info.entrypoints == { PeerAddress.parse(addr1), PeerAddress.parse(addr2), PeerAddress.parse(addr3), - ] + } assert result.to_json() == peer_json_simple # the roundtrip erases the ids from the URLs other_peer_id = PrivatePeer.auto_generated().id diff --git a/tests/p2p/test_peer_storage.py b/tests/p2p/test_peer_storage.py new file mode 100644 index 000000000..8fbbad4f6 --- /dev/null +++ b/tests/p2p/test_peer_storage.py @@ -0,0 +1,30 @@ +import pytest + +from hathor.p2p.peer_storage import UnverifiedPeerStorage, VerifiedPeerStorage +from hathor.util import Random +from tests.unittest import PEER_ID_POOL + + +@pytest.fixture +def rng() -> Random: + import secrets + seed = secrets.randbits(64) + return Random(seed) + + +def test_unverified_peer_storage_max_size(rng: Random) -> 
diff --git a/tests/p2p/test_peer_storage.py b/tests/p2p/test_peer_storage.py
new file mode 100644
index 000000000..8fbbad4f6
--- /dev/null
+++ b/tests/p2p/test_peer_storage.py
@@ -0,0 +1,30 @@
+import pytest
+
+from hathor.p2p.peer_storage import UnverifiedPeerStorage, VerifiedPeerStorage
+from hathor.util import Random
+from tests.unittest import PEER_ID_POOL
+
+
+@pytest.fixture
+def rng() -> Random:
+    import secrets
+    seed = secrets.randbits(64)
+    return Random(seed)
+
+
+def test_unverified_peer_storage_max_size(rng: Random) -> None:
+    max_size = 5
+    peer_storage = UnverifiedPeerStorage(rng=rng, max_size=max_size)
+    for i in range(2 * max_size):
+        peer = PEER_ID_POOL[i].to_unverified_peer()
+        peer_storage.add(peer)
+    assert len(peer_storage) == max_size
+
+
+def test_verified_peer_storage_max_size(rng: Random) -> None:
+    max_size = 5
+    peer_storage = VerifiedPeerStorage(rng=rng, max_size=max_size)
+    for i in range(2 * max_size):
+        peer = PEER_ID_POOL[i].to_public_peer()
+        peer_storage.add(peer)
+    assert len(peer_storage) == max_size
diff --git a/tests/p2p/test_protocol.py b/tests/p2p/test_protocol.py
index 708af1f0d..7a054b578 100644
--- a/tests/p2p/test_protocol.py
+++ b/tests/p2p/test_protocol.py
@@ -78,8 +78,8 @@ def test_on_connect(self) -> None:

     def test_peer_with_entrypoint(self) -> None:
         entrypoint_str = 'tcp://192.168.1.1:54321'
         entrypoint = PeerAddress.parse(entrypoint_str)
-        self.peer1.info.entrypoints.append(entrypoint)
-        self.peer2.info.entrypoints.append(entrypoint)
+        self.peer1.info.entrypoints.add(entrypoint)
+        self.peer2.info.entrypoints.add(entrypoint)
         self.conn.run_one_step()  # HELLO

         msg1 = self.conn.peek_tr1_value()
@@ -228,9 +228,9 @@ def test_hello_without_ipv6_capability(self) -> None:
         entrypoint_1_ipv4 = PeerEndpoint.parse(f'tcp://192.168.1.1:{port1}')
         entrypoint_2_ipv4 = PeerEndpoint.parse(f'tcp://192.168.1.1:{port2}')

-        self.peer1.info.entrypoints.append(entrypoint_1_ipv6.addr)
-        self.peer1.info.entrypoints.append(entrypoint_1_ipv4.addr)
-        self.peer2.info.entrypoints.append(entrypoint_2_ipv4.addr)
+        self.peer1.info.entrypoints.add(entrypoint_1_ipv6.addr)
+        self.peer1.info.entrypoints.add(entrypoint_1_ipv4.addr)
+        self.peer2.info.entrypoints.add(entrypoint_2_ipv4.addr)

         conn = FakeConnection(manager1, manager2, addr1=addr1, addr2=addr2)
@@ -239,8 +239,8 @@
         self.assertEqual(len(conn.proto1.peer.info.entrypoints), 1)
         self.assertEqual(len(conn.proto2.peer.info.entrypoints), 1)
-        self.assertEqual(conn.proto1.peer.info.entrypoints[0].host, '192.168.1.1')
-        self.assertEqual(conn.proto2.peer.info.entrypoints[0].host, '192.168.1.1')
+        self.assertEqual(next(iter(conn.proto1.peer.info.entrypoints)).host, '192.168.1.1')
+        self.assertEqual(next(iter(conn.proto2.peer.info.entrypoints)).host, '192.168.1.1')

     def test_hello_with_ipv6_capability(self) -> None:
         """Tests the connection between peers with the IPV6 capability.
@@ -268,9 +268,9 @@ def test_hello_with_ipv6_capability(self) -> None:
         entrypoint_1_ipv4 = PeerEndpoint.parse(f'tcp://192.168.1.1:{port1}')
         entrypoint_2_ipv4 = PeerEndpoint.parse(f'tcp://192.168.1.1:{port2}')

-        self.peer1.info.entrypoints.append(entrypoint_1_ipv6.addr)
-        self.peer1.info.entrypoints.append(entrypoint_1_ipv4.addr)
-        self.peer2.info.entrypoints.append(entrypoint_2_ipv4.addr)
+        self.peer1.info.entrypoints.add(entrypoint_1_ipv6.addr)
+        self.peer1.info.entrypoints.add(entrypoint_1_ipv4.addr)
+        self.peer2.info.entrypoints.add(entrypoint_2_ipv4.addr)

         conn = FakeConnection(manager1, manager2, addr1=addr1, addr2=addr2)
@@ -281,7 +281,7 @@
         self.assertEqual(len(conn.proto2.peer.info.entrypoints), 2)
         self.assertTrue('::1' in map(lambda x: x.host, conn.proto2.peer.info.entrypoints))
         self.assertTrue('192.168.1.1' in map(lambda x: x.host, conn.proto2.peer.info.entrypoints))
-        self.assertEqual(conn.proto1.peer.info.entrypoints[0].host, '192.168.1.1')
+        self.assertEqual(next(iter(conn.proto1.peer.info.entrypoints)).host, '192.168.1.1')

     def test_invalid_same_peer_id(self) -> None:
         manager3 = self.create_peer(self.network, peer=self.peer1)
diff --git a/tests/poa/test_poa_simulation.py b/tests/poa/test_poa_simulation.py
index b0b787f6e..096a93fd1 100644
--- a/tests/poa/test_poa_simulation.py
+++ b/tests/poa/test_poa_simulation.py
@@ -30,6 +30,7 @@
 from hathor.crypto.util import get_address_b58_from_public_key_bytes, get_public_key_bytes_compressed
 from hathor.manager import HathorManager
 from hathor.simulator import FakeConnection
+from hathor.simulator.trigger import StopWhenTrue
 from hathor.transaction import BaseTransaction, Block, TxInput, TxOutput
 from hathor.transaction.genesis import generate_new_genesis
 from hathor.transaction.poa import PoaBlock
@@ -156,8 +157,8 @@ def test_two_producers(self) -> None:
         connection = FakeConnection(manager1, manager2)
         self.simulator.add_connection(connection)

-        # both managers are producing blocks
-        self.simulator.run(100)
+        trigger = StopWhenTrue(lambda: manager2.tx_storage.get_block_count() == 12)
+        assert self.simulator.run(200, trigger=trigger)

         assert manager1.tx_storage.get_block_count() == 12
         assert manager2.tx_storage.get_block_count() == 12
         assert manager1.tx_storage.get_best_block_tips() == manager2.tx_storage.get_best_block_tips()
diff --git a/tests/resources/p2p/test_status.py b/tests/resources/p2p/test_status.py
index 646ba6903..e7c322f74 100644
--- a/tests/resources/p2p/test_status.py
+++ b/tests/resources/p2p/test_status.py
@@ -18,13 +18,13 @@ def setUp(self):
         super().setUp()
         self.web = StubSite(StatusResource(self.manager))
         address1 = IPv4Address('TCP', '192.168.1.1', 54321)
-        self.manager.connections.my_peer.info.entrypoints.append(PeerAddress.from_address(address1))
+        self.manager.connections.my_peer.info.entrypoints.add(PeerAddress.from_address(address1))
         self.manager.peers_whitelist.append(self.get_random_peer_from_pool().id)
         self.manager.peers_whitelist.append(self.get_random_peer_from_pool().id)

         self.manager2 = self.create_peer('testnet')
         address2 = IPv4Address('TCP', '192.168.1.1', 54322)
-        self.manager2.connections.my_peer.info.entrypoints.append(PeerAddress.from_address(address2))
+        self.manager2.connections.my_peer.info.entrypoints.add(PeerAddress.from_address(address2))
         self.conn1 = FakeConnection(self.manager, self.manager2, addr1=address1, addr2=address2)

     @inlineCallbacks
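On the test_poa_simulation change above: instead of running the simulator for a fixed 100 seconds and hoping both producers finished, the test now stops as soon as a predicate holds and asserts that it actually did. A minimal sketch of such a predicate-driven loop, assuming a step-based simulator; the real `StopWhenTrue` lives in hathor.simulator.trigger and may differ in detail:

    # Illustrative predicate-trigger loop: advance step by step, stop early once
    # the condition holds or the time budget runs out.
    from typing import Callable


    def run_until(advance_one_step: Callable[[], None],
                  condition: Callable[[], bool],
                  max_steps: int) -> bool:
        for _ in range(max_steps):
            if condition():
                return True        # stopped because the trigger fired
            advance_one_step()
        return condition()          # False means the budget ran out first

Asserting on the return value, as the diff does with `assert self.simulator.run(200, trigger=trigger)`, turns a silent timeout into a test failure instead of a flaky pass.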
From f89376531936f740ce91fb1802581772475a6541 Mon Sep 17 00:00:00 2001
From: Jan Segre
Date: Wed, 11 Dec 2024 17:19:20 +0100
Subject: [PATCH 58/61] chore(docker): include python suffix tags on rc images

---
 extras/github/docker.py      |  9 +++------
 extras/github/test_docker.py | 15 ++++++++++-----
 2 files changed, 13 insertions(+), 11 deletions(-)

diff --git a/extras/github/docker.py b/extras/github/docker.py
index 3a0c667e9..b7ee9bbf4 100644
--- a/extras/github/docker.py
+++ b/extras/github/docker.py
@@ -97,12 +97,9 @@ def extract_pyver(filename):

     tags = set()

-    # We don't want a tag with a python suffix for release-candidates
-    if is_release_candidate:
-        version = base_version
-    else:
-        version = base_version + '-' + suffix
-    tags.add(version)
+    # Always include -python{Version} suffix variant
+    version = base_version + '-' + suffix
+    tags.add(version)

     if suffix == default_python:
         tags.add(base_version)
diff --git a/extras/github/test_docker.py b/extras/github/test_docker.py
index a38060acc..b5db27e67 100644
--- a/extras/github/test_docker.py
+++ b/extras/github/test_docker.py
@@ -97,13 +97,14 @@ def test_release_candidate_non_default_python(self):
         self.assertEqual(base_version, 'v0.53.0-rc.1')
         output = prep_tags(os.environ, base_version, is_release_candidate)
+        version_with_python = f'{base_version}-python{NON_DEFAULT_PYTHON_VERSION}'

         self.assertNotIn('slack-notification-version', output)
-        self.assertEqual(output['version'], base_version)
+        self.assertEqual(output['version'], version_with_python)
         self.assertEqual(output['login-dockerhub'], 'true')
         self.assertEqual(output['login-ghcr'], 'false')
-        self.assertEqual(output['tags'], 'dont-push--local-only')
-        self.assertEqual(output['push'], 'false')
+        self.assertEqual(output['tags'], f'mock_image:{version_with_python}')
+        self.assertEqual(output['push'], 'true')
         self.assertEqual(output['dockerfile'], 'Dockerfile')

     def test_release_candidate_default_python(self):
@@ -127,12 +128,16 @@
         self.assertEqual(base_version, 'v0.53.0-rc.1')
         output = prep_tags(os.environ, base_version, is_release_candidate)
+        version_with_python = f'{base_version}-python{DEFAULT_PYTHON_VERSION}'

         self.assertEqual(output['slack-notification-version'], base_version)
-        self.assertEqual(output['version'], base_version)
+        self.assertEqual(output['version'], version_with_python)
         self.assertEqual(output['login-dockerhub'], 'true')
         self.assertEqual(output['login-ghcr'], 'false')
-        self.assertEqual(output['tags'], 'mock_image:v0.53.0-rc.1')
+        self.assertEqual(
+            set(output['tags'].split(',')),
+            {f'mock_image:{version_with_python}', 'mock_image:v0.53.0-rc.1'},
+        )
         self.assertEqual(output['push'], 'true')
         self.assertEqual(output['dockerfile'], 'Dockerfile')
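The practical effect of the docker.py change above: a release candidate now always gets the python-suffixed tag, and additionally keeps the bare tag when it was built with the default Python. A quick sketch of the resulting tag set; `tag_set` is a hypothetical helper, the real logic lives inside prep_tags in extras/github/docker.py:

    # Illustrative reduction of the new tagging rule.
    def tag_set(base_version: str, suffix: str, default_python: str) -> set[str]:
        tags = {f'{base_version}-{suffix}'}    # always include the -python{X.Y} variant
        if suffix == default_python:
            tags.add(base_version)             # plus the bare tag for the default Python
        return tags


    assert tag_set('v0.53.0-rc.1', 'python3.11', 'python3.11') == {
        'v0.53.0-rc.1-python3.11',
        'v0.53.0-rc.1',
    }
    assert tag_set('v0.53.0-rc.1', 'python3.12', 'python3.11') == {
        'v0.53.0-rc.1-python3.12',
    }

These two cases match the expectations in the updated test_docker.py tests.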
From 692a74c09c8a319cb157eb3222c86d4db0916738 Mon Sep 17 00:00:00 2001
From: Gabriel Levcovitz
Date: Wed, 11 Dec 2024 20:38:29 -0300
Subject: [PATCH 59/61] fix: acc weight regression (#1193)

---
 hathor/transaction/base_transaction.py        | 19 +++++-----
 .../resources/transaction_confirmation.py     | 29 +++++++++++++------
 .../test_transaction_confirmation.py          |  5 ++--
 3 files changed, 34 insertions(+), 19 deletions(-)

diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py
index 41cbab100..a6a7b85ac 100644
--- a/hathor/transaction/base_transaction.py
+++ b/hathor/transaction/base_transaction.py
@@ -22,7 +22,7 @@
 from abc import ABC, abstractmethod
 from enum import IntEnum
 from itertools import chain
-from math import inf, isfinite, log
+from math import isfinite, log
 from struct import error as StructError, pack
 from typing import TYPE_CHECKING, Any, ClassVar, Generic, Iterator, Optional, TypeAlias, TypeVar
@@ -638,7 +638,12 @@ def reset_metadata(self) -> None:
             self.storage.save_transaction(self, only_metadata=True)

-    def update_accumulated_weight(self, *, stop_value: float = inf, save_file: bool = True) -> TransactionMetadata:
+    def update_accumulated_weight(
+        self,
+        *,
+        stop_value: int | None = None,
+        save_file: bool = True,
+    ) -> TransactionMetadata:
         """Calculates the tx's accumulated weight and update its metadata.

         It starts at the current transaction and does a BFS to the tips. In the
@@ -656,10 +661,10 @@ def update_accumulated_weight(self, *, stop_value: float = inf, save_file: bool
         assert self.storage is not None
         metadata = self.get_metadata()
-        if metadata.accumulated_weight > stop_value:
+        if stop_value is not None and metadata.accumulated_weight > stop_value:
             return metadata

-        accumulated_weight = weight_to_work(self.weight)
+        work = weight_to_work(self.weight)

         # TODO Another optimization is that, when we calculate the acc weight of a transaction, we
         # also partially calculate the acc weight of its descendants. If it were a DFS, when returning
@@ -674,11 +679,11 @@
         from hathor.transaction.storage.traversal import BFSTimestampWalk
         bfs_walk = BFSTimestampWalk(self.storage, is_dag_funds=True, is_dag_verifications=True, is_left_to_right=True)
         for tx in bfs_walk.run(self, skip_root=True):
-            accumulated_weight += weight_to_work(tx.weight)
-            if accumulated_weight > stop_value:
+            work += weight_to_work(tx.weight)
+            if stop_value is not None and work > stop_value:
                 break

-        metadata.accumulated_weight = accumulated_weight
+        metadata.accumulated_weight = work

         if save_file:
             self.storage.save_transaction(self, only_metadata=True)
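On the regression fix above: `metadata.accumulated_weight` now accumulates integer work rather than log-scale weight, and `stop_value` is expressed in the same unit. A small illustration of the relationship, assuming work is roughly 2**weight; the exact rounding used by weight_to_work/work_to_weight in hathor.utils.weight may differ:

    # Sketch of the weight <-> work relationship the fix relies on.
    from math import log2


    def approx_weight_to_work(weight: float) -> int:
        return round(2 ** weight)               # illustrative rounding only


    def approx_work_to_weight(work: int) -> float:
        return log2(work) if work > 0 else 0.0


    # Summing work and converting back is not the same as summing weights,
    # which is why the accumulator must be an integer amount of work:
    w1, w2 = 60.0, 60.0
    total_work = approx_weight_to_work(w1) + approx_weight_to_work(w2)
    assert abs(approx_work_to_weight(total_work) - 61.0) < 1e-9   # 2^60 + 2^60 == 2^61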
diff --git a/hathor/transaction/resources/transaction_confirmation.py b/hathor/transaction/resources/transaction_confirmation.py
index 153d2b8f8..62ab52208 100644
--- a/hathor/transaction/resources/transaction_confirmation.py
+++ b/hathor/transaction/resources/transaction_confirmation.py
@@ -12,12 +12,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from math import log
+from math import log2
 from typing import Any

 from hathor.api_util import Resource, get_args, get_missing_params_msg, set_cors, validate_tx_hash
 from hathor.cli.openapi_files.register import register_resource
+from hathor.manager import HathorManager
 from hathor.util import json_dumpb
+from hathor.utils.weight import weight_to_work, work_to_weight
+
+N_CONFIRMATION_BLOCKS: int = 6


 @register_resource
@@ -28,7 +32,7 @@ class TransactionAccWeightResource(Resource):
     """
     isLeaf = True

-    def __init__(self, manager):
+    def __init__(self, manager: HathorManager) -> None:
         # Important to have the manager so we can know the tx_storage
         self.manager = manager

@@ -48,15 +52,21 @@ def _render_GET_data(self, requested_hash: str) -> dict[str, Any]:

         if meta.first_block:
             block = self.manager.tx_storage.get_transaction(meta.first_block)
-            stop_value = block.weight + log(6, 2)
-            meta = tx.update_accumulated_weight(stop_value=stop_value)
-            data['accumulated_weight'] = meta.accumulated_weight
-            data['accumulated_bigger'] = meta.accumulated_weight > stop_value
+            stop_value = block.weight + log2(N_CONFIRMATION_BLOCKS)
+            meta = tx.update_accumulated_weight(stop_value=weight_to_work(stop_value))
+            acc_weight = work_to_weight(meta.accumulated_weight)
+            acc_weight_raw = str(meta.accumulated_weight)
+            data['accumulated_weight'] = acc_weight
+            data['accumulated_weight_raw'] = acc_weight_raw
+            data['accumulated_bigger'] = acc_weight > stop_value
             data['stop_value'] = stop_value
-            data['confirmation_level'] = min(meta.accumulated_weight / stop_value, 1)
+            data['confirmation_level'] = min(acc_weight / stop_value, 1)
         else:
             meta = tx.update_accumulated_weight()
-            data['accumulated_weight'] = meta.accumulated_weight
+            acc_weight = work_to_weight(meta.accumulated_weight)
+            acc_weight_raw = str(meta.accumulated_weight)
+            data['accumulated_weight'] = acc_weight
+            data['accumulated_weight_raw'] = acc_weight_raw
             data['accumulated_bigger'] = False
             data['confirmation_level'] = 0
         return data
@@ -125,7 +135,8 @@ def render_GET(self, request):
                     'success': {
                         'summary': 'Success',
                         'value': {
-                            'accumulated_weight': 43237,
+                            'accumulated_weight': 15.4,
+                            'accumulated_weight_raw': '43238',
                             'confirmation_level': 0.88,
                             'stop_value': 14.5,
                             'accumulated_bigger': True,
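To make the numbers in the endpoint above concrete: with N_CONFIRMATION_BLOCKS = 6, a transaction is treated as fully confirmed once its accumulated work reaches roughly six times the work of its first block. A worked example following the same formulas as the resource (the specific values here are illustrative):

    # Worked example of the confirmation computation used by the resource above.
    from math import log2

    first_block_weight = 12.0
    stop_value = first_block_weight + log2(6)   # ~14.585, i.e. about 6 blocks' worth of work

    acc_work = 43238                            # integer work from update_accumulated_weight
    acc_weight = log2(acc_work)                 # ~15.4, the value the API reports

    confirmation_level = min(acc_weight / stop_value, 1)
    assert confirmation_level == 1              # 15.4 > 14.585, so fully confirmed

The raw integer work is also returned as `accumulated_weight_raw` (as a string) so clients that want exact arithmetic are not limited by the float representation.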
diff --git a/tests/resources/transaction/test_transaction_confirmation.py b/tests/resources/transaction/test_transaction_confirmation.py
index 3b95f7718..a04eeb641 100644
--- a/tests/resources/transaction/test_transaction_confirmation.py
+++ b/tests/resources/transaction/test_transaction_confirmation.py
@@ -2,7 +2,6 @@
 from hathor.simulator.utils import add_new_blocks
 from hathor.transaction.resources import TransactionAccWeightResource
-from hathor.utils.weight import weight_to_work
 from tests import unittest
 from tests.resources.base_resource import StubSite, _BaseResourceTest
 from tests.utils import add_blocks_unlock_reward, add_new_transactions
@@ -25,7 +24,7 @@ def test_get_data(self):
         )
         data_success = response_success.json_value()
         self.assertTrue(data_success['success'])
-        self.assertEqual(data_success['accumulated_weight'], weight_to_work(genesis_tx.weight))
+        self.assertEqual(data_success['accumulated_weight'], genesis_tx.weight)
         self.assertEqual(data_success['confirmation_level'], 0)

         # Adding blocks to have funds
@@ -39,7 +38,7 @@
             {b'id': bytes(tx.hash.hex(), 'utf-8')}
         )
         data_success2 = response_success2.json_value()
-        self.assertGreater(data_success2['accumulated_weight'], weight_to_work(tx.weight))
+        self.assertGreater(data_success2['accumulated_weight'], tx.weight)
         self.assertEqual(data_success2['confirmation_level'], 1)

         # Test sending hash that does not exist

From 92477434c6f46dd89bfcaff01639301643dfcc4e Mon Sep 17 00:00:00 2001
From: Jan Segre
Date: Fri, 3 Jan 2025 17:15:22 +0100
Subject: [PATCH 60/61] chore(p2p): decrease p2p.max_enabled_sync's default
 value: 16 -> 8

---
 hathor/conf/settings.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/hathor/conf/settings.py b/hathor/conf/settings.py
index 9fd45bc3f..654ca398f 100644
--- a/hathor/conf/settings.py
+++ b/hathor/conf/settings.py
@@ -403,7 +403,7 @@ def GENESIS_TX2_TIMESTAMP(self) -> int:
     PARTIALLY_VALIDATED_ID: bytes = b'pending-validation'

     # Maximum number of sync running simultaneously.
-    MAX_ENABLED_SYNC: int = 16
+    MAX_ENABLED_SYNC: int = 8

     # Time to update the peers that are running sync.
     SYNC_UPDATE_INTERVAL: int = 10 * 60  # seconds

From a53877893012825a97fcb97860b0ca84dc0df0b3 Mon Sep 17 00:00:00 2001
From: Jan Segre
Date: Fri, 3 Jan 2025 17:38:07 +0100
Subject: [PATCH 61/61] chore(feature_activation): update start_height before
 release

---
 hathor/conf/testnet.py  | 10 +++++-----
 hathor/conf/testnet.yml | 10 +++++-----
 2 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/hathor/conf/testnet.py b/hathor/conf/testnet.py
index f2d322489..dc6a42907 100644
--- a/hathor/conf/testnet.py
+++ b/hathor/conf/testnet.py
@@ -100,11 +100,11 @@
         # NOP feature to test Feature Activation for Transactions
         Feature.NOP_FEATURE_1: Criteria(
             bit=0,
-            # N = 4_394_880
-            # start_height expected to be reached around Sunday, 2024-12-01.
-            # Right now the best block is 4_377_375 on testnet (2024-11-25).
-            start_height=4_394_880,  # N
-            timeout_height=4_475_520,  # N + 4 * 20160 (4 weeks after the start)
+            # N = 4_495_680
+            # Expected to be reached around Tuesday, 2025-01-06.
+            # Right now the best block is 4_489_259 on testnet (2025-01-03).
+            start_height=4_495_680,  # N
+            timeout_height=4_576_320,  # N + 4 * 20160 (4 weeks after the start)
             minimum_activation_height=0,
             lock_in_on_timeout=False,
             version='0.63.0',
diff --git a/hathor/conf/testnet.yml b/hathor/conf/testnet.yml
index f8dcf5290..7a4fb0452 100644
--- a/hathor/conf/testnet.yml
+++ b/hathor/conf/testnet.yml
@@ -81,11 +81,11 @@ FEATURE_ACTIVATION:
     # NOP feature to test Feature Activation for Transactions
     NOP_FEATURE_1:
       bit: 0
-      # N = 4_394_880
-      # start_height expected to be reached around Sunday, 2024-12-01.
-      # Right now the best block is 4_377_375 on testnet (2024-11-25).
-      start_height: 4_394_880  # N
-      timeout_height: 4_475_520  # N + 4 * 20160 (4 weeks after the start)
+      # N = 4_495_680
+      # Expected to be reached around Tuesday, 2025-01-06.
+      # Right now the best block is 4_489_259 on testnet (2025-01-03).
+      start_height: 4_495_680  # N
+      timeout_height: 4_576_320  # N + 4 * 20160 (4 weeks after the start)
       minimum_activation_height: 0
       lock_in_on_timeout: false
       version: 0.63.0
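For readers checking the arithmetic in the last patch: at an average block time of about 30 seconds (an assumption stated here, not in the patch itself), one week corresponds to roughly 20,160 blocks, so the four-week timeout window is 80,640 blocks. A quick sanity check of the new heights:

    # Sanity check of the feature-activation heights in the patch above,
    # assuming the ~30 s average block time behind the "20160 blocks per week" figure.
    BLOCKS_PER_WEEK = 7 * 24 * 60 * 60 // 30   # 20160

    start_height = 4_495_680
    timeout_height = 4_576_320

    assert BLOCKS_PER_WEEK == 20160
    assert timeout_height - start_height == 4 * BLOCKS_PER_WEEK   # 80640 blocks, ~4 weeks
    assert start_height % BLOCKS_PER_WEEK == 0                    # N is a whole number of weeks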