From 26d31709d41dd03ff0bb1cc108a02b8b64e44001 Mon Sep 17 00:00:00 2001
From: harisang
Date: Thu, 28 Nov 2024 13:39:49 +0200
Subject: [PATCH 1/6] add raw batch data syncing

---
 src/main.py            |  27 +++---
 src/sync/batch_data.py |  56 +++++++------
 src/sync/common.py     | 183 +++++++++++++++++++++--------------------
 3 files changed, 136 insertions(+), 130 deletions(-)

diff --git a/src/main.py b/src/main.py
index e8a0fa8..6834222 100644
--- a/src/main.py
+++ b/src/main.py
@@ -37,6 +37,7 @@ def __init__(self) -> None:
         arguments, _ = parser.parse_known_args()
         self.sync_table: SyncTable = arguments.sync_table
 
+
 def main() -> None:
     """
     Main function
@@ -49,19 +50,19 @@ def main() -> None:
     web3 = Web3(
         Web3.HTTPProvider(os.environ.get("NODE_URL" + "_" + node_suffix(network)))
     )
-
-    # if args.sync_table == SyncTable.BATCH_DATA:
-    #     table = os.environ["BATCH_DATA_TARGET_TABLE"]
-    #     assert table, "BATCH DATA sync needs a BATCH_DATA_TARGET_TABLE env"
-    #     asyncio.run(
-    #         sync_batch_data(
-    #             web3,
-    #             orderbook,
-    #             config=BatchDataSyncConfig(table),
-    #         )
-    #     )
-    # else:
-    #     log.error(f"unsupported sync_table '{args.sync_table}'")
+
+    if args.sync_table == SyncTable.BATCH_DATA:
+        table = os.environ["BATCH_DATA_TARGET_TABLE"]
+        assert table, "BATCH DATA sync needs a BATCH_DATA_TARGET_TABLE env"
+        asyncio.run(
+            sync_batch_data(
+                web3,
+                orderbook,
+                config=BatchDataSyncConfig(table),
+            )
+        )
+    else:
+        log.error(f"unsupported sync_table '{args.sync_table}'")
 
 
 if __name__ == "__main__":
diff --git a/src/sync/batch_data.py b/src/sync/batch_data.py
index f296b8d..60a8a22 100644
--- a/src/sync/batch_data.py
+++ b/src/sync/batch_data.py
@@ -13,30 +13,34 @@
 log = set_log(__name__)
 
 
-# async def sync_batch_data(
-#     node: Web3,
-#     orderbook: OrderbookFetcher,
-#     config: BatchDataSyncConfig,
-# ) -> None:
-#     """Batch data Sync Logic"""
-#     load_dotenv()
-#     network = os.environ["NETWORK"]
+async def sync_batch_data(
+    node: Web3,
+    orderbook: OrderbookFetcher,
+    config: BatchDataSyncConfig,
+) -> None:
+    """Batch data Sync Logic"""
+    load_dotenv()
+    network = os.environ["NETWORK"]
 
-#     block_range_list, months_list, is_even = compute_block_and_month_range(node)
-#     for i, _ in enumerate(block_range_list):
-#         start_block = block_range_list[i][0]
-#         end_block = block_range_list[i][1]
-#         if is_even[i]:
-#             table_name = "raw_batch_data_latest_even_month_" + str(network)
-#         else:
-#             table_name = "raw_batch_data_latest_odd_month_" + str(network)
-#         block_range = BlockRange(block_from=start_block, block_to=end_block)
-#         log.info(
-#             f"About to process block range ({start_block}, {end_block}) for month {months_list[i]}"
-#         )
-#         batch_data = orderbook.get_batch_data(block_range)
-#         log.info("SQL query successfully executed. About to update analytics table.")
About to update analytics table.") -# batch_data.to_sql(table_name,orderbook._pg_engine(OrderbookEnv.ANALYTICS),if_exists= 'replace') -# log.info( -# f"batch data sync run completed successfully for month {months_list[i]}" -# ) + block_range_list, months_list, is_even = compute_block_and_month_range(node) + for i, _ in enumerate(block_range_list): + start_block = block_range_list[i][0] + end_block = block_range_list[i][1] + if is_even[i]: + table_name = "raw_batch_data_latest_even_month_" + str(network) + else: + table_name = "raw_batch_data_latest_odd_month_" + str(network) + block_range = BlockRange(block_from=start_block, block_to=end_block) + log.info( + f"About to process block range ({start_block}, {end_block}) for month {months_list[i]}" + ) + batch_data = orderbook.get_batch_data(block_range) + log.info("SQL query successfully executed. About to update analytics table.") + batch_data.to_sql( + table_name, + orderbook._pg_engine(OrderbookEnv.ANALYTICS), + if_exists="replace", + ) + log.info( + f"batch data sync run completed successfully for month {months_list[i]}" + ) diff --git a/src/sync/common.py b/src/sync/common.py index 9ea81b9..1d2bcfc 100644 --- a/src/sync/common.py +++ b/src/sync/common.py @@ -7,106 +7,107 @@ log = set_log(__name__) -# def find_block_with_timestamp(node: Web3, time_stamp: float) -> int: -# """ -# This implements binary search and returns the smallest block number -# whose timestamp is at least as large as the time_stamp argument passed in the function -# """ -# end_block_number = int(node.eth.get_block("finalized")["number"]) -# start_block_number = 1 -# close_in_seconds = 30 -# while True: -# mid_block_number = (start_block_number + end_block_number) // 2 -# block = node.eth.get_block(mid_block_number) -# block_time = block["timestamp"] -# difference_in_seconds = int((time_stamp - block_time)) +def find_block_with_timestamp(node: Web3, time_stamp: float) -> int: + """ + This implements binary search and returns the smallest block number + whose timestamp is at least as large as the time_stamp argument passed in the function + """ + end_block_number = int(node.eth.get_block("finalized")["number"]) + start_block_number = 1 + close_in_seconds = 30 -# if abs(difference_in_seconds) < close_in_seconds: -# break + while True: + mid_block_number = (start_block_number + end_block_number) // 2 + block = node.eth.get_block(mid_block_number) + block_time = block["timestamp"] + difference_in_seconds = int((time_stamp - block_time)) -# if difference_in_seconds < 0: -# end_block_number = mid_block_number - 1 -# else: -# start_block_number = mid_block_number + 1 + if abs(difference_in_seconds) < close_in_seconds: + break -# ## we now brute-force to ensure we have found the right block -# for b in range(mid_block_number - 200, mid_block_number + 200): -# block = node.eth.get_block(b) -# block_time_stamp = block["timestamp"] -# if block_time_stamp >= time_stamp: -# return int(block["number"]) -# # fallback in case correct block number hasn't been found -# # in that case, we will include some more blocks than necessary -# return mid_block_number + 200 + if difference_in_seconds < 0: + end_block_number = mid_block_number - 1 + else: + start_block_number = mid_block_number + 1 + ## we now brute-force to ensure we have found the right block + for b in range(mid_block_number - 200, mid_block_number + 200): + block = node.eth.get_block(b) + block_time_stamp = block["timestamp"] + if block_time_stamp >= time_stamp: + return int(block["number"]) + # fallback in case correct block 
diff --git a/src/sync/common.py b/src/sync/common.py
index 9ea81b9..1d2bcfc 100644
--- a/src/sync/common.py
+++ b/src/sync/common.py
@@ -7,106 +7,107 @@
 log = set_log(__name__)
 
 
-# def find_block_with_timestamp(node: Web3, time_stamp: float) -> int:
-#     """
-#     This implements binary search and returns the smallest block number
-#     whose timestamp is at least as large as the time_stamp argument passed in the function
-#     """
-#     end_block_number = int(node.eth.get_block("finalized")["number"])
-#     start_block_number = 1
-#     close_in_seconds = 30
-#     while True:
-#         mid_block_number = (start_block_number + end_block_number) // 2
-#         block = node.eth.get_block(mid_block_number)
-#         block_time = block["timestamp"]
-#         difference_in_seconds = int((time_stamp - block_time))
+def find_block_with_timestamp(node: Web3, time_stamp: float) -> int:
+    """
+    This implements binary search and returns the smallest block number
+    whose timestamp is at least as large as the time_stamp argument passed in the function
+    """
+    end_block_number = int(node.eth.get_block("finalized")["number"])
+    start_block_number = 1
+    close_in_seconds = 30
 
-#         if abs(difference_in_seconds) < close_in_seconds:
-#             break
+    while True:
+        mid_block_number = (start_block_number + end_block_number) // 2
+        block = node.eth.get_block(mid_block_number)
+        block_time = block["timestamp"]
+        difference_in_seconds = int((time_stamp - block_time))
 
-#         if difference_in_seconds < 0:
-#             end_block_number = mid_block_number - 1
-#         else:
-#             start_block_number = mid_block_number + 1
+        if abs(difference_in_seconds) < close_in_seconds:
+            break
 
-#     ## we now brute-force to ensure we have found the right block
-#     for b in range(mid_block_number - 200, mid_block_number + 200):
-#         block = node.eth.get_block(b)
-#         block_time_stamp = block["timestamp"]
-#         if block_time_stamp >= time_stamp:
-#             return int(block["number"])
-#     # fallback in case correct block number hasn't been found
-#     # in that case, we will include some more blocks than necessary
-#     return mid_block_number + 200
+        if difference_in_seconds < 0:
+            end_block_number = mid_block_number - 1
+        else:
+            start_block_number = mid_block_number + 1
 
+    ## we now brute-force to ensure we have found the right block
+    for b in range(mid_block_number - 200, mid_block_number + 200):
+        block = node.eth.get_block(b)
+        block_time_stamp = block["timestamp"]
+        if block_time_stamp >= time_stamp:
+            return int(block["number"])
+    # fallback in case correct block number hasn't been found
+    # in that case, we will include some more blocks than necessary
+    return mid_block_number + 200
 
-# def compute_block_and_month_range(  # pylint: disable=too-many-locals
-#     node: Web3,
-# ) -> Tuple[List[Tuple[int, int]], List[str], List[bool]]:
-#     """
-#     This determines the block range and the relevant months
-#     for which we will compute and upload data on Dune.
-#     """
-#     # We first compute the relevant block range
-#     # Here, we assume that the job runs at least once every 24h
-#     # Because of that, if it is the first day of month, we also
-#     # compute the previous month's table just to be on the safe side
-#     latest_finalized_block = node.eth.get_block("finalized")
+
+def compute_block_and_month_range(  # pylint: disable=too-many-locals
+    node: Web3,
+) -> Tuple[List[Tuple[int, int]], List[str], List[bool]]:
+    """
+    This determines the block range and the relevant months
+    for which we will compute and upload data on Dune.
+    """
+    # We first compute the relevant block range
+    # Here, we assume that the job runs at least once every 24h
+    # Because of that, if it is the first day of month, we also
+    # compute the previous month's table just to be on the safe side
 
-#     current_month_end_block = int(latest_finalized_block["number"])
-#     current_month_end_timestamp = latest_finalized_block["timestamp"]
+    latest_finalized_block = node.eth.get_block("finalized")
 
-#     current_month_end_datetime = datetime.fromtimestamp(
-#         current_month_end_timestamp, tz=timezone.utc
-#     )
-#     current_month_start_datetime = datetime(
-#         current_month_end_datetime.year, current_month_end_datetime.month, 1, 00, 00
-#     )
-#     current_month_start_timestamp = current_month_start_datetime.replace(
-#         tzinfo=timezone.utc
-#     ).timestamp()
+    current_month_end_block = int(latest_finalized_block["number"])
+    current_month_end_timestamp = latest_finalized_block["timestamp"]
 
-#     current_month_start_block = find_block_with_timestamp(
-#         node, current_month_start_timestamp
-#     )
+    current_month_end_datetime = datetime.fromtimestamp(
+        current_month_end_timestamp, tz=timezone.utc
+    )
+    current_month_start_datetime = datetime(
+        current_month_end_datetime.year, current_month_end_datetime.month, 1, 00, 00
+    )
+    current_month_start_timestamp = current_month_start_datetime.replace(
+        tzinfo=timezone.utc
+    ).timestamp()
 
-#     current_month = (
-#         f"{current_month_end_datetime.year}_{current_month_end_datetime.month}"
-#     )
-#     if current_month_end_datetime.month % 2 == 0:
-#         is_even = [True]
-#     else:
-#         is_even = [False]
-#     months_list = [current_month]
-#     block_range = [(current_month_start_block, current_month_end_block)]
-#     if current_month_end_datetime.day == 1:
-#         is_even.append(not is_even[0])
-#         if current_month_end_datetime.month == 1:
-#             previous_month = f"{current_month_end_datetime.year - 1}_12"
-#             previous_month_start_datetime = datetime(
-#                 current_month_end_datetime.year - 1, 12, 1, 00, 00
-#             )
-#         else:
-#             previous_month = f"""{current_month_end_datetime.year}_
-#             {current_month_end_datetime.month - 1}
-#             """
-#             previous_month_start_datetime = datetime(
-#                 current_month_end_datetime.year,
-#                 current_month_end_datetime.month - 1,
-#                 1,
-#                 00,
-#                 00,
-#             )
-#         months_list.append(previous_month)
-#         previous_month_start_timestamp = previous_month_start_datetime.replace(
-#             tzinfo=timezone.utc
-#         ).timestamp()
-#         previous_month_start_block = find_block_with_timestamp(
-#             node, previous_month_start_timestamp
-#         )
-#         previous_month_end_block = current_month_start_block
-#         block_range.append((previous_month_start_block, previous_month_end_block))
+    current_month_start_block = find_block_with_timestamp(
+        node, current_month_start_timestamp
+    )
 
-#     return block_range, months_list, is_even
+    current_month = (
+        f"{current_month_end_datetime.year}_{current_month_end_datetime.month}"
+    )
+    if current_month_end_datetime.month % 2 == 0:
+        is_even = [True]
+    else:
+        is_even = [False]
+    months_list = [current_month]
+    block_range = [(current_month_start_block, current_month_end_block)]
+    if current_month_end_datetime.day == 1:
+        is_even.append(not is_even[0])
+        if current_month_end_datetime.month == 1:
+            previous_month = f"{current_month_end_datetime.year - 1}_12"
+            previous_month_start_datetime = datetime(
+                current_month_end_datetime.year - 1, 12, 1, 00, 00
+            )
+        else:
+            previous_month = f"""{current_month_end_datetime.year}_
+            {current_month_end_datetime.month - 1}
+            """
+            previous_month_start_datetime = datetime(
+                current_month_end_datetime.year,
+                current_month_end_datetime.month - 1,
+                1,
+                00,
+                00,
+            )
+        months_list.append(previous_month)
+        previous_month_start_timestamp = previous_month_start_datetime.replace(
+            tzinfo=timezone.utc
+        ).timestamp()
+        previous_month_start_block = find_block_with_timestamp(
+            node, previous_month_start_timestamp
+        )
+        previous_month_end_block = current_month_start_block
+        block_range.append((previous_month_start_block, previous_month_end_block))
+
+    return block_range, months_list, is_even
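
The month-range computation above leans on find_block_with_timestamp returning the first block at or after a given timestamp. A standalone sketch of that search idea over a plain sorted list (assumptions: the list is sorted and contains an entry at or after the target; the patch's code additionally stops once it is within ~30 seconds and then scans a ±200 block window around the midpoint):

    def first_at_or_after(timestamps: list[int], target: int) -> int:
        # Classic lower-bound binary search: smallest index i with timestamps[i] >= target.
        lo, hi = 0, len(timestamps) - 1
        while lo < hi:
            mid = (lo + hi) // 2
            if timestamps[mid] >= target:
                hi = mid
            else:
                lo = mid + 1
        return lo

    assert first_at_or_after([10, 20, 30, 40], 25) == 2
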
From bd603b6b60ffcb41b7fa29cca9e058dac3f86c8b Mon Sep 17 00:00:00 2001
From: harisang
Date: Thu, 28 Nov 2024 14:06:12 +0200
Subject: [PATCH 2/6] cleanup

---
 __init__.py            |  0
 src/main.py            |  6 ------
 src/sync/batch_data.py |  3 ---
 src/sync/common.py     |  1 -
 src/sync/config.py     | 15 ---------------
 5 files changed, 25 deletions(-)
 create mode 100644 __init__.py
 delete mode 100644 src/sync/config.py

diff --git a/__init__.py b/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/main.py b/src/main.py
index 6834222..9a3be95 100644
--- a/src/main.py
+++ b/src/main.py
@@ -11,9 +11,6 @@
 from src.fetch.orderbook import OrderbookFetcher
 from src.logger import set_log
 from src.models.tables import SyncTable
-from src.sync.config import (
-    BatchDataSyncConfig,
-)
 from src.sync.batch_data import sync_batch_data
 from src.utils import node_suffix
 
@@ -52,13 +49,10 @@ def main() -> None:
     )
 
     if args.sync_table == SyncTable.BATCH_DATA:
-        table = os.environ["BATCH_DATA_TARGET_TABLE"]
-        assert table, "BATCH DATA sync needs a BATCH_DATA_TARGET_TABLE env"
         asyncio.run(
             sync_batch_data(
                 web3,
                 orderbook,
-                config=BatchDataSyncConfig(table),
             )
         )
     else:
diff --git a/src/sync/batch_data.py b/src/sync/batch_data.py
index 60a8a22..dbad352 100644
--- a/src/sync/batch_data.py
+++ b/src/sync/batch_data.py
@@ -1,11 +1,9 @@
 """Main Entry point for batch data sync"""
 import os
 from dotenv import load_dotenv
-from dune_client.client import DuneClient
 from web3 import Web3
 from src.fetch.orderbook import OrderbookFetcher, OrderbookEnv
 from src.logger import set_log
-from src.sync.config import BatchDataSyncConfig
 from src.sync.common import compute_block_and_month_range
 from src.models.block_range import BlockRange
 
@@ -16,7 +14,6 @@
 async def sync_batch_data(
     node: Web3,
     orderbook: OrderbookFetcher,
-    config: BatchDataSyncConfig,
 ) -> None:
     """Batch data Sync Logic"""
     load_dotenv()
diff --git a/src/sync/common.py b/src/sync/common.py
index 1d2bcfc..0d8457c 100644
--- a/src/sync/common.py
+++ b/src/sync/common.py
@@ -7,7 +7,6 @@
 log = set_log(__name__)
 
 
-
 def find_block_with_timestamp(node: Web3, time_stamp: float) -> int:
     """
     This implements binary search and returns the smallest block number
diff --git a/src/sync/config.py b/src/sync/config.py
deleted file mode 100644
index 1db4cbe..0000000
--- a/src/sync/config.py
+++ /dev/null
@@ -1,15 +0,0 @@
-"""Configuration details for sync jobs"""
-from __future__ import annotations
-
-from dataclasses import dataclass
-from pathlib import Path
-
-
-@dataclass
-class BatchDataSyncConfig:
-    """Configuration for batch data sync."""
-
-    # The name of the table to upload to
-    table: str = "batch_data_test"
-    # Description of the table (for creation)
-    description: str = "Table containing raw batch data"

From 2fe6b6d44f969215e894171393fecd3c931ea9cc Mon Sep 17 00:00:00 2001
From: harisang
Date: Thu, 28 Nov 2024 15:16:54 +0200
Subject: [PATCH 3/6] add .env.sample

---
 .env.sample | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/.env.sample b/.env.sample
index e69de29..01d70e8 100644
--- a/.env.sample
+++ b/.env.sample
@@ -0,0 +1,18 @@
+#Orderbook DB Credentials
+BARN_DB_URL=
+PROD_DB_URL=
+ANALYTICS_DB_URL=
+
+
+NETWORK=
+
+NODE_URL_ETHEREUM=
+NODE_URL_GNOSIS=
+NODE_URL_ARBITRUM=
+
+EPSILON_LOWER_ETHEREUM=
+EPSILON_UPPER_ETHEREUM=
+EPSILON_LOWER_GNOSIS=
+EPSILON_UPPER_GNOSIS=
+EPSILON_LOWER_ARBITRUM=
+EPSILON_UPPER_ARBITRUM=

From 26da1edf355c3c920d2a82b262ad1133ac9b7590 Mon Sep 17 00:00:00 2001
From: harisang
Date: Thu, 28 Nov 2024 19:03:57 +0200
Subject: [PATCH 4/6] minor fixes and adding table creation sql

---
 src/main.py                              | 13 +++----------
 src/sql/orderbook/create_batch_table.sql | 18 ++++++++++++++++++
 src/sync/batch_data.py                   |  9 +++------
 3 files changed, 24 insertions(+), 16 deletions(-)
 create mode 100644 src/sql/orderbook/create_batch_table.sql

diff --git a/src/main.py b/src/main.py
index 9a3be95..a643108 100644
--- a/src/main.py
+++ b/src/main.py
@@ -42,19 +42,12 @@ def main() -> None:
     load_dotenv()
     args = ScriptArgs()
     orderbook = OrderbookFetcher()
-    network = os.environ.get("NETWORK", "mainnet")
+    network = node_suffix(os.environ.get("NETWORK", "mainnet"))
     log.info(f"Network is set to: {network}")
-    web3 = Web3(
-        Web3.HTTPProvider(os.environ.get("NODE_URL" + "_" + node_suffix(network)))
-    )
+    web3 = Web3(Web3.HTTPProvider(os.environ.get("NODE_URL" + "_" + network)))
 
     if args.sync_table == SyncTable.BATCH_DATA:
-        asyncio.run(
-            sync_batch_data(
-                web3,
-                orderbook,
-            )
-        )
+        asyncio.run(sync_batch_data(web3, orderbook, network))
     else:
         log.error(f"unsupported sync_table '{args.sync_table}'")
 
diff --git a/src/sql/orderbook/create_batch_table.sql b/src/sql/orderbook/create_batch_table.sql
new file mode 100644
index 0000000..7b723cd
--- /dev/null
+++ b/src/sql/orderbook/create_batch_table.sql
@@ -0,0 +1,18 @@
+-- sample table name for creating the intermediate tables used in the analytics db to store batch data
+create table raw_batch_data_latest_odd_month_gnosis (
+    environment varchar(6) not null,
+    auction_id bigint not null,
+    settlement_block bigint,
+    block_deadline bigint not null,
+    tx_hash bytea,
+    solver bytea not null,
+    execution_cost numeric(78,0),
+    surplus numeric(78,0),
+    protocol_fee numeric(78,0),
+    network_fee numeric(78,0),
+    uncapped_payment_native_token numeric(78,0) not null,
+    capped_payment numeric (78,0) not null,
+    winning_score numeric(78,0) not null,
+    reference_score numeric(78,0) not null,
+    PRIMARY KEY (block_deadline, auction_id, environment)
+);
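
For orientation (editorial, not part of the diff): the DataFrame that sync_batch_data writes is expected to carry columns matching the table above, and DataFrame.to_sql(..., if_exists="replace") refreshes the whole table in one shot. A toy, self-contained example against an in-memory SQLite engine, with made-up values and only a few of the columns:

    import pandas as pd
    from sqlalchemy import create_engine

    engine = create_engine("sqlite:///:memory:")  # stand-in for the analytics Postgres engine
    sample = pd.DataFrame(
        [{"environment": "prod", "auction_id": 1, "block_deadline": 20_000_000, "surplus": 10**15}]
    )
    sample.to_sql("raw_batch_data_latest_odd_month_gnosis", engine, index=False, if_exists="replace")
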
diff --git a/src/sync/batch_data.py b/src/sync/batch_data.py
index dbad352..e22f9a9 100644
--- a/src/sync/batch_data.py
+++ b/src/sync/batch_data.py
@@ -12,21 +12,18 @@
 
 
 async def sync_batch_data(
-    node: Web3,
-    orderbook: OrderbookFetcher,
+    node: Web3, orderbook: OrderbookFetcher, network: str
 ) -> None:
     """Batch data Sync Logic"""
-    load_dotenv()
-    network = os.environ["NETWORK"]
 
     block_range_list, months_list, is_even = compute_block_and_month_range(node)
     for i, _ in enumerate(block_range_list):
         start_block = block_range_list[i][0]
         end_block = block_range_list[i][1]
         if is_even[i]:
-            table_name = "raw_batch_data_latest_even_month_" + str(network)
+            table_name = "raw_batch_data_latest_even_month_" + network.lower()
         else:
-            table_name = "raw_batch_data_latest_odd_month_" + str(network)
+            table_name = "raw_batch_data_latest_odd_month_" + network.lower()
         block_range = BlockRange(block_from=start_block, block_to=end_block)
         log.info(
             f"About to process block range ({start_block}, {end_block}) for month {months_list[i]}"
         )

From b94716c7675047251410a060c6b7896967bd6a30 Mon Sep 17 00:00:00 2001
From: harisang
Date: Fri, 29 Nov 2024 17:53:11 +0200
Subject: [PATCH 5/6] update black

---
 requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index 781868c..8a3e215 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -10,7 +10,7 @@ SQLAlchemy>=2.0,<3.0
 web3==6.20.3
 pandas-stubs>=1.5.1.221024
 boto3-stubs>=1.26.12
-black==23.12.0
+black>=24.3.0
 mypy==1.3.0
 mypy-extensions==1.0.0
 pylint>=2.14.4

From 360068d732b431ea2f9016487e016ee690861fce Mon Sep 17 00:00:00 2001
From: harisang
Date: Sat, 30 Nov 2024 02:20:49 +0200
Subject: [PATCH 6/6] add simple script to force recomputation of previous month

---
 src/main.py                               |  6 ++--
 src/scripts/batch_data_past_two_months.py | 40 +++++++++++++++++++++++
 src/scripts/delete_from.py                | 26 ---------------
 src/sync/batch_data.py                    | 16 +++++++--
 src/sync/common.py                        |  6 ++--
 5 files changed, 61 insertions(+), 33 deletions(-)
 create mode 100644 src/scripts/batch_data_past_two_months.py
 delete mode 100644 src/scripts/delete_from.py

diff --git a/src/main.py b/src/main.py
index a643108..6570278 100644
--- a/src/main.py
+++ b/src/main.py
@@ -1,9 +1,9 @@
 """Main Entry point for app_hash sync"""
+
 import argparse
 import asyncio
 import os
 from dataclasses import dataclass
-from pathlib import Path
 
 from dotenv import load_dotenv
 from web3 import Web3
@@ -47,7 +47,9 @@ def main() -> None:
     web3 = Web3(Web3.HTTPProvider(os.environ.get("NODE_URL" + "_" + network)))
 
     if args.sync_table == SyncTable.BATCH_DATA:
-        asyncio.run(sync_batch_data(web3, orderbook, network))
+        asyncio.run(
+            sync_batch_data(web3, orderbook, network, recompute_previous_month=False)
+        )
     else:
         log.error(f"unsupported sync_table '{args.sync_table}'")
 
diff --git a/src/scripts/batch_data_past_two_months.py b/src/scripts/batch_data_past_two_months.py
new file mode 100644
index 0000000..86813ea
--- /dev/null
+++ b/src/scripts/batch_data_past_two_months.py
@@ -0,0 +1,40 @@
+"""
+Script to recompute batch rewards for the current and previous month.
+""" + +import argparse +import os +from web3 import Web3 +from dotenv import load_dotenv +import asyncio + + +from src.models.tables import SyncTable +from src.fetch.orderbook import OrderbookFetcher +from src.logger import set_log +from src.utils import node_suffix +from src.sync.batch_data import sync_batch_data + + +log = set_log(__name__) + +if __name__ == "__main__": + load_dotenv() + parser = argparse.ArgumentParser("Script Arguments") + parser.add_argument( # pylint: disable=duplicate-code + "--sync-table", + type=SyncTable, + required=True, + choices=list(SyncTable), + ) + args, _ = parser.parse_known_args() + + orderbook = OrderbookFetcher() + network = node_suffix(os.environ.get("NETWORK", "mainnet")) + log.info(f"Network is set to: {network}") + web3 = Web3(Web3.HTTPProvider(os.environ.get("NODE_URL" + "_" + network))) + + if args.sync_table == SyncTable.BATCH_DATA: + asyncio.run(sync_batch_data(web3, orderbook, network, recompute_previous_month=True)) + else: + log.error(f"unsupported sync_table '{args.sync_table}'") \ No newline at end of file diff --git a/src/scripts/delete_from.py b/src/scripts/delete_from.py deleted file mode 100644 index a608e19..0000000 --- a/src/scripts/delete_from.py +++ /dev/null @@ -1,26 +0,0 @@ -""" -Script to empty AWS bucket. -Used for re-deployments involving schema change. -""" -import argparse - -from dotenv import load_dotenv - -from src.models.tables import SyncTable - -if __name__ == "__main__": - load_dotenv() - parser = argparse.ArgumentParser("Script Arguments") - parser.add_argument( - "--from-block", - type=int, - required=True, - ) - parser.add_argument( # pylint: disable=duplicate-code - "--sync-table", - type=SyncTable, - required=True, - choices=list(SyncTable), - ) - args, _ = parser.parse_known_args() - diff --git a/src/sync/batch_data.py b/src/sync/batch_data.py index e22f9a9..17a6df5 100644 --- a/src/sync/batch_data.py +++ b/src/sync/batch_data.py @@ -1,4 +1,5 @@ """Main Entry point for batch data sync""" + import os from dotenv import load_dotenv from web3 import Web3 @@ -12,11 +13,20 @@ async def sync_batch_data( - node: Web3, orderbook: OrderbookFetcher, network: str + node: Web3, + orderbook: OrderbookFetcher, + network: str, + recompute_previous_month: bool, ) -> None: - """Batch data Sync Logic""" + """ + Batch data Sync Logic. The recompute_previous_month flag, when enabled, forces a recomputation + of the previous month. If it is set to False, previous month is still recomputed when the current + date is the first day of the current month. 
+ """ - block_range_list, months_list, is_even = compute_block_and_month_range(node) + block_range_list, months_list, is_even = compute_block_and_month_range( + node, recompute_previous_month + ) for i, _ in enumerate(block_range_list): start_block = block_range_list[i][0] end_block = block_range_list[i][1] diff --git a/src/sync/common.py b/src/sync/common.py index 0d8457c..8a729da 100644 --- a/src/sync/common.py +++ b/src/sync/common.py @@ -1,4 +1,5 @@ """Shared methods between both sync scripts.""" + from datetime import datetime, timezone from typing import List, Tuple from web3 import Web3 @@ -7,6 +8,7 @@ log = set_log(__name__) + def find_block_with_timestamp(node: Web3, time_stamp: float) -> int: """ This implements binary search and returns the smallest block number @@ -42,7 +44,7 @@ def find_block_with_timestamp(node: Web3, time_stamp: float) -> int: def compute_block_and_month_range( # pylint: disable=too-many-locals - node: Web3, + node: Web3, recompute_previous_month: bool ) -> Tuple[List[Tuple[int, int]], List[str], List[bool]]: """ This determines the block range and the relevant months @@ -81,7 +83,7 @@ def compute_block_and_month_range( # pylint: disable=too-many-locals is_even = [False] months_list = [current_month] block_range = [(current_month_start_block, current_month_end_block)] - if current_month_end_datetime.day == 1: + if current_month_end_datetime.day == 1 or recompute_previous_month: is_even.append(not is_even[0]) if current_month_end_datetime.month == 1: previous_month = f"{current_month_end_datetime.year - 1}_12"