From de9ab6255f1f1f25ade04bcf0a7f4f4077f29917 Mon Sep 17 00:00:00 2001 From: rclarke0 Date: Mon, 15 Jul 2024 14:40:00 -0400 Subject: [PATCH 1/3] fix(abr-testing): makes sure that run grabbed is completed when using abr-scale --- abr-testing/abr_testing/tools/abr_scale.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/abr-testing/abr_testing/tools/abr_scale.py b/abr-testing/abr_testing/tools/abr_scale.py index ad526792cf8..d02bf0acfed 100644 --- a/abr-testing/abr_testing/tools/abr_scale.py +++ b/abr-testing/abr_testing/tools/abr_scale.py @@ -123,9 +123,25 @@ def get_most_recent_run_and_record( most_recent_run_id = run_list[-1]["id"] results = get_run_logs.get_run_data(most_recent_run_id, ip) # Save run information to local directory as .json file - read_robot_logs.save_run_log_to_json(ip, results, storage_directory) + saved_file_path = read_robot_logs.save_run_log_to_json( + ip, results, storage_directory + ) + # Check that last run is completed. + with open(saved_file_path) as file: + file_results = json.load(file) + try: + file_results["completedAt"] + except KeyError: + # no completedAt field, get run before the last run. + most_recent_run_id = run_list[-2]["id"] + results = get_run_logs.get_run_data(most_recent_run_id, ip) + # Save run information to local directory as .json file + saved_file_path = read_robot_logs.save_run_log_to_json( + ip, results, storage_directory + ) # Record run to google sheets. 
print(most_recent_run_id) + ( runs_and_robots, headers, From 077e012db033913017a8de6c4ed63afc87728282 Mon Sep 17 00:00:00 2001 From: rclarke0 Date: Mon, 15 Jul 2024 15:26:31 -0400 Subject: [PATCH 2/3] fix(abr-testing): error handling if no runs to add --- .../automation/google_sheets_tool.py | 6 +++-- .../data_collection/abr_google_drive.py | 23 +++++++++++++++---- 2 files changed, 22 insertions(+), 7 deletions(-) diff --git a/abr-testing/abr_testing/automation/google_sheets_tool.py b/abr-testing/abr_testing/automation/google_sheets_tool.py index 5e464754273..3a6590fa03b 100644 --- a/abr-testing/abr_testing/automation/google_sheets_tool.py +++ b/abr-testing/abr_testing/automation/google_sheets_tool.py @@ -136,7 +136,6 @@ def column_letter_to_index(column_letter: str) -> int: for col_offset, col_values in enumerate(data): column_index = start_column_index + col_offset - # column_letter = index_to_column_letter(column_index) for row_offset, value in enumerate(col_values): row_index = start_row + row_offset try: @@ -163,7 +162,10 @@ def column_letter_to_index(column_letter: str) -> int: ) body = {"requests": requests} - self.spread_sheet.batch_update(body=body) + try: + self.spread_sheet.batch_update(body=body) + except gspread.exceptions.APIError as e: + print(f"ERROR MESSAGE: {e}") def update_cell( self, sheet_title: str, row: int, column: int, single_data: Any diff --git a/abr-testing/abr_testing/data_collection/abr_google_drive.py b/abr-testing/abr_testing/data_collection/abr_google_drive.py index 31eba721503..789e905f7e7 100644 --- a/abr-testing/abr_testing/data_collection/abr_google_drive.py +++ b/abr-testing/abr_testing/data_collection/abr_google_drive.py @@ -39,6 +39,8 @@ def create_data_dictionary( """Pull data from run files and format into a dictionary.""" runs_and_robots: List[Any] = [] runs_and_lpc: List[Dict[str, Any]] = [] + headers: List[str] = [] + headers_lpc: List[str] = [] for filename in os.listdir(storage_directory): file_path = 
os.path.join(storage_directory, filename) if file_path.endswith(".json"): @@ -49,7 +51,14 @@ def create_data_dictionary( if not isinstance(file_results, dict): continue run_id = file_results.get("run_id", "NaN") + try: + start_time_test = file_results["startedAt"] + completed_time_test = file_results["completedAt"] + except KeyError: + print(f"Run {run_id} is incomplete. Skipping run.") + continue if run_id in runs_to_save: + print("started reading run.") robot = file_results.get("robot_name") protocol_name = file_results["protocol"]["metadata"].get("protocolName", "") software_version = file_results.get("API_Version", "") @@ -74,13 +83,13 @@ def create_data_dictionary( ) try: start_time = datetime.strptime( - file_results.get("startedAt", ""), "%Y-%m-%dT%H:%M:%S.%f%z" + start_time_test, "%Y-%m-%dT%H:%M:%S.%f%z" ) adjusted_start_time = start_time - timedelta(hours=4) start_date = str(adjusted_start_time.date()) start_time_str = str(adjusted_start_time).split("+")[0] complete_time = datetime.strptime( - file_results.get("completedAt", ""), "%Y-%m-%dT%H:%M:%S.%f%z" + completed_time_test, "%Y-%m-%dT%H:%M:%S.%f%z" ) adjusted_complete_time = complete_time - timedelta(hours=4) complete_time_str = str(adjusted_complete_time).split("+")[0] @@ -130,8 +139,7 @@ def create_data_dictionary( **pipette_dict, **plate_measure, } - headers: List[str] = list(row_2.keys()) - # runs_and_robots[run_id] = row_2 + headers = list(row_2.keys()) runs_and_robots.append(list(row_2.values())) # LPC Data Recording runs_and_lpc, headers_lpc = read_robot_logs.lpc_data( @@ -141,6 +149,11 @@ def create_data_dictionary( continue transposed_runs_and_robots = list(map(list, zip(*runs_and_robots))) transposed_runs_and_lpc = list(map(list, zip(*runs_and_lpc))) + try: + num_of_runs_added = len(runs_and_robots) + print(f"{num_of_runs_added} run(s) added.") + except UnboundLocalError: + print("No runs added.") return transposed_runs_and_robots, headers, transposed_runs_and_lpc, headers_lpc @@ -207,7 +220,6 
@@ def create_data_dictionary( start_row = google_sheet.get_index_row() + 1 print(start_row) google_sheet.batch_update_cells(transposed_runs_and_robots, "A", start_row, "0") - # Calculate Robot Lifetimes # Add LPC to google sheet google_sheet_lpc = google_sheets_tool.google_sheet(credentials_path, "ABR-LPC", 0) @@ -216,4 +228,5 @@ def create_data_dictionary( transposed_runs_and_lpc, "A", start_row_lpc, "0" ) robots = list(set(google_sheet.get_column(1))) + # Calculate Robot Lifetimes sync_abr_sheet.determine_lifetime(google_sheet) From 53d0f69c2c6985e68b03da57d386c74bb7257a94 Mon Sep 17 00:00:00 2001 From: rclarke0 Date: Mon, 15 Jul 2024 16:19:17 -0400 Subject: [PATCH 3/3] fix(abr-testing): remove unbound error and define at beginning --- .../abr_testing/data_collection/abr_google_drive.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/abr-testing/abr_testing/data_collection/abr_google_drive.py b/abr-testing/abr_testing/data_collection/abr_google_drive.py index 789e905f7e7..3bd03cf3e3d 100644 --- a/abr-testing/abr_testing/data_collection/abr_google_drive.py +++ b/abr-testing/abr_testing/data_collection/abr_google_drive.py @@ -147,13 +147,10 @@ def create_data_dictionary( ) else: continue + num_of_runs_read = len(runs_and_robots) + print(f"Number of runs read: {num_of_runs_read}") transposed_runs_and_robots = list(map(list, zip(*runs_and_robots))) transposed_runs_and_lpc = list(map(list, zip(*runs_and_lpc))) - try: - num_of_runs_added = len(runs_and_robots) - print(f"{num_of_runs_added} run(s) added.") - except UnboundLocalError: - print("No runs added.") return transposed_runs_and_robots, headers, transposed_runs_and_lpc, headers_lpc