Skip to content

Commit

Permalink
testing graph feature
Browse files Browse the repository at this point in the history
  • Loading branch information
rclarke0 committed May 15, 2024
1 parent ee24a84 commit 07c6b45
Show file tree
Hide file tree
Showing 3 changed files with 98 additions and 0 deletions.
33 changes: 33 additions & 0 deletions abr-testing/abr_testing/automation/google_sheets_tool.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,3 +141,36 @@ def get_row_index_with_value(self, some_string: str, col_num: int) -> Any:
print("Row not found.")
return None
return row_index

def create__line_chart(self, titles: List[str], series: List[Dict[str, Any]], spreadsheet_id: str):
"""Create chart of data on google sheet."""
chart_data = {
"chart": {
"spec": {
"title": titles[0],
"basicChart": {
"chartType": "LINE",
"legendPosition": "BOTTOM_LEGEND",
"axis": [
{
"position": "BOTTOM_AXIS",
"title": titles[1]
},
{
"position": "LEFT_AXIS",
"title": titles[2]
}
],
"ranges": [
series
],
"headerCount": 1
}
},
"position": {
"newSheet": True
}
}
}
body = {"requests": [{"addChart": {"chart": chart_data}}]}
self.batchUpdate(spreadsheet_id, body = body).execute()
64 changes: 64 additions & 0 deletions abr-testing/abr_testing/data_collection/abr_lpc.py
Original file line number Diff line number Diff line change
@@ -1 +1,65 @@
"""Get Unique LPC Values from Run logs."""
import os
import argparse
from typing import Any, Dict, List
from abr_testing.automation import google_sheets_tool
import sys

# TODO: Remove duplicate rows
def identify_duplicate_data(all_data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """Return rows with duplicate data sets removed.

    Two rows are duplicates when they match on every identifying column
    below; the first occurrence is kept and row order is otherwise
    preserved.

    :param all_data: sheet rows as dicts keyed by column header; each
        row must contain all of the identifying columns.
    :return: the unique rows, in original order.
    """
    # Columns that together identify one LPC data set.
    key_columns = ("Robot", "Errors", "Slot", "Module", "Adapter", "X", "Y", "Z")
    seen: set = set()
    unique_rows: List[Dict[str, Any]] = []
    for row in all_data:
        key = tuple(row[col] for col in key_columns)
        if key not in seen:
            seen.add(key)
            unique_rows.append(row)
    return unique_rows

def update_sheet_with_new_values(new_values: List[Dict[str, Any]]) -> None:
    """Overwrite the sheet with only the unique data sets.

    Clears ``google_sheet_lpc`` (module global, bound in ``__main__``)
    and rewrites it as a header row followed by one row per dict.
    No-op when ``new_values`` is empty — previously this cleared the
    sheet and then crashed with ``IndexError`` on ``new_values[0]``.

    :param new_values: de-duplicated rows, dicts keyed by column header.
    """
    if not new_values:
        return
    google_sheet_lpc.clear()
    # Column order comes from the first row's keys.
    headers = list(new_values[0].keys())
    data = [headers] + [[row[col] for col in headers] for row in new_values]
    google_sheet_lpc.update(data)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Read run logs on google drive.")
    parser.add_argument(
        "storage_directory",
        metavar="STORAGE_DIRECTORY",
        type=str,
        nargs=1,
        help="Path to long term storage directory for run logs.",
    )
    args = parser.parse_args()
    storage_directory = args.storage_directory[0]
    credentials_path = os.path.join(storage_directory, "credentials.json")
    # os.path.join never raises FileNotFoundError, so the old try/except
    # around it was dead code; check for the credentials file explicitly.
    if not os.path.exists(credentials_path):
        print(f"Add credentials.json file to: {storage_directory}.")
        sys.exit()
    google_sheet_lpc = google_sheets_tool.google_sheet(credentials_path, "ABR-LPC", 0)
    # Drop duplicate data sets and rewrite the sheet with the remainder.
    sheet_data = google_sheet_lpc.get_all_data()
    print(len(sheet_data))
    new_values = identify_duplicate_data(sheet_data)
    print(len(new_values))
    update_sheet_with_new_values(new_values)

    # Re-read so the chart range covers exactly the de-duplicated rows.
    num_of_rows = len(google_sheet_lpc.get_all_data())
    # Create a line chart of the LPC offsets on a new sheet.
    graph_title = "ABR LPC"
    x_axis_title = "X Offset (mm)"
    y_axis_title = "Y Offset (mm)"
    titles = [graph_title, x_axis_title, y_axis_title]
    # NOTE(review): columns 29-30 select a single zero-based column (29);
    # confirm it is the offset column intended for this chart.
    series = [
        {
            "sheetId": 0,
            "startRowIndex": 0,
            "endRowIndex": num_of_rows,
            "startColumnIndex": 29,
            "endColumnIndex": 30,
        }
    ]
    spreadsheet_id = "1m9c3Ql2Uez4MC_aLayeUX6YO7WMkNA-4B5xk_w8zJc4"
    google_sheet_lpc.create_line_chart(titles, series, spreadsheet_id)
1 change: 1 addition & 0 deletions abr-testing/abr_testing/data_collection/abr_robot_error.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,7 @@ def get_error_info_from_robot(
whole_description_str,
run_log_file_path,
) = get_error_info_from_robot(ip, one_run, storage_directory)
affects_version = "internal release - any"
# Get Calibration Data
saved_file_path_calibration, calibration = read_robot_logs.get_calibration_offsets(
ip, storage_directory
Expand Down

0 comments on commit 07c6b45

Please sign in to comment.