Merge branch 'edge' into app_well-selection-component
brenthagen committed May 16, 2024
2 parents dcf40af + 6f61368 commit 4d83f74
Showing 913 changed files with 14,822 additions and 4,592 deletions.
5 changes: 2 additions & 3 deletions .eslintrc.js
@@ -49,13 +49,13 @@ module.exports = {
            importNames: [
              'useAllRunsQuery',
              'useRunQuery',
              'useLastRunCommandKey',
              'useAllCommandsQuery',
              'useCurrentMaintenanceRun',
              'useDeckConfigurationQuery',
              'useAllCommandsAsPreSerializedList',
            ],
            message:
              'The HTTP hook is deprecated. Utilize the equivalent notification wrapper (useNotifyX) instead.',
              'HTTP hook deprecated. Use the equivalent notification wrapper (useNotifyXYZ).',
          },
        ],
      },
@@ -102,7 +102,6 @@ module.exports = {
    '@typescript-eslint/no-unnecessary-type-assertion': 'warn',
    '@typescript-eslint/no-unnecessary-boolean-literal-compare': 'warn',
    '@typescript-eslint/no-unsafe-argument': 'warn',
    '@typescript-eslint/consistent-type-imports': 'warn',
    '@typescript-eslint/consistent-indexed-object-style': 'warn',
    '@typescript-eslint/no-confusing-void-expression': 'warn',
    '@typescript-eslint/ban-types': 'warn',
63 changes: 62 additions & 1 deletion abr-testing/abr_testing/automation/google_sheets_tool.py
@@ -87,14 +87,33 @@ def delete_row(self, row_index: int) -> None:
"""Delete Row from google sheet."""
self.worksheet.delete_rows(row_index)

def batch_delete_rows(self, row_indices: List[int]) -> None:
    """Batch delete rows in list of indices."""
    delete_body = {
        "requests": [
            {
                "deleteDimension": {
                    "range": {
                        "sheetId": 0,
                        "dimension": "ROWS",
                        "startIndex": index,
                        "endIndex": index + 1,
                    }
                }
            }
            for index in row_indices
        ]
    }
    self.spread_sheet.batch_update(body=delete_body)
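
One caveat worth flagging: the Sheets API applies the requests inside a single batchUpdate sequentially, so each deleteDimension shifts every later row up by one, and an ascending index list ends up deleting the wrong rows after the first request. Passing the zero-based indices in descending order sidesteps this. A minimal sketch of a hypothetical wrapper (not part of this commit):

# Hypothetical helper: delete from the bottom up so that earlier deletions
# never shift the rows that later requests point at.
def batch_delete_rows_descending(sheet_tool, row_indices: List[int]) -> None:
    sheet_tool.batch_delete_rows(sorted(row_indices, reverse=True))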

def update_cell(
    self, row: int, column: int, single_data: Any
) -> Tuple[int, int, Any]:
    """Update ONE individual cell according to a row and column."""
    self.worksheet.update_cell(row, column, single_data)
    return row, column, single_data

def get_all_data(self) -> Dict[str, Any]:
def get_all_data(self) -> List[Dict[str, Any]]:
    """Get all the Data recorded from worksheet."""
    return self.worksheet.get_all_records()
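
gspread's get_all_records returns one dict per data row, keyed by the header row, which is what the corrected List[Dict[str, Any]] annotation reflects. A short usage sketch (the "Z" column name is an assumption borrowed from the LPC sheets handled below, not part of this file):

# Hypothetical caller: pull every recorded Z offset from the sheet.
rows = google_sheet.get_all_data()
z_offsets = [row["Z"] for row in rows]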

@@ -141,3 +160,45 @@ def get_row_index_with_value(self, some_string: str, col_num: int) -> Any:
print("Row not found.")
return None
return row_index

def create_line_chart(
    self,
    titles: List[str],
    series: List[Dict[str, Any]],
    domains: List[Dict[str, Any]],
) -> None:
    """Create chart of data on google sheet."""
    request_body = {
        "requests": [
            {
                "addChart": {
                    "chart": {
                        "spec": {
                            "title": titles[0],
                            "basicChart": {
                                "chartType": "LINE",
                                "legendPosition": "RIGHT_LEGEND",
                                "axis": [
                                    {"position": "BOTTOM_AXIS", "title": titles[1]},
                                    {"position": "LEFT_AXIS", "title": titles[2]},
                                ],
                                "domains": domains,
                                "series": series,
                                "headerCount": 1,
                            },
                        },
                        "position": {
                            "overlayPosition": {
                                "anchorCell": {
                                    "sheetId": 0,
                                    "rowIndex": 1,
                                    "columnIndex": 1,
                                }
                            }
                        },
                    }
                }
            }
        ]
    }
    self.spread_sheet.batch_update(body=request_body)
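
The series and domains arguments pass straight through to the Sheets API BasicChartSpec, so each entry wraps a sourceRange of GridRanges. A hedged usage sketch, assuming a google_sheet instance named google_sheet and data in columns A (x) and B (y) of the first sheet; the ranges and titles are illustrative only:

# Hypothetical call: chart column B against column A for 100 data rows plus header.
domain = {
    "domain": {
        "sourceRange": {
            "sources": [
                {
                    "sheetId": 0,
                    "startRowIndex": 0,
                    "endRowIndex": 101,
                    "startColumnIndex": 0,
                    "endColumnIndex": 1,
                }
            ]
        }
    }
}
series = {
    "series": {
        "sourceRange": {
            "sources": [
                {
                    "sheetId": 0,
                    "startRowIndex": 0,
                    "endRowIndex": 101,
                    "startColumnIndex": 1,
                    "endColumnIndex": 2,
                }
            ]
        }
    },
    "targetAxis": "LEFT_AXIS",
}
google_sheet.create_line_chart(
    titles=["Z Drift Over Time", "Run Date", "Z Offset (mm)"],
    series=[series],
    domains=[domain],
)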
22 changes: 16 additions & 6 deletions abr-testing/abr_testing/data_collection/abr_google_drive.py
@@ -32,9 +32,10 @@ def create_data_dictionary(
    runs_to_save: Union[Set[str], str],
    storage_directory: str,
    issue_url: str,
) -> Tuple[Dict[Any, Dict[str, Any]], List]:
) -> Tuple[Dict[str, Dict[str, Any]], List[str], Dict[str, Dict[str, Any]], List[str]]:
    """Pull data from run files and format into a dictionary."""
    runs_and_robots = {}
    runs_and_robots: Dict[Any, Dict[str, Any]] = {}
    runs_and_lpc: Dict[Any, Dict[str, Any]] = {}
    for filename in os.listdir(storage_directory):
        file_path = os.path.join(storage_directory, filename)
        if file_path.endswith(".json"):
@@ -108,6 +109,7 @@ def create_data_dictionary(
            hs_dict = read_robot_logs.hs_commands(file_results)
            tm_dict = read_robot_logs.temperature_module_commands(file_results)
            notes = {"Note1": "", "Jira Link": issue_url}
            row_for_lpc = {**row, **all_modules, **notes}
            row_2 = {
                **row,
                **all_modules,
@@ -116,11 +118,15 @@
                **tm_dict,
                **tc_dict,
            }
            headers = list(row_2.keys())
            headers: List[str] = list(row_2.keys())
            runs_and_robots[run_id] = row_2
            # LPC Data Recording
            runs_and_lpc, headers_lpc = read_robot_logs.lpc_data(
                file_results, row_for_lpc, runs_and_lpc
            )
        else:
            continue
    return runs_and_robots, headers
    return runs_and_robots, headers, runs_and_lpc, headers_lpc
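
Because the function now returns a four-tuple, any caller still unpacking two values will raise a ValueError at the call site. The updated pattern, mirroring the call sites later in this commit (arguments are placeholders):

runs_and_robots, headers, runs_and_lpc, headers_lpc = create_data_dictionary(
    missing_runs_from_gs, storage_directory, ""
)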


if __name__ == "__main__":
@@ -164,7 +170,6 @@ def create_data_dictionary(
    google_sheet = google_sheets_tool.google_sheet(
        credentials_path, google_sheet_name, 0
    )
    google_sheet_lpc = google_sheets_tool.google_sheet(credentials_path, "ABR-LPC", 0)

    run_ids_on_gs = google_sheet.get_column(2)
    run_ids_on_gs = set(run_ids_on_gs)
@@ -178,9 +183,14 @@
        run_ids_on_gd, run_ids_on_gs
    )
    # Add missing runs to google sheet
    runs_and_robots, headers = create_data_dictionary(
    runs_and_robots, headers, runs_and_lpc, headers_lpc = create_data_dictionary(
        missing_runs_from_gs, storage_directory, ""
    )
    read_robot_logs.write_to_local_and_google_sheet(
        runs_and_robots, storage_directory, google_sheet_name, google_sheet, headers
    )
    # Add LPC to google sheet
    google_sheet_lpc = google_sheets_tool.google_sheet(credentials_path, "ABR-LPC", 0)
    read_robot_logs.write_to_local_and_google_sheet(
        runs_and_lpc, storage_directory, "ABR-LPC", google_sheet_lpc, headers_lpc
    )
57 changes: 56 additions & 1 deletion abr-testing/abr_testing/data_collection/abr_lpc.py
@@ -1 +1,56 @@
"""Get Unique LPC Values from Run logs."""
"""Automated LPC Data Analysis."""
import os
import argparse
from abr_testing.automation import google_sheets_tool
import sys


def remove_duplicate_data() -> None:
    """Determine unique sets of data."""
    seen = set()
    new_values = []
    row_indices = []
    sheet_data = google_sheet_lpc.get_all_data()
    for i, row in enumerate(sheet_data):
        key = (
            row["Robot"],
            row["Software Version"],
            row["Errors"],
            row["Slot"],
            row["Module"],
            row["Adapter"],
            row["X"],
            row["Y"],
            row["Z"],
        )
        if key not in seen:
            seen.add(key)
            new_values.append(row)
        else:
            row_indices.append(i)
    if len(row_indices) > 0:
        google_sheet_lpc.batch_delete_rows(row_indices)
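
One reading worth verifying against gspread's semantics (an assumption, not something this commit states): get_all_records skips the header row, so record i from the enumerate sits at zero-based sheet row i + 1, while batch_delete_rows feeds its indices straight into deleteDimension. If that holds, the duplicate bookkeeping would need a one-row offset, e.g.:

        else:
            # Header occupies zero-based row 0, so record i maps to sheet row i + 1.
            row_indices.append(i + 1)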


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Read run logs on google drive.")
    parser.add_argument(
        "storage_directory",
        metavar="STORAGE_DIRECTORY",
        type=str,
        nargs=1,
        help="Path to long term storage directory for run logs.",
    )
    args = parser.parse_args()
    storage_directory = args.storage_directory[0]
    # os.path.join never raises FileNotFoundError, so test for the file explicitly.
    credentials_path = os.path.join(storage_directory, "credentials.json")
    if not os.path.exists(credentials_path):
        print(f"Add credentials.json file to: {storage_directory}.")
        sys.exit()
    google_sheet_lpc = google_sheets_tool.google_sheet(credentials_path, "ABR-LPC", 0)
    print(len(google_sheet_lpc.get_all_data()))
    remove_duplicate_data()
    # print() returns None, so count the remaining rows first, then print the count.
    num_of_rows = len(google_sheet_lpc.get_all_data())
    print(num_of_rows)
    # TODO: automate data analysis
45 changes: 20 additions & 25 deletions abr-testing/abr_testing/data_collection/abr_robot_error.py
@@ -7,9 +7,7 @@
import shutil
import os
import subprocess
import json
import sys
import gspread # type: ignore[import]


def get_error_runs_from_robot(ip: str) -> List[str]:
@@ -145,6 +143,7 @@ def get_error_info_from_robot(
        whole_description_str,
        run_log_file_path,
    ) = get_error_info_from_robot(ip, one_run, storage_directory)
    affects_version = "internal release - any"
    # Get Calibration Data
    saved_file_path_calibration, calibration = read_robot_logs.get_calibration_offsets(
        ip, storage_directory
@@ -183,35 +182,31 @@ def get_error_info_from_robot(
    # CONNECT TO GOOGLE DRIVE
    credentials_path = os.path.join(storage_directory, "credentials.json")
    google_sheet_name = "ABR-run-data"
    try:
        google_drive = google_drive_tool.google_drive(
            credentials_path,
            "1Cvej0eadFOTZr9ILRXJ0Wg65ymOtxL4m",
            "rhyann.clarke@opentrons.ocm",
        )
        print("Connected to google drive.")
    except json.decoder.JSONDecodeError:
        print(
            "Credential file is damaged. Get from https://console.cloud.google.com/apis/credentials"
        )
        sys.exit()
    google_drive = google_drive_tool.google_drive(
        credentials_path,
        "1Cvej0eadFOTZr9ILRXJ0Wg65ymOtxL4m",
        "rhyann.clarke@opentrons.ocm",
    )
    # CONNECT TO GOOGLE SHEET
    try:
        google_sheet = google_sheets_tool.google_sheet(
            credentials_path, google_sheet_name, 0
        )
        print(f"Connected to google sheet: {google_sheet_name}")
    except gspread.exceptions.APIError:
        print("ERROR: Check google sheet name. Check credentials file.")
        sys.exit()
    google_sheet = google_sheets_tool.google_sheet(
        credentials_path, google_sheet_name, 0
    )
    # WRITE ERRORED RUN TO GOOGLE SHEET
    error_run_log = os.path.join(error_folder_path, os.path.basename(run_log_file_path))
    google_drive.upload_file(error_run_log)
    run_id = os.path.basename(error_run_log).split("_")[1].split(".")[0]
    runs_and_robots, headers = abr_google_drive.create_data_dictionary(
        run_id, error_folder_path, issue_url
    )
    (
        runs_and_robots,
        headers,
        runs_and_lpc,
        headers_lpc,
    ) = abr_google_drive.create_data_dictionary(run_id, error_folder_path, issue_url)
    read_robot_logs.write_to_local_and_google_sheet(
        runs_and_robots, storage_directory, google_sheet_name, google_sheet, headers
    )
    print("Wrote run to ABR-run-data")
    # Add LPC to google sheet
    google_sheet_lpc = google_sheets_tool.google_sheet(credentials_path, "ABR-LPC", 0)
    read_robot_logs.write_to_local_and_google_sheet(
        runs_and_lpc, storage_directory, "ABR-LPC", google_sheet_lpc, headers_lpc
    )
54 changes: 36 additions & 18 deletions abr-testing/abr_testing/data_collection/read_robot_logs.py
@@ -15,11 +15,17 @@
import sys


def lpc_data(file_results: Dict[str, Any], protocol_info: Dict) -> List[Dict[str, Any]]:
def lpc_data(
    file_results: Dict[str, Any],
    protocol_info: Dict[str, Any],
    runs_and_lpc: Dict[str, Any],
) -> Tuple[Dict[str, Dict[str, Any]], List[str]]:
"""Get labware offsets from one run log."""
offsets = file_results.get("labwareOffsets", "")
all_offsets: List[Dict[str, Any]] = []
n = 0
# TODO: per UNIQUE slot AND LABWARE TYPE only keep the most recent LPC recording
if len(offsets) > 0:
unique_offsets: Dict[Any, Any] = {}
for offset in offsets:
labware_type = offset.get("definitionUri", "")
slot = offset["location"].get("slotName", "")
@@ -29,19 +35,32 @@ def lpc_data(file_results: Dict[str, Any], protocol_info: Dict) -> List[Dict[str
            y_offset = offset["vector"].get("y", 0.0)
            z_offset = offset["vector"].get("z", 0.0)
            created_at = offset.get("createdAt", "")
        row = {
            "createdAt": created_at,
            "Labware Type": labware_type,
            "Slot": slot,
            "Module": module_location,
            "Adapter": adapter,
            "X": x_offset,
            "Y": y_offset,
            "Z": z_offset,
        }
        row2 = {**protocol_info, **row}
        all_offsets.append(row2)
    return all_offsets
            key = (slot, labware_type)
            if key not in unique_offsets or created_at > unique_offsets[key]["createdAt"]:
                unique_offsets[key] = {
                    **protocol_info,
                    "createdAt": created_at,
                    "Labware Type": labware_type,
                    "Slot": slot,
                    "Module": module_location,
                    "Adapter": adapter,
                    "X": x_offset,
                    "Y": y_offset,
                    "Z": z_offset,
                }
        for item in unique_offsets:
            run_id = protocol_info["Run_ID"] + "_" + str(n)
            runs_and_lpc[run_id] = unique_offsets[item]
            n += 1
        # Take the headers from any retained offset rather than stale loop variables.
        headers_lpc = list(next(iter(unique_offsets.values())).keys())

    return runs_and_lpc, headers_lpc
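
Since runs_and_lpc is threaded through as an accumulator, a caller folds one run log in at a time and keeps the last headers it sees; a hedged sketch of that pattern (parsed_run_logs and row_for_lpc are hypothetical placeholders):

runs_and_lpc: Dict[str, Dict[str, Any]] = {}
headers_lpc: List[str] = []
for file_results in parsed_run_logs:  # hypothetical iterable of parsed run-log dicts
    runs_and_lpc, headers_lpc = lpc_data(file_results, row_for_lpc, runs_and_lpc)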


def command_time(command: Dict[str, str]) -> Tuple[float, float]:
@@ -323,13 +342,12 @@ def write_to_local_and_google_sheet(
"""Write data dictionary to google sheet and local csv."""
sheet_location = os.path.join(storage_directory, file_name)
file_exists = os.path.exists(sheet_location) and os.path.getsize(sheet_location) > 0
list_of_runs = list(runs_and_robots.keys())
with open(sheet_location, "a", newline="") as f:
writer = csv.writer(f)
if not file_exists:
writer.writerow(header)
for run in range(len(list_of_runs)):
row = runs_and_robots[list_of_runs[run]].values()
for run in runs_and_robots:
row = runs_and_robots[run].values()
row_list = list(row)
writer.writerow(row_list)
google_sheet.write_header(header)
@@ -33,7 +33,12 @@
        sys.exit()
    # Get Runs from Storage and Read Logs
    run_ids_in_storage = read_robot_logs.get_run_ids_from_storage(run_log_file_path)
    runs_and_robots, header = abr_google_drive.create_data_dictionary(
    (
        runs_and_robots,
        header,
        runs_and_lpc,
        lpc_headers,
    ) = abr_google_drive.create_data_dictionary(
        run_ids_in_storage, run_log_file_path, ""
    )
    list_of_runs = list(runs_and_robots.keys())
