From 7e287751b0c2d2c685a98eabb821994dffdb7e2d Mon Sep 17 00:00:00 2001 From: Max Marrone Date: Fri, 9 Aug 2024 12:27:15 -0400 Subject: [PATCH] refactor(robot-server): Use new `Annotated` syntax for FastAPI dependencies (#15838) --- .../robot_server/client_data/router.py | 2 +- .../robot_server/client_data/store.py | 4 +- .../commands/get_default_orchestrator.py | 10 +- robot-server/robot_server/commands/router.py | 91 ++++----- .../robot_server/data_files/dependencies.py | 14 +- .../robot_server/data_files/router.py | 44 +++-- .../fastapi_dependencies.py | 32 ++-- .../robot_server/deck_configuration/router.py | 14 +- robot-server/robot_server/hardware.py | 19 +- robot-server/robot_server/health/router.py | 10 +- .../robot_server/instruments/router.py | 4 +- .../maintenance_runs/dependencies.py | 22 ++- .../maintenance_runs/router/base_router.py | 48 ++--- .../router/commands_router.py | 112 +++++------ .../maintenance_runs/router/labware_router.py | 17 +- .../modules/module_data_mapper.py | 4 +- robot-server/robot_server/modules/router.py | 10 +- .../persistence/fastapi_dependencies.py | 12 +- .../robot_server/protocols/dependencies.py | 28 +-- robot-server/robot_server/protocols/router.py | 175 +++++++++--------- .../robot/control/dependencies.py | 4 +- .../robot_server/robot/control/router.py | 14 +- .../robot_server/runs/dependencies.py | 40 ++-- .../runs/router/actions_router.py | 33 ++-- .../robot_server/runs/router/base_router.py | 91 ++++----- .../runs/router/commands_router.py | 117 ++++++------ .../runs/router/labware_router.py | 16 +- .../robot_server/service/dependencies.py | 6 +- .../robot_server/service/labware/router.py | 9 +- .../service/legacy/routers/control.py | 18 +- .../legacy/routers/deck_calibration.py | 4 +- .../service/legacy/routers/logs.py | 18 +- .../service/legacy/routers/modules.py | 12 +- .../service/legacy/routers/motors.py | 6 +- .../service/legacy/routers/networking.py | 37 ++-- .../service/legacy/routers/pipettes.py | 26 +-- .../service/legacy/routers/settings.py | 32 ++-- .../notifications/notification_client.py | 4 +- .../notifications/publisher_notifier.py | 6 +- .../deck_configuration_publisher.py | 8 +- .../publishers/maintenance_runs_publisher.py | 8 +- .../publishers/runs_publisher.py | 12 +- .../service/pipette_offset/router.py | 9 +- .../robot_server/service/session/router.py | 18 +- .../robot_server/service/task_runner.py | 8 +- .../robot_server/service/tip_length/router.py | 104 ++++++----- .../robot_server/subsystems/router.py | 30 ++- .../tests/commands/test_get_default_engine.py | 11 +- robot-server/tests/data_files/test_router.py | 6 + .../router/test_base_router.py | 3 + .../router/test_commands_router.py | 4 + robot-server/tests/modules/test_router.py | 2 + .../tests/protocols/test_protocols_router.py | 101 +++++++++- .../tests/runs/router/test_actions_router.py | 3 + .../tests/runs/router/test_base_router.py | 37 ++++ .../tests/runs/router/test_commands_router.py | 13 +- robot-server/tests/test_versioning.py | 6 +- 57 files changed, 920 insertions(+), 628 deletions(-) diff --git a/robot-server/robot_server/client_data/router.py b/robot-server/robot_server/client_data/router.py index d04eaeea9ad..aec0e54b8cc 100644 --- a/robot-server/robot_server/client_data/router.py +++ b/robot-server/robot_server/client_data/router.py @@ -89,7 +89,7 @@ async def put_client_data( # noqa: D103 ) async def get_client_data( # noqa: D103 key: Key, - store: ClientDataStore = fastapi.Depends(get_client_data_store), + store: Annotated[ClientDataStore, 
fastapi.Depends(get_client_data_store)], ) -> SimpleBody[ClientData]: try: return SimpleBody.construct(data=store.get(key)) diff --git a/robot-server/robot_server/client_data/store.py b/robot-server/robot_server/client_data/store.py index 2fcd90e8feb..40d7e503f77 100644 --- a/robot-server/robot_server/client_data/store.py +++ b/robot-server/robot_server/client_data/store.py @@ -1,5 +1,7 @@ """An in-memory store for arbitrary client-defined JSON objects.""" +from typing import Annotated + import fastapi from server_utils.fastapi_utils.app_state import ( @@ -49,7 +51,7 @@ def delete_all(self) -> None: async def get_client_data_store( - app_state: AppState = fastapi.Depends(get_app_state), + app_state: Annotated[AppState, fastapi.Depends(get_app_state)], ) -> ClientDataStore: """A FastAPI dependency to return the server's singleton `ClientDataStore`.""" store = _app_state_accessor.get_from(app_state) diff --git a/robot-server/robot_server/commands/get_default_orchestrator.py b/robot-server/robot_server/commands/get_default_orchestrator.py index f6428452142..282b765ab86 100644 --- a/robot-server/robot_server/commands/get_default_orchestrator.py +++ b/robot-server/robot_server/commands/get_default_orchestrator.py @@ -1,5 +1,5 @@ """Get the default protocol engine.""" -from typing_extensions import Literal +from typing import Annotated, Literal from fastapi import Depends, status @@ -34,9 +34,11 @@ class RunActive(ErrorDetails): async def get_default_orchestrator( - run_orchestrator_store: RunOrchestratorStore = Depends(get_run_orchestrator_store), - hardware_api: HardwareControlAPI = Depends(get_hardware), - module_identifier: ModuleIdentifier = Depends(ModuleIdentifier), + run_orchestrator_store: Annotated[ + RunOrchestratorStore, Depends(get_run_orchestrator_store) + ], + hardware_api: Annotated[HardwareControlAPI, Depends(get_hardware)], + module_identifier: Annotated[ModuleIdentifier, Depends(ModuleIdentifier)], ) -> RunOrchestrator: """Get the default run orchestrator with attached modules loaded.""" try: diff --git a/robot-server/robot_server/commands/router.py b/robot-server/robot_server/commands/router.py index 15f2d2d3860..ef4c58c692c 100644 --- a/robot-server/robot_server/commands/router.py +++ b/robot-server/robot_server/commands/router.py @@ -1,6 +1,5 @@ """Router for top-level /commands endpoints.""" -from typing import List, Optional, cast -from typing_extensions import Final, Literal +from typing import Annotated, Final, List, Literal, Optional, cast from fastapi import APIRouter, Depends, Query, status @@ -65,35 +64,39 @@ class CommandNotFound(ErrorDetails): ) async def create_command( request_body: RequestModelWithStatelessCommandCreate, - waitUntilComplete: bool = Query( - False, - description=( - "If `false`, return immediately, while the new command is still queued." - " If `true`, only return once the new command succeeds or fails," - " or when the timeout is reached. See the `timeout` query parameter." + orchestrator: Annotated[RunOrchestrator, Depends(get_default_orchestrator)], + waitUntilComplete: Annotated[ + bool, + Query( + description=( + "If `false`, return immediately, while the new command is still queued." + " If `true`, only return once the new command succeeds or fails," + " or when the timeout is reached. See the `timeout` query parameter." + ), ), - ), - timeout: Optional[int] = Query( - default=None, - gt=0, - description=( - "If `waitUntilComplete` is `true`," - " the maximum time in milliseconds to wait before returning." - " The default is infinite." 
- "\n\n" - "The timer starts as soon as you enqueue the new command with this request," - " *not* when the new command starts running. So if there are other commands" - " in the queue before the new one, they will also count towards the" - " timeout." - "\n\n" - "If the timeout elapses before the command succeeds or fails," - " the command will be returned with its current status." - "\n\n" - "Compatibility note: on robot software v6.2.0 and older," - " the default was 30 seconds, not infinite." + ] = False, + timeout: Annotated[ + Optional[int], + Query( + gt=0, + description=( + "If `waitUntilComplete` is `true`," + " the maximum time in milliseconds to wait before returning." + " The default is infinite." + "\n\n" + "The timer starts as soon as you enqueue the new command with this request," + " *not* when the new command starts running. So if there are other commands" + " in the queue before the new one, they will also count towards the" + " timeout." + "\n\n" + "If the timeout elapses before the command succeeds or fails," + " the command will be returned with its current status." + "\n\n" + "Compatibility note: on robot software v6.2.0 and older," + " the default was 30 seconds, not infinite." + ), ), - ), - orchestrator: RunOrchestrator = Depends(get_default_orchestrator), + ] = None, ) -> PydanticResponse[SimpleBody[StatelessCommand]]: """Enqueue and execute a command. @@ -133,19 +136,23 @@ async def create_command( }, ) async def get_commands_list( - orchestrator: RunOrchestrator = Depends(get_default_orchestrator), - cursor: Optional[int] = Query( - None, - description=( - "The starting index of the desired first command in the list." - " If unspecified, a cursor will be selected automatically" - " based on the currently running or most recently executed command." + orchestrator: Annotated[RunOrchestrator, Depends(get_default_orchestrator)], + cursor: Annotated[ + Optional[int], + Query( + description=( + "The starting index of the desired first command in the list." + " If unspecified, a cursor will be selected automatically" + " based on the currently running or most recently executed command." + ), ), - ), - pageLength: int = Query( - _DEFAULT_COMMAND_LIST_LENGTH, - description="The maximum number of commands in the list to return.", - ), + ] = None, + pageLength: Annotated[ + int, + Query( + description="The maximum number of commands in the list to return.", + ), + ] = _DEFAULT_COMMAND_LIST_LENGTH, ) -> PydanticResponse[SimpleMultiBody[StatelessCommand]]: """Get a list of stateless commands. @@ -180,7 +187,7 @@ async def get_commands_list( ) async def get_command( commandId: str, - orchestrator: RunOrchestrator = Depends(get_default_orchestrator), + orchestrator: Annotated[RunOrchestrator, Depends(get_default_orchestrator)], ) -> PydanticResponse[SimpleBody[StatelessCommand]]: """Get a single stateless command. 
diff --git a/robot-server/robot_server/data_files/dependencies.py b/robot-server/robot_server/data_files/dependencies.py index 77aab325b6a..fa677505888 100644 --- a/robot-server/robot_server/data_files/dependencies.py +++ b/robot-server/robot_server/data_files/dependencies.py @@ -1,7 +1,7 @@ """FastAPI dependencies for data files endpoints.""" from pathlib import Path from asyncio import Lock as AsyncLock -from typing import Final +from typing import Annotated, Final from anyio import Path as AsyncPath from fastapi import Depends @@ -32,8 +32,8 @@ async def get_data_files_directory( - app_state: AppState = Depends(get_app_state), - persistent_directory: Path = Depends(get_active_persistence_directory), + app_state: Annotated[AppState, Depends(get_app_state)], + persistent_directory: Annotated[Path, Depends(get_active_persistence_directory)], ) -> Path: """Get the directory to save the protocol files, creating it if needed.""" async with _data_files_directory_init_lock: @@ -47,9 +47,9 @@ async def get_data_files_directory( async def get_data_files_store( - app_state: AppState = Depends(get_app_state), - sql_engine: SQLEngine = Depends(get_sql_engine), - data_files_directory: Path = Depends(get_data_files_directory), + app_state: Annotated[AppState, Depends(get_app_state)], + sql_engine: Annotated[SQLEngine, Depends(get_sql_engine)], + data_files_directory: Annotated[Path, Depends(get_data_files_directory)], ) -> DataFilesStore: """Get a singleton DataFilesStore to keep track of uploaded data files.""" async with _data_files_store_init_lock: @@ -61,7 +61,7 @@ async def get_data_files_store( def get_data_file_auto_deleter( - data_files_store: DataFilesStore = Depends(get_data_files_store), + data_files_store: Annotated[DataFilesStore, Depends(get_data_files_store)], ) -> DataFileAutoDeleter: """Get a `DataFileAutoDeleter` to delete old data files.""" return DataFileAutoDeleter( diff --git a/robot-server/robot_server/data_files/router.py b/robot-server/robot_server/data_files/router.py index 6d0d153ae68..5463712a77b 100644 --- a/robot-server/robot_server/data_files/router.py +++ b/robot-server/robot_server/data_files/router.py @@ -2,7 +2,7 @@ from datetime import datetime from pathlib import Path from textwrap import dedent -from typing import Optional, Literal, Union +from typing import Annotated, Optional, Literal, Union from fastapi import APIRouter, UploadFile, File, Form, Depends, Response, status from opentrons.protocol_reader import FileHasher, FileReaderWriter @@ -82,19 +82,25 @@ class UnexpectedFileFormat(ErrorDetails): }, ) async def upload_data_file( - file: Optional[UploadFile] = File(default=None, description="Data file to upload"), - file_path: Optional[str] = Form( - default=None, - description="Absolute path to a file on the robot.", - alias="filePath", - ), - data_files_directory: Path = Depends(get_data_files_directory), - data_files_store: DataFilesStore = Depends(get_data_files_store), - data_file_auto_deleter: DataFileAutoDeleter = Depends(get_data_file_auto_deleter), - file_reader_writer: FileReaderWriter = Depends(get_file_reader_writer), - file_hasher: FileHasher = Depends(get_file_hasher), - file_id: str = Depends(get_unique_id, use_cache=False), - created_at: datetime = Depends(get_current_time), + data_files_directory: Annotated[Path, Depends(get_data_files_directory)], + data_files_store: Annotated[DataFilesStore, Depends(get_data_files_store)], + data_file_auto_deleter: Annotated[ + DataFileAutoDeleter, Depends(get_data_file_auto_deleter) + ], + file_reader_writer: 
Annotated[FileReaderWriter, Depends(get_file_reader_writer)], + file_hasher: Annotated[FileHasher, Depends(get_file_hasher)], + file_id: Annotated[str, Depends(get_unique_id, use_cache=False)], + created_at: Annotated[datetime, Depends(get_current_time)], + file: Annotated[ + Optional[UploadFile], File(description="Data file to upload") + ] = None, + file_path: Annotated[ + Optional[str], + Form( + description="Absolute path to a file on the robot.", + alias="filePath", + ), + ] = None, ) -> PydanticResponse[SimpleBody[DataFile]]: """Save the uploaded data file to persistent storage and update database.""" if all([file, file_path]): @@ -162,7 +168,7 @@ async def upload_data_file( ) async def get_data_file_info_by_id( dataFileId: str, - data_files_store: DataFilesStore = Depends(get_data_files_store), + data_files_store: Annotated[DataFilesStore, Depends(get_data_files_store)], ) -> PydanticResponse[SimpleBody[DataFile]]: """Get data file info by ID. @@ -198,9 +204,9 @@ async def get_data_file_info_by_id( ) async def get_data_file( dataFileId: str, - data_files_directory: Path = Depends(get_data_files_directory), - data_files_store: DataFilesStore = Depends(get_data_files_store), - file_reader_writer: FileReaderWriter = Depends(get_file_reader_writer), + data_files_directory: Annotated[Path, Depends(get_data_files_directory)], + data_files_store: Annotated[DataFilesStore, Depends(get_data_files_store)], + file_reader_writer: Annotated[FileReaderWriter, Depends(get_file_reader_writer)], ) -> Response: """Get the requested data file by id.""" try: @@ -228,7 +234,7 @@ async def get_data_file( responses={status.HTTP_200_OK: {"model": SimpleMultiBody[str]}}, ) async def get_all_data_files( - data_files_store: DataFilesStore = Depends(get_data_files_store), + data_files_store: Annotated[DataFilesStore, Depends(get_data_files_store)], ) -> PydanticResponse[SimpleMultiBody[DataFile]]: """Get a list of all data files stored on the robot server. diff --git a/robot-server/robot_server/deck_configuration/fastapi_dependencies.py b/robot-server/robot_server/deck_configuration/fastapi_dependencies.py index f2cae3ab468..eb1a98fd6f9 100644 --- a/robot-server/robot_server/deck_configuration/fastapi_dependencies.py +++ b/robot-server/robot_server/deck_configuration/fastapi_dependencies.py @@ -2,7 +2,7 @@ from pathlib import Path -from typing import Optional +from typing import Annotated, Optional import fastapi @@ -33,12 +33,14 @@ async def get_deck_configuration_store( - app_state: AppState = fastapi.Depends(get_app_state), - deck_type: DeckType = fastapi.Depends(get_deck_type), - persistence_directory: Path = fastapi.Depends(get_active_persistence_directory), - deck_configuration_publisher: DeckConfigurationPublisher = fastapi.Depends( - get_deck_configuration_publisher - ), + app_state: Annotated[AppState, fastapi.Depends(get_app_state)], + deck_type: Annotated[DeckType, fastapi.Depends(get_deck_type)], + persistence_directory: Annotated[ + Path, fastapi.Depends(get_active_persistence_directory) + ], + deck_configuration_publisher: Annotated[ + DeckConfigurationPublisher, fastapi.Depends(get_deck_configuration_publisher) + ], ) -> DeckConfigurationStore: """Return the server's singleton `DeckConfigurationStore`.""" deck_configuration_store = _accessor.get_from(app_state) @@ -57,14 +59,14 @@ async def get_deck_configuration_store( # TODO(mm, 2024-02-07): Resolve the duplication between these two implementations. 
async def get_deck_configuration_store_failsafe( - app_state: AppState = fastapi.Depends(get_app_state), - deck_type: DeckType = fastapi.Depends(get_deck_type), - persistence_directory: Optional[Path] = fastapi.Depends( - get_active_persistence_directory_failsafe - ), - deck_configuration_publisher: DeckConfigurationPublisher = fastapi.Depends( - get_deck_configuration_publisher - ), + app_state: Annotated[AppState, fastapi.Depends(get_app_state)], + deck_type: Annotated[DeckType, fastapi.Depends(get_deck_type)], + persistence_directory: Annotated[ + Optional[Path], fastapi.Depends(get_active_persistence_directory_failsafe) + ], + deck_configuration_publisher: Annotated[ + DeckConfigurationPublisher, fastapi.Depends(get_deck_configuration_publisher) + ], ) -> Optional[DeckConfigurationStore]: """Return the server's singleton `DeckConfigurationStore`. diff --git a/robot-server/robot_server/deck_configuration/router.py b/robot-server/robot_server/deck_configuration/router.py index f458d1af194..cfb31c9f030 100644 --- a/robot-server/robot_server/deck_configuration/router.py +++ b/robot-server/robot_server/deck_configuration/router.py @@ -2,7 +2,7 @@ from datetime import datetime -from typing import Union +from typing import Annotated, Union import fastapi from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY @@ -62,9 +62,11 @@ ) async def put_deck_configuration( # noqa: D103 request_body: RequestModel[models.DeckConfigurationRequest], - store: DeckConfigurationStore = fastapi.Depends(get_deck_configuration_store), - now: datetime = fastapi.Depends(get_current_time), - deck_definition: DeckDefinitionV5 = fastapi.Depends(get_deck_definition), + store: Annotated[ + DeckConfigurationStore, fastapi.Depends(get_deck_configuration_store) + ], + now: Annotated[datetime, fastapi.Depends(get_current_time)], + deck_definition: Annotated[DeckDefinitionV5, fastapi.Depends(get_deck_definition)], ) -> PydanticResponse[ Union[ SimpleBody[models.DeckConfigurationResponse], @@ -104,7 +106,9 @@ async def put_deck_configuration( # noqa: D103 }, ) async def get_deck_configuration( # noqa: D103 - store: DeckConfigurationStore = fastapi.Depends(get_deck_configuration_store), + store: Annotated[ + DeckConfigurationStore, fastapi.Depends(get_deck_configuration_store) + ], ) -> PydanticResponse[SimpleBody[models.DeckConfigurationResponse]]: return await PydanticResponse.create( content=SimpleBody.construct(data=await store.get()) diff --git a/robot-server/robot_server/hardware.py b/robot-server/robot_server/hardware.py index 039f727ce87..e6a49ebed7f 100644 --- a/robot-server/robot_server/hardware.py +++ b/robot-server/robot_server/hardware.py @@ -6,6 +6,7 @@ from typing import ( TYPE_CHECKING, cast, + Annotated, Awaitable, Callable, Iterator, @@ -241,7 +242,7 @@ async def fbl_clean_up(app_state: AppState) -> None: # TODO(mm, 2022-10-18): Deduplicate this background initialization infrastructure # with similar code used for initializing the persistence layer. async def get_thread_manager( - app_state: AppState = Depends(get_app_state), + app_state: Annotated[AppState, Depends(get_app_state)], ) -> ThreadManagedHardware: """Get the ThreadManager'd HardwareAPI as a route dependency. @@ -285,7 +286,7 @@ async def get_thread_manager( async def get_hardware( - thread_manager: ThreadManagedHardware = Depends(get_thread_manager), + thread_manager: Annotated[ThreadManagedHardware, Depends(get_thread_manager)], ) -> HardwareControlAPI: """Get the HardwareAPI as a route dependency. 
@@ -323,7 +324,7 @@ def get_ot3_hardware( def get_ot2_hardware( - thread_manager: ThreadManagedHardware = Depends(get_thread_manager), + thread_manager: Annotated[ThreadManagedHardware, Depends(get_thread_manager)], ) -> "API": """Get an OT2 hardware controller.""" if not thread_manager.wraps_instance(API): @@ -334,9 +335,9 @@ def get_ot2_hardware( async def get_firmware_update_manager( - app_state: AppState = Depends(get_app_state), - thread_manager: ThreadManagedHardware = Depends(get_thread_manager), - task_runner: TaskRunner = Depends(get_task_runner), + app_state: Annotated[AppState, Depends(get_app_state)], + thread_manager: Annotated[ThreadManagedHardware, Depends(get_thread_manager)], + task_runner: Annotated[TaskRunner, Depends(get_task_runner)], ) -> FirmwareUpdateManager: """Get an update manager to track firmware update statuses.""" hardware = get_ot3_hardware(thread_manager) @@ -351,8 +352,8 @@ async def get_firmware_update_manager( async def get_estop_handler( - app_state: AppState = Depends(get_app_state), - thread_manager: ThreadManagedHardware = Depends(get_thread_manager), + app_state: Annotated[AppState, Depends(get_app_state)], + thread_manager: Annotated[ThreadManagedHardware, Depends(get_thread_manager)], ) -> EstopHandler: """Get an Estop Handler for working with the estop.""" hardware = get_ot3_hardware(thread_manager) @@ -380,7 +381,7 @@ async def get_deck_type() -> DeckType: async def get_deck_definition( - deck_type: DeckType = Depends(get_deck_type), + deck_type: Annotated[DeckType, Depends(get_deck_type)], ) -> deck.types.DeckDefinitionV5: """Return this robot's deck definition.""" return deck.load(deck_type, version=5) diff --git a/robot-server/robot_server/health/router.py b/robot-server/robot_server/health/router.py index 92cdfd7cd63..86a78255cd7 100644 --- a/robot-server/robot_server/health/router.py +++ b/robot-server/robot_server/health/router.py @@ -1,7 +1,7 @@ """HTTP routes and handlers for /health endpoints.""" from dataclasses import dataclass from fastapi import APIRouter, Depends, status -from typing import Dict, cast +from typing import Annotated, Dict, cast import logging import json @@ -123,16 +123,16 @@ def _system_version_or_fallback() -> str: }, ) async def get_health( - hardware: HardwareControlAPI = Depends(get_hardware), + hardware: Annotated[HardwareControlAPI, Depends(get_hardware)], # This endpoint doesn't actually need sql_engine. We use it in order to artificially # fail requests until the database has finished initializing. This plays into the # Opentrons App's current error handling. With a non-healthy /health, the app will # block off most of its robot details UI. This prevents the user from trying things # like viewing runs and uploading protocols, which would hit "database not ready" # errors that would present in a confusing way. - sql_engine: object = Depends(ensure_sql_engine_is_ready), - versions: ComponentVersions = Depends(get_versions), - robot_type: RobotType = Depends(get_robot_type), + sql_engine: Annotated[object, Depends(ensure_sql_engine_is_ready)], + versions: Annotated[ComponentVersions, Depends(get_versions)], + robot_type: Annotated[RobotType, Depends(get_robot_type)], ) -> Health: """Get information about the health of the robot server. 
diff --git a/robot-server/robot_server/instruments/router.py b/robot-server/robot_server/instruments/router.py index 561e295a8d1..b34a5d0c749 100644 --- a/robot-server/robot_server/instruments/router.py +++ b/robot-server/robot_server/instruments/router.py @@ -1,5 +1,5 @@ """Instruments routes.""" -from typing import Optional, Dict, List, cast +from typing import Annotated, Optional, Dict, List, cast from fastapi import APIRouter, status, Depends @@ -265,7 +265,7 @@ async def _get_attached_instruments_ot2( responses={status.HTTP_200_OK: {"model": SimpleMultiBody[AttachedItem]}}, ) async def get_attached_instruments( - hardware: HardwareControlAPI = Depends(get_hardware), + hardware: Annotated[HardwareControlAPI, Depends(get_hardware)], ) -> PydanticResponse[SimpleMultiBody[AttachedItem]]: """Get a list of all attached instruments.""" try: diff --git a/robot-server/robot_server/maintenance_runs/dependencies.py b/robot-server/robot_server/maintenance_runs/dependencies.py index dda7db0d0e0..a20d8ce7eaa 100644 --- a/robot-server/robot_server/maintenance_runs/dependencies.py +++ b/robot-server/robot_server/maintenance_runs/dependencies.py @@ -1,4 +1,6 @@ """Maintenance Run router dependency-injection wire-up.""" +from typing import Annotated + from fastapi import Depends from opentrons_shared_data.robot.types import RobotType @@ -26,10 +28,10 @@ async def get_maintenance_run_orchestrator_store( - app_state: AppState = Depends(get_app_state), - hardware_api: HardwareControlAPI = Depends(get_hardware), - deck_type: DeckType = Depends(get_deck_type), - robot_type: RobotType = Depends(get_robot_type), + app_state: Annotated[AppState, Depends(get_app_state)], + hardware_api: Annotated[HardwareControlAPI, Depends(get_hardware)], + deck_type: Annotated[DeckType, Depends(get_deck_type)], + robot_type: Annotated[RobotType, Depends(get_robot_type)], ) -> MaintenanceRunOrchestratorStore: """Get a singleton MaintenanceRunOrchestratorStore to keep track of created engines / runners.""" run_orchestrator_store = _run_orchestrator_store_accessor.get_from(app_state) @@ -44,12 +46,12 @@ async def get_maintenance_run_orchestrator_store( async def get_maintenance_run_data_manager( - run_orchestrator_store: MaintenanceRunOrchestratorStore = Depends( - get_maintenance_run_orchestrator_store - ), - maintenance_runs_publisher: MaintenanceRunsPublisher = Depends( - get_maintenance_runs_publisher - ), + run_orchestrator_store: Annotated[ + MaintenanceRunOrchestratorStore, Depends(get_maintenance_run_orchestrator_store) + ], + maintenance_runs_publisher: Annotated[ + MaintenanceRunsPublisher, Depends(get_maintenance_runs_publisher) + ], ) -> MaintenanceRunDataManager: """Get a maintenance run data manager to keep track of current run data.""" return MaintenanceRunDataManager( diff --git a/robot-server/robot_server/maintenance_runs/router/base_router.py b/robot-server/robot_server/maintenance_runs/router/base_router.py index ada9208a49a..0e9abc62553 100644 --- a/robot-server/robot_server/maintenance_runs/router/base_router.py +++ b/robot-server/robot_server/maintenance_runs/router/base_router.py @@ -5,7 +5,7 @@ import logging from datetime import datetime from textwrap import dedent -from typing import Optional, Callable +from typing import Annotated, Optional, Callable from typing_extensions import Literal from fastapi import APIRouter, Depends, status @@ -104,9 +104,9 @@ class AllRunsLinks(BaseModel): async def get_run_data_from_url( runId: str, - run_data_manager: MaintenanceRunDataManager = Depends( - 
get_maintenance_run_data_manager - ), + run_data_manager: Annotated[ + MaintenanceRunDataManager, Depends(get_maintenance_run_data_manager) + ], ) -> MaintenanceRun: """Get the data of a maintenance run. @@ -143,20 +143,20 @@ async def get_run_data_from_url( }, ) async def create_run( + run_data_manager: Annotated[ + MaintenanceRunDataManager, Depends(get_maintenance_run_data_manager) + ], + run_id: Annotated[str, Depends(get_unique_id)], + created_at: Annotated[datetime, Depends(get_current_time)], + is_ok_to_create_maintenance_run: Annotated[ + bool, Depends(get_is_okay_to_create_maintenance_run) + ], + check_estop: Annotated[bool, Depends(require_estop_in_good_state)], + deck_configuration_store: Annotated[ + DeckConfigurationStore, Depends(get_deck_configuration_store) + ], + notify_publishers: Annotated[Callable[[], None], Depends(get_pe_notify_publishers)], request_body: Optional[RequestModel[MaintenanceRunCreate]] = None, - run_data_manager: MaintenanceRunDataManager = Depends( - get_maintenance_run_data_manager - ), - run_id: str = Depends(get_unique_id), - created_at: datetime = Depends(get_current_time), - is_ok_to_create_maintenance_run: bool = Depends( - get_is_okay_to_create_maintenance_run - ), - check_estop: bool = Depends(require_estop_in_good_state), - deck_configuration_store: DeckConfigurationStore = Depends( - get_deck_configuration_store - ), - notify_publishers: Callable[[], None] = Depends(get_pe_notify_publishers), ) -> PydanticResponse[SimpleBody[MaintenanceRun]]: """Create a new maintenance run. @@ -204,9 +204,9 @@ async def create_run( }, ) async def get_current_run( - run_data_manager: MaintenanceRunDataManager = Depends( - get_maintenance_run_data_manager - ), + run_data_manager: Annotated[ + MaintenanceRunDataManager, Depends(get_maintenance_run_data_manager) + ], ) -> PydanticResponse[Body[MaintenanceRun, AllRunsLinks]]: """Get the current maintenance run. @@ -241,7 +241,7 @@ async def get_current_run( }, ) async def get_run( - run_data: MaintenanceRun = Depends(get_run_data_from_url), + run_data: Annotated[MaintenanceRun, Depends(get_run_data_from_url)], ) -> PydanticResponse[SimpleBody[MaintenanceRun]]: """Get a maintenance run by its ID. @@ -266,9 +266,9 @@ async def get_run( ) async def remove_run( runId: str, - run_data_manager: MaintenanceRunDataManager = Depends( - get_maintenance_run_data_manager - ), + run_data_manager: Annotated[ + MaintenanceRunDataManager, Depends(get_maintenance_run_data_manager) + ], ) -> PydanticResponse[SimpleEmptyBody]: """Delete a maintenance run by its ID. diff --git a/robot-server/robot_server/maintenance_runs/router/commands_router.py b/robot-server/robot_server/maintenance_runs/router/commands_router.py index 85f1a8a3bd0..9df5f9630b9 100644 --- a/robot-server/robot_server/maintenance_runs/router/commands_router.py +++ b/robot-server/robot_server/maintenance_runs/router/commands_router.py @@ -1,6 +1,6 @@ """Router for /maintenance_runs commands endpoints.""" import textwrap -from typing import Optional, Union +from typing import Annotated, Optional, Union from typing_extensions import Final, Literal from fastapi import APIRouter, Depends, Query, status @@ -58,9 +58,9 @@ class CommandNotAllowed(ErrorDetails): async def get_current_run_from_url( runId: str, - run_orchestrator_store: MaintenanceRunOrchestratorStore = Depends( - get_maintenance_run_orchestrator_store - ), + run_orchestrator_store: Annotated[ + MaintenanceRunOrchestratorStore, Depends(get_maintenance_run_orchestrator_store) + ], ) -> str: """Get run from url. 
@@ -100,39 +100,43 @@ async def get_current_run_from_url( ) async def create_run_command( request_body: RequestModelWithCommandCreate, - waitUntilComplete: bool = Query( - default=False, - description=( - "If `false`, return immediately, while the new command is still queued." - " If `true`, only return once the new command succeeds or fails," - " or when the timeout is reached. See the `timeout` query parameter." + run_orchestrator_store: Annotated[ + MaintenanceRunOrchestratorStore, Depends(get_maintenance_run_orchestrator_store) + ], + run_id: Annotated[str, Depends(get_current_run_from_url)], + check_estop: Annotated[bool, Depends(require_estop_in_good_state)], + waitUntilComplete: Annotated[ + bool, + Query( + description=( + "If `false`, return immediately, while the new command is still queued." + " If `true`, only return once the new command succeeds or fails," + " or when the timeout is reached. See the `timeout` query parameter." + ), ), - ), - run_orchestrator_store: MaintenanceRunOrchestratorStore = Depends( - get_maintenance_run_orchestrator_store - ), - timeout: Optional[int] = Query( - default=None, - gt=0, - description=( - "If `waitUntilComplete` is `true`," - " the maximum time in milliseconds to wait before returning." - " The default is infinite." - "\n\n" - "The timer starts as soon as you enqueue the new command with this request," - " *not* when the new command starts running. So if there are other commands" - " in the queue before the new one, they will also count towards the" - " timeout." - "\n\n" - "If the timeout elapses before the command succeeds or fails," - " the command will be returned with its current status." - "\n\n" - "Compatibility note: on robot software v6.2.0 and older," - " the default was 30 seconds, not infinite." + ] = False, + timeout: Annotated[ + Optional[int], + Query( + gt=0, + description=( + "If `waitUntilComplete` is `true`," + " the maximum time in milliseconds to wait before returning." + " The default is infinite." + "\n\n" + "The timer starts as soon as you enqueue the new command with this request," + " *not* when the new command starts running. So if there are other commands" + " in the queue before the new one, they will also count towards the" + " timeout." + "\n\n" + "If the timeout elapses before the command succeeds or fails," + " the command will be returned with its current status." + "\n\n" + "Compatibility note: on robot software v6.2.0 and older," + " the default was 30 seconds, not infinite." + ), ), - ), - run_id: str = Depends(get_current_run_from_url), - check_estop: bool = Depends(require_estop_in_good_state), + ] = None, ) -> PydanticResponse[SimpleBody[pe_commands.Command]]: """Enqueue a protocol command. @@ -186,21 +190,25 @@ async def create_run_command( ) async def get_run_commands( runId: str, - cursor: Optional[int] = Query( - None, - description=( - "The starting index of the desired first command in the list." - " If unspecified, a cursor will be selected automatically" - " based on the currently running or most recently executed command." + run_data_manager: Annotated[ + MaintenanceRunDataManager, Depends(get_maintenance_run_data_manager) + ], + cursor: Annotated[ + Optional[int], + Query( + description=( + "The starting index of the desired first command in the list." + " If unspecified, a cursor will be selected automatically" + " based on the currently running or most recently executed command." 
+ ), ), - ), - pageLength: int = Query( - _DEFAULT_COMMAND_LIST_LENGTH, - description="The maximum number of commands in the list to return.", - ), - run_data_manager: MaintenanceRunDataManager = Depends( - get_maintenance_run_data_manager - ), + ] = None, + pageLength: Annotated[ + int, + Query( + description="The maximum number of commands in the list to return.", + ), + ] = _DEFAULT_COMMAND_LIST_LENGTH, ) -> PydanticResponse[MultiBody[RunCommandSummary, CommandCollectionLinks]]: """Get a summary of a set of commands in a run. @@ -273,9 +281,9 @@ async def get_run_commands( async def get_run_command( runId: str, commandId: str, - run_data_manager: MaintenanceRunDataManager = Depends( - get_maintenance_run_data_manager - ), + run_data_manager: Annotated[ + MaintenanceRunDataManager, Depends(get_maintenance_run_data_manager) + ], ) -> PydanticResponse[SimpleBody[pe_commands.Command]]: """Get a specific command from a run. diff --git a/robot-server/robot_server/maintenance_runs/router/labware_router.py b/robot-server/robot_server/maintenance_runs/router/labware_router.py index c7da086e9e0..72fc09d911a 100644 --- a/robot-server/robot_server/maintenance_runs/router/labware_router.py +++ b/robot-server/robot_server/maintenance_runs/router/labware_router.py @@ -1,4 +1,5 @@ """Router for /maintenance_runs endpoints dealing with labware offsets and definitions.""" +from typing import Annotated import logging from fastapi import APIRouter, Depends, status @@ -37,10 +38,10 @@ ) async def add_labware_offset( request_body: RequestModel[LabwareOffsetCreate], - run_orchestrator_store: MaintenanceRunOrchestratorStore = Depends( - get_maintenance_run_orchestrator_store - ), - run: MaintenanceRun = Depends(get_run_data_from_url), + run_orchestrator_store: Annotated[ + MaintenanceRunOrchestratorStore, Depends(get_maintenance_run_orchestrator_store) + ], + run: Annotated[MaintenanceRun, Depends(get_run_data_from_url)], ) -> PydanticResponse[SimpleBody[LabwareOffset]]: """Add a labware offset to a maintenance run. @@ -76,10 +77,10 @@ async def add_labware_offset( ) async def add_labware_definition( request_body: RequestModel[LabwareDefinition], - run_orchestrator_store: MaintenanceRunOrchestratorStore = Depends( - get_maintenance_run_orchestrator_store - ), - run: MaintenanceRun = Depends(get_run_data_from_url), + run_orchestrator_store: Annotated[ + MaintenanceRunOrchestratorStore, Depends(get_maintenance_run_orchestrator_store) + ], + run: Annotated[MaintenanceRun, Depends(get_run_data_from_url)], ) -> PydanticResponse[SimpleBody[LabwareDefinitionSummary]]: """Add a labware offset to a run. 
diff --git a/robot-server/robot_server/modules/module_data_mapper.py b/robot-server/robot_server/modules/module_data_mapper.py index 6b7ca918e09..52f95489216 100644 --- a/robot-server/robot_server/modules/module_data_mapper.py +++ b/robot-server/robot_server/modules/module_data_mapper.py @@ -1,5 +1,5 @@ """Module identification and response data mapping.""" -from typing import Type, cast, Optional +from typing import Annotated, Type, cast, Optional from fastapi import Depends from opentrons_shared_data.module import load_definition @@ -48,7 +48,7 @@ class ModuleDataMapper: """Map hardware control modules to module response.""" - def __init__(self, deck_type: DeckType = Depends(get_deck_type)) -> None: + def __init__(self, deck_type: Annotated[DeckType, Depends(get_deck_type)]) -> None: self.deck_type = deck_type def map_data( diff --git a/robot-server/robot_server/modules/router.py b/robot-server/robot_server/modules/router.py index 8155a88c4a6..1f630d9bdb6 100644 --- a/robot-server/robot_server/modules/router.py +++ b/robot-server/robot_server/modules/router.py @@ -1,6 +1,6 @@ """Modules routes.""" from fastapi import APIRouter, Depends, status -from typing import List, Dict +from typing import Annotated, List, Dict from opentrons.hardware_control import HardwareControlAPI from opentrons.hardware_control.modules import module_calibration @@ -34,10 +34,10 @@ }, ) async def get_attached_modules( - requested_version: int = Depends(get_requested_version), - hardware: HardwareControlAPI = Depends(get_hardware), - module_identifier: ModuleIdentifier = Depends(ModuleIdentifier), - module_data_mapper: ModuleDataMapper = Depends(ModuleDataMapper), + requested_version: Annotated[int, Depends(get_requested_version)], + hardware: Annotated[HardwareControlAPI, Depends(get_hardware)], + module_identifier: Annotated[ModuleIdentifier, Depends(ModuleIdentifier)], + module_data_mapper: Annotated[ModuleDataMapper, Depends(ModuleDataMapper)], ) -> PydanticResponse[SimpleMultiBody[AttachedModule]]: """Get a list of all attached modules.""" if requested_version <= 2: diff --git a/robot-server/robot_server/persistence/fastapi_dependencies.py b/robot-server/robot_server/persistence/fastapi_dependencies.py index d2bd3790965..465a5f09912 100644 --- a/robot-server/robot_server/persistence/fastapi_dependencies.py +++ b/robot-server/robot_server/persistence/fastapi_dependencies.py @@ -4,7 +4,7 @@ import asyncio import logging from pathlib import Path -from typing import Awaitable, Callable, Iterable, Optional +from typing import Annotated, Awaitable, Callable, Iterable, Optional from typing_extensions import Literal from sqlalchemy.engine import Engine as SQLEngine @@ -173,7 +173,7 @@ async def clean_up_persistence(app_state: AppState) -> None: async def get_sql_engine( - app_state: AppState = Depends(get_app_state), + app_state: Annotated[AppState, Depends(get_app_state)], ) -> SQLEngine: """Return the server's singleton SQLAlchemy Engine for accessing the database. @@ -207,7 +207,7 @@ async def get_sql_engine( async def get_active_persistence_directory( - app_state: AppState = Depends(get_app_state), + app_state: Annotated[AppState, Depends(get_app_state)], ) -> Path: """Return the path to the server's persistence directory. 
@@ -250,7 +250,7 @@ async def get_active_persistence_directory( async def get_active_persistence_directory_failsafe( - app_state: AppState = Depends(get_app_state), + app_state: Annotated[AppState, Depends(get_app_state)], ) -> Optional[Path]: """Return the path to the server's persistence directory. @@ -272,7 +272,7 @@ async def get_active_persistence_directory_failsafe( async def _get_persistence_directory_root( - app_state: AppState = Depends(get_app_state), + app_state: Annotated[AppState, Depends(get_app_state)], ) -> Path: """Return the root persistence directory. @@ -287,7 +287,7 @@ async def _get_persistence_directory_root( async def get_persistence_resetter( # We want to reset everything, not only the *active* persistence directory. - directory_to_reset: Path = Depends(_get_persistence_directory_root), + directory_to_reset: Annotated[Path, Depends(_get_persistence_directory_root)], ) -> PersistenceResetter: """Get a `PersistenceResetter` to reset the robot-server's stored data.""" return PersistenceResetter(directory_to_reset) diff --git a/robot-server/robot_server/protocols/dependencies.py b/robot-server/robot_server/protocols/dependencies.py index d3ce2bbe6b1..79c9f82fb1a 100644 --- a/robot-server/robot_server/protocols/dependencies.py +++ b/robot-server/robot_server/protocols/dependencies.py @@ -3,7 +3,7 @@ from asyncio import Lock as AsyncLock from pathlib import Path -from typing_extensions import Final +from typing import Annotated, Final import logging from anyio import Path as AsyncPath @@ -64,8 +64,8 @@ def get_file_hasher() -> FileHasher: async def get_protocol_directory( - app_state: AppState = Depends(get_app_state), - persistence_directory: Path = Depends(get_active_persistence_directory), + app_state: Annotated[AppState, Depends(get_app_state)], + persistence_directory: Annotated[Path, Depends(get_active_persistence_directory)], ) -> Path: """Get the directory to save protocol files, creating it if needed.""" async with _protocol_directory_init_lock: @@ -79,10 +79,10 @@ async def get_protocol_directory( async def get_protocol_store( - app_state: AppState = Depends(get_app_state), - sql_engine: SQLEngine = Depends(get_sql_engine), - protocol_directory: Path = Depends(get_protocol_directory), - protocol_reader: ProtocolReader = Depends(get_protocol_reader), + app_state: Annotated[AppState, Depends(get_app_state)], + sql_engine: Annotated[SQLEngine, Depends(get_sql_engine)], + protocol_directory: Annotated[Path, Depends(get_protocol_directory)], + protocol_reader: Annotated[ProtocolReader, Depends(get_protocol_reader)], ) -> ProtocolStore: """Get a singleton ProtocolStore to keep track of created protocols.""" async with _protocol_store_init_lock: @@ -99,8 +99,8 @@ async def get_protocol_store( async def get_analysis_store( - app_state: AppState = Depends(get_app_state), - sql_engine: SQLEngine = Depends(get_sql_engine), + app_state: Annotated[AppState, Depends(get_app_state)], + sql_engine: Annotated[SQLEngine, Depends(get_sql_engine)], ) -> AnalysisStore: """Get a singleton AnalysisStore to keep track of created analyses.""" analysis_store = _analysis_store_accessor.get_from(app_state) @@ -113,9 +113,9 @@ async def get_analysis_store( async def get_analyses_manager( - app_state: AppState = Depends(get_app_state), - analysis_store: AnalysisStore = Depends(get_analysis_store), - task_runner: TaskRunner = Depends(get_task_runner), + app_state: Annotated[AppState, Depends(get_app_state)], + analysis_store: Annotated[AnalysisStore, Depends(get_analysis_store)], + 
task_runner: Annotated[TaskRunner, Depends(get_task_runner)], ) -> AnalysesManager: """Get a singleton AnalysesManager to keep track of analyzers.""" analyses_manager = _analyses_manager_accessor.get_from(app_state) @@ -130,7 +130,7 @@ async def get_analyses_manager( async def get_protocol_auto_deleter( - protocol_store: ProtocolStore = Depends(get_protocol_store), + protocol_store: Annotated[ProtocolStore, Depends(get_protocol_store)], ) -> ProtocolAutoDeleter: """Get a `ProtocolAutoDeleter` to delete old protocols.""" return ProtocolAutoDeleter( @@ -143,7 +143,7 @@ async def get_protocol_auto_deleter( async def get_quick_transfer_protocol_auto_deleter( - protocol_store: ProtocolStore = Depends(get_protocol_store), + protocol_store: Annotated[ProtocolStore, Depends(get_protocol_store)], ) -> ProtocolAutoDeleter: """Get a `ProtocolAutoDeleter` to delete old quick transfer protocols.""" return ProtocolAutoDeleter( diff --git a/robot-server/robot_server/protocols/router.py b/robot-server/robot_server/protocols/router.py index 2ea216f9f29..563b8c21d6f 100644 --- a/robot-server/robot_server/protocols/router.py +++ b/robot-server/robot_server/protocols/router.py @@ -5,7 +5,7 @@ from textwrap import dedent from datetime import datetime from pathlib import Path -from typing import List, Optional, Union, Tuple +from typing import Annotated, List, Literal, Optional, Union, Tuple from opentrons.protocol_engine.types import ( PrimitiveRunTimeParamValuesType, @@ -13,7 +13,6 @@ ) from opentrons_shared_data.robot import user_facing_robot_type from opentrons.util.performance_helpers import TrackingFunctions -from typing_extensions import Literal from fastapi import ( APIRouter, @@ -204,62 +203,70 @@ class ProtocolLinks(BaseModel): }, ) async def create_protocol( # noqa: C901 + protocol_directory: Annotated[Path, Depends(get_protocol_directory)], + protocol_store: Annotated[ProtocolStore, Depends(get_protocol_store)], + analysis_store: Annotated[AnalysisStore, Depends(get_analysis_store)], + file_reader_writer: Annotated[FileReaderWriter, Depends(get_file_reader_writer)], + protocol_reader: Annotated[ProtocolReader, Depends(get_protocol_reader)], + file_hasher: Annotated[FileHasher, Depends(get_file_hasher)], + analyses_manager: Annotated[AnalysesManager, Depends(get_analyses_manager)], + protocol_auto_deleter: Annotated[ + ProtocolAutoDeleter, Depends(get_protocol_auto_deleter) + ], + data_files_directory: Annotated[Path, Depends(get_data_files_directory)], + data_files_store: Annotated[DataFilesStore, Depends(get_data_files_store)], + quick_transfer_protocol_auto_deleter: Annotated[ + ProtocolAutoDeleter, Depends(get_quick_transfer_protocol_auto_deleter) + ], + robot_type: Annotated[RobotType, Depends(get_robot_type)], + protocol_id: Annotated[str, Depends(get_unique_id, use_cache=False)], + analysis_id: Annotated[str, Depends(get_unique_id, use_cache=False)], + created_at: Annotated[datetime, Depends(get_current_time)], + maximum_quick_transfer_protocols: Annotated[ + int, Depends(get_maximum_quick_transfer_protocols) + ], files: List[UploadFile] = File(...), # use Form because request is multipart/form-data # https://fastapi.tiangolo.com/tutorial/request-forms-and-files/ - key: Optional[str] = Form( - default=None, - description=( - "An arbitrary client-defined string to attach to the new protocol resource." - " This should be no longer than ~100 characters or so." 
- " It's intended to store something like a UUID, to help clients that store" - " protocols locally keep track of which local files correspond to which" - " protocol resources on the robot." + key: Annotated[ + Optional[str], + Form( + description=( + "An arbitrary client-defined string to attach to the new protocol resource." + " This should be no longer than ~100 characters or so." + " It's intended to store something like a UUID, to help clients that store" + " protocols locally keep track of which local files correspond to which" + " protocol resources on the robot." + ), ), - ), - run_time_parameter_values: Optional[str] = Form( - default=None, - description="Key-value pairs of run-time parameters defined in a protocol." - " Note that this is expected to be a string holding a JSON object." - " Also, if this data is included in the request, the server will" - " always trigger an analysis (for now).", - alias="runTimeParameterValues", - ), - protocol_kind: ProtocolKind = Form( - # This default needs to be kept in sync with the function body. - # See todo comments. - default=ProtocolKind.STANDARD, - description=( - "Whether this is a `standard` protocol or a `quick-transfer` protocol." - "if omitted, the protocol will be `standard` by default." + ] = None, + run_time_parameter_values: Annotated[ + Optional[str], + Form( + description="Key-value pairs of run-time parameters defined in a protocol." + " Note that this is expected to be a string holding a JSON object." + " Also, if this data is included in the request, the server will" + " always trigger an analysis (for now).", + alias="runTimeParameterValues", ), - alias="protocolKind", - ), - run_time_parameter_files: Optional[str] = Form( - default=None, - description="Param-file pairs of CSV run-time parameters defined in the protocol.", - alias="runTimeParameterFiles", - ), - protocol_directory: Path = Depends(get_protocol_directory), - protocol_store: ProtocolStore = Depends(get_protocol_store), - analysis_store: AnalysisStore = Depends(get_analysis_store), - file_reader_writer: FileReaderWriter = Depends(get_file_reader_writer), - protocol_reader: ProtocolReader = Depends(get_protocol_reader), - file_hasher: FileHasher = Depends(get_file_hasher), - analyses_manager: AnalysesManager = Depends(get_analyses_manager), - protocol_auto_deleter: ProtocolAutoDeleter = Depends(get_protocol_auto_deleter), - quick_transfer_protocol_auto_deleter: ProtocolAutoDeleter = Depends( - get_quick_transfer_protocol_auto_deleter - ), - data_files_directory: Path = Depends(get_data_files_directory), - data_files_store: DataFilesStore = Depends(get_data_files_store), - robot_type: RobotType = Depends(get_robot_type), - protocol_id: str = Depends(get_unique_id, use_cache=False), - analysis_id: str = Depends(get_unique_id, use_cache=False), - created_at: datetime = Depends(get_current_time), - maximum_quick_transfer_protocols: int = Depends( - get_maximum_quick_transfer_protocols - ), + ] = None, + protocol_kind: Annotated[ + ProtocolKind, + Form( + description=( + "Whether this is a `standard` protocol or a `quick-transfer` protocol." + "if omitted, the protocol will be `standard` by default." + ), + alias="protocolKind", + ), + ] = ProtocolKind.STANDARD, + run_time_parameter_files: Annotated[ + Optional[str], + Form( + description="Param-file pairs of CSV run-time parameters defined in the protocol.", + alias="runTimeParameterFiles", + ), + ] = None, ) -> PydanticResponse[SimpleBody[Protocol]]: """Create a new protocol by uploading its files. 
@@ -289,12 +296,8 @@ async def create_protocol( # noqa: C901 created_at: Timestamp to attach to the new resource. maximum_quick_transfer_protocols: Robot setting value limiting stored quick transfers protocols. """ - # We have to do these isinstance checks because if `runTimeParameterValues` or - # `protocolKind` are not specified in the request, then they get assigned a - # Form(default) value instead of just the default value. \(O.o)/ # TODO: check if we can make our own "RTP multipart-form field" Pydantic type # so we can validate the data contents and return a better error response. - # TODO: check if this is still necessary after converting FastAPI args to Annotated. parsed_rtp_values = ( json.loads(run_time_parameter_values) if isinstance(run_time_parameter_values, str) @@ -305,8 +308,6 @@ async def create_protocol( # noqa: C901 if isinstance(run_time_parameter_files, str) else {} ) - if not isinstance(protocol_kind, ProtocolKind): - protocol_kind = ProtocolKind.STANDARD if protocol_kind == ProtocolKind.QUICK_TRANSFER: quick_transfer_protocols = [ @@ -522,17 +523,19 @@ async def _start_new_analysis_if_necessary( responses={status.HTTP_200_OK: {"model": SimpleMultiBody[Protocol]}}, ) async def get_protocols( - protocol_kind: Optional[ProtocolKind] = Query( - None, - description=( - "Specify the kind of protocols you want to return." - " protocol kind can be `quick-transfer` or `standard` " - " If this is omitted or `null`, all protocols will be returned." + protocol_store: Annotated[ProtocolStore, Depends(get_protocol_store)], + analysis_store: Annotated[AnalysisStore, Depends(get_analysis_store)], + protocol_kind: Annotated[ + Optional[ProtocolKind], + Query( + description=( + "Specify the kind of protocols you want to return." + " protocol kind can be `quick-transfer` or `standard` " + " If this is omitted or `null`, all protocols will be returned." + ), + alias="protocolKind", ), - alias="protocolKind", - ), - protocol_store: ProtocolStore = Depends(get_protocol_store), - analysis_store: AnalysisStore = Depends(get_analysis_store), + ] = None, ) -> PydanticResponse[SimpleMultiBody[Protocol]]: """Get a list of all currently uploaded protocols. @@ -579,7 +582,7 @@ async def get_protocols( responses={status.HTTP_200_OK: {"model": SimpleMultiBody[str]}}, ) async def get_protocol_ids( - protocol_store: ProtocolStore = Depends(get_protocol_store), + protocol_store: Annotated[ProtocolStore, Depends(get_protocol_store)], ) -> PydanticResponse[SimpleMultiBody[str]]: """Get a list of all protocol ids stored on the server. @@ -606,8 +609,8 @@ async def get_protocol_ids( ) async def get_protocol_by_id( protocolId: str, - protocol_store: ProtocolStore = Depends(get_protocol_store), - analysis_store: AnalysisStore = Depends(get_analysis_store), + protocol_store: Annotated[ProtocolStore, Depends(get_protocol_store)], + analysis_store: Annotated[AnalysisStore, Depends(get_analysis_store)], ) -> PydanticResponse[Body[Protocol, ProtocolLinks]]: """Get an uploaded protocol by ID. @@ -666,7 +669,7 @@ async def get_protocol_by_id( ) async def delete_protocol_by_id( protocolId: str, - protocol_store: ProtocolStore = Depends(get_protocol_store), + protocol_store: Annotated[ProtocolStore, Depends(get_protocol_store)], ) -> PydanticResponse[SimpleEmptyBody]: """Delete an uploaded protocol by ID. 
@@ -709,13 +712,13 @@ async def delete_protocol_by_id( ) async def create_protocol_analysis( protocolId: str, + protocol_store: Annotated[ProtocolStore, Depends(get_protocol_store)], + analysis_store: Annotated[AnalysisStore, Depends(get_analysis_store)], + analyses_manager: Annotated[AnalysesManager, Depends(get_analyses_manager)], + data_files_directory: Annotated[Path, Depends(get_data_files_directory)], + data_files_store: Annotated[DataFilesStore, Depends(get_data_files_store)], + analysis_id: Annotated[str, Depends(get_unique_id, use_cache=False)], request_body: Optional[RequestModel[AnalysisRequest]] = None, - protocol_store: ProtocolStore = Depends(get_protocol_store), - analysis_store: AnalysisStore = Depends(get_analysis_store), - analyses_manager: AnalysesManager = Depends(get_analyses_manager), - analysis_id: str = Depends(get_unique_id, use_cache=False), - data_files_directory: Path = Depends(get_data_files_directory), - data_files_store: DataFilesStore = Depends(get_data_files_store), ) -> PydanticResponse[SimpleMultiBody[AnalysisSummary]]: """Start a new analysis for the given existing protocol. @@ -785,8 +788,8 @@ async def create_protocol_analysis( ) async def get_protocol_analyses( protocolId: str, - protocol_store: ProtocolStore = Depends(get_protocol_store), - analysis_store: AnalysisStore = Depends(get_analysis_store), + protocol_store: Annotated[ProtocolStore, Depends(get_protocol_store)], + analysis_store: Annotated[AnalysisStore, Depends(get_analysis_store)], ) -> PydanticResponse[SimpleMultiBody[ProtocolAnalysis]]: """Get a protocol's full analyses list. @@ -826,8 +829,8 @@ async def get_protocol_analyses( async def get_protocol_analysis_by_id( protocolId: str, analysisId: str, - protocol_store: ProtocolStore = Depends(get_protocol_store), - analysis_store: AnalysisStore = Depends(get_analysis_store), + protocol_store: Annotated[ProtocolStore, Depends(get_protocol_store)], + analysis_store: Annotated[AnalysisStore, Depends(get_analysis_store)], ) -> PydanticResponse[SimpleBody[ProtocolAnalysis]]: """Get a protocol analysis by analysis ID. @@ -881,8 +884,8 @@ async def get_protocol_analysis_by_id( async def get_protocol_analysis_as_document( protocolId: str, analysisId: str, - protocol_store: ProtocolStore = Depends(get_protocol_store), - analysis_store: AnalysisStore = Depends(get_analysis_store), + protocol_store: Annotated[ProtocolStore, Depends(get_protocol_store)], + analysis_store: Annotated[AnalysisStore, Depends(get_analysis_store)], ) -> PlainTextResponse: """Get a protocol analysis by analysis ID. @@ -924,7 +927,7 @@ async def get_protocol_analysis_as_document( ) async def get_protocol_data_files( protocolId: str, - protocol_store: ProtocolStore = Depends(get_protocol_store), + protocol_store: Annotated[ProtocolStore, Depends(get_protocol_store)], ) -> PydanticResponse[SimpleMultiBody[DataFile]]: """Get the list of all data files associated with a protocol. 
diff --git a/robot-server/robot_server/robot/control/dependencies.py b/robot-server/robot_server/robot/control/dependencies.py index ecd7becfb87..ba148cc4a55 100644 --- a/robot-server/robot_server/robot/control/dependencies.py +++ b/robot-server/robot_server/robot/control/dependencies.py @@ -1,4 +1,6 @@ """Dependencies related to /robot/control endpoints.""" +from typing import Annotated + from fastapi import status, Depends from opentrons.hardware_control import ThreadManagedHardware @@ -18,7 +20,7 @@ async def require_estop_in_good_state( - thread_manager: ThreadManagedHardware = Depends(get_thread_manager), + thread_manager: Annotated[ThreadManagedHardware, Depends(get_thread_manager)], ) -> bool: """Check that the estop is in a good state. diff --git a/robot-server/robot_server/robot/control/router.py b/robot-server/robot_server/robot/control/router.py index 8bada478caf..35910748115 100644 --- a/robot-server/robot_server/robot/control/router.py +++ b/robot-server/robot_server/robot/control/router.py @@ -1,6 +1,6 @@ """Router for /robot/control endpoints.""" from fastapi import APIRouter, status, Depends -from typing import TYPE_CHECKING +from typing import Annotated, TYPE_CHECKING from opentrons_shared_data.robot.types import RobotType from opentrons_shared_data.robot.types import RobotTypeEnum @@ -48,7 +48,7 @@ async def _get_estop_status_response( }, ) async def get_estop_status( - estop_handler: EstopHandler = Depends(get_estop_handler), + estop_handler: Annotated[EstopHandler, Depends(get_estop_handler)], ) -> PydanticResponse[SimpleBody[EstopStatusModel]]: """Return the current status of the estop.""" return await _get_estop_status_response(estop_handler) @@ -66,14 +66,16 @@ async def get_estop_status( }, ) async def put_acknowledge_estop_disengage( - estop_handler: EstopHandler = Depends(get_estop_handler), + estop_handler: Annotated[EstopHandler, Depends(get_estop_handler)], ) -> PydanticResponse[SimpleBody[EstopStatusModel]]: """Transition from the `logically_engaged` status if applicable.""" estop_handler.acknowledge_and_clear() return await _get_estop_status_response(estop_handler) -def get_door_switch_required(robot_type: RobotType = Depends(get_robot_type)) -> bool: +def get_door_switch_required( + robot_type: Annotated[RobotType, Depends(get_robot_type)] +) -> bool: return ff.enable_door_safety_switch(RobotTypeEnum.robot_literal_to_enum(robot_type)) @@ -85,8 +87,8 @@ def get_door_switch_required(robot_type: RobotType = Depends(get_robot_type)) -> responses={status.HTTP_200_OK: {"model": SimpleBody[DoorStatusModel]}}, ) async def get_door_status( - hardware: HardwareControlAPI = Depends(get_hardware), - door_required: bool = Depends(get_door_switch_required), + hardware: Annotated[HardwareControlAPI, Depends(get_hardware)], + door_required: Annotated[bool, Depends(get_door_switch_required)], ) -> PydanticResponse[SimpleBody[DoorStatusModel]]: return await PydanticResponse.create( content=SimpleBody.construct( diff --git a/robot-server/robot_server/runs/dependencies.py b/robot-server/robot_server/runs/dependencies.py index c3a990d38c8..146297715a9 100644 --- a/robot-server/robot_server/runs/dependencies.py +++ b/robot-server/robot_server/runs/dependencies.py @@ -1,4 +1,6 @@ """Run router dependency-injection wire-up.""" +from typing import Annotated + from fastapi import Depends, status from robot_server.protocols.dependencies import get_protocol_store from robot_server.protocols.protocol_models import ProtocolKind @@ -46,8 +48,8 @@ async def get_run_store( - app_state: 
AppState = Depends(get_app_state), - sql_engine: SQLEngine = Depends(get_sql_engine), + app_state: Annotated[AppState, Depends(get_app_state)], + sql_engine: Annotated[SQLEngine, Depends(get_sql_engine)], ) -> RunStore: """Get a singleton RunStore to keep track of created runs.""" run_store = _run_store_accessor.get_from(app_state) @@ -99,7 +101,7 @@ async def mark_light_control_startup_finished( async def get_light_controller( - app_state: AppState = Depends(get_app_state), + app_state: Annotated[AppState, Depends(get_app_state)], ) -> LightController: """Get the light controller as a dependency. @@ -112,11 +114,11 @@ async def get_light_controller( async def get_run_orchestrator_store( - app_state: AppState = Depends(get_app_state), - hardware_api: HardwareControlAPI = Depends(get_hardware), - robot_type: RobotType = Depends(get_robot_type), - deck_type: DeckType = Depends(get_deck_type), - light_controller: LightController = Depends(get_light_controller), + app_state: Annotated[AppState, Depends(get_app_state)], + hardware_api: Annotated[HardwareControlAPI, Depends(get_hardware)], + robot_type: Annotated[RobotType, Depends(get_robot_type)], + deck_type: Annotated[DeckType, Depends(get_deck_type)], + light_controller: Annotated[LightController, Depends(get_light_controller)], ) -> RunOrchestratorStore: """Get a singleton EngineStore to keep track of created engines / runners.""" run_orchestrator_store = _run_orchestrator_store_accessor.get_from(app_state) @@ -135,7 +137,9 @@ async def get_run_orchestrator_store( async def get_is_okay_to_create_maintenance_run( - run_orchestrator_store: RunOrchestratorStore = Depends(get_run_orchestrator_store), + run_orchestrator_store: Annotated[ + RunOrchestratorStore, Depends(get_run_orchestrator_store) + ], ) -> bool: """Whether a maintenance run can be created if a protocol run already exists.""" try: @@ -146,10 +150,12 @@ async def get_is_okay_to_create_maintenance_run( async def get_run_data_manager( - task_runner: TaskRunner = Depends(get_task_runner), - run_orchestrator_store: RunOrchestratorStore = Depends(get_run_orchestrator_store), - run_store: RunStore = Depends(get_run_store), - runs_publisher: RunsPublisher = Depends(get_runs_publisher), + task_runner: Annotated[TaskRunner, Depends(get_task_runner)], + run_orchestrator_store: Annotated[ + RunOrchestratorStore, Depends(get_run_orchestrator_store) + ], + run_store: Annotated[RunStore, Depends(get_run_store)], + runs_publisher: Annotated[RunsPublisher, Depends(get_runs_publisher)], ) -> RunDataManager: """Get a run data manager to keep track of current/historical run data.""" return RunDataManager( @@ -161,8 +167,8 @@ async def get_run_data_manager( async def get_run_auto_deleter( - run_store: RunStore = Depends(get_run_store), - protocol_store: ProtocolStore = Depends(get_protocol_store), + run_store: Annotated[RunStore, Depends(get_run_store)], + protocol_store: Annotated[ProtocolStore, Depends(get_protocol_store)], ) -> RunAutoDeleter: """Get an `AutoDeleter` to delete old runs.""" return RunAutoDeleter( @@ -174,8 +180,8 @@ async def get_run_auto_deleter( async def get_quick_transfer_run_auto_deleter( - run_store: RunStore = Depends(get_run_store), - protocol_store: ProtocolStore = Depends(get_protocol_store), + run_store: Annotated[RunStore, Depends(get_run_store)], + protocol_store: Annotated[ProtocolStore, Depends(get_protocol_store)], ) -> RunAutoDeleter: """Get an `AutoDeleter` to delete old runs for quick transfer prorotocols.""" return RunAutoDeleter( diff --git 
a/robot-server/robot_server/runs/router/actions_router.py b/robot-server/robot_server/runs/router/actions_router.py index 3323562fa22..6ceb6eadef6 100644 --- a/robot-server/robot_server/runs/router/actions_router.py +++ b/robot-server/robot_server/runs/router/actions_router.py @@ -3,8 +3,7 @@ from fastapi import APIRouter, Depends, status from datetime import datetime -from typing import Union -from typing_extensions import Literal +from typing import Annotated, Literal, Union from robot_server.errors.error_responses import ErrorDetails, ErrorBody from robot_server.service.dependencies import get_current_time, get_unique_id @@ -45,10 +44,12 @@ class RunActionNotAllowed(ErrorDetails): async def get_run_controller( runId: str, - task_runner: TaskRunner = Depends(get_task_runner), - run_orchestrator_store: RunOrchestratorStore = Depends(get_run_orchestrator_store), - run_store: RunStore = Depends(get_run_store), - runs_publisher: RunsPublisher = Depends(get_runs_publisher), + task_runner: Annotated[TaskRunner, Depends(get_task_runner)], + run_orchestrator_store: Annotated[ + RunOrchestratorStore, Depends(get_run_orchestrator_store) + ], + run_store: Annotated[RunStore, Depends(get_run_store)], + runs_publisher: Annotated[RunsPublisher, Depends(get_runs_publisher)], ) -> RunController: """Get a RunController for the current run. @@ -92,16 +93,16 @@ async def get_run_controller( async def create_run_action( runId: str, request_body: RequestModel[RunActionCreate], - run_controller: RunController = Depends(get_run_controller), - action_id: str = Depends(get_unique_id), - created_at: datetime = Depends(get_current_time), - maintenance_run_orchestrator_store: MaintenanceRunOrchestratorStore = Depends( - get_maintenance_run_orchestrator_store - ), - deck_configuration_store: DeckConfigurationStore = Depends( - get_deck_configuration_store - ), - check_estop: bool = Depends(require_estop_in_good_state), + run_controller: Annotated[RunController, Depends(get_run_controller)], + action_id: Annotated[str, Depends(get_unique_id)], + created_at: Annotated[datetime, Depends(get_current_time)], + maintenance_run_orchestrator_store: Annotated[ + MaintenanceRunOrchestratorStore, Depends(get_maintenance_run_orchestrator_store) + ], + deck_configuration_store: Annotated[ + DeckConfigurationStore, Depends(get_deck_configuration_store) + ], + check_estop: Annotated[bool, Depends(require_estop_in_good_state)], ) -> PydanticResponse[SimpleBody[RunAction]]: """Create a run control action. diff --git a/robot-server/robot_server/runs/router/base_router.py b/robot-server/robot_server/runs/router/base_router.py index 7000882b965..fa4f1947a4f 100644 --- a/robot-server/robot_server/runs/router/base_router.py +++ b/robot-server/robot_server/runs/router/base_router.py @@ -6,8 +6,7 @@ from datetime import datetime from pathlib import Path from textwrap import dedent -from typing import Optional, Union, Callable -from typing_extensions import Literal, Final +from typing import Annotated, Callable, Final, Literal, Optional, Union from fastapi import APIRouter, Depends, status, Query from pydantic import BaseModel, Field @@ -118,7 +117,7 @@ class AllRunsLinks(BaseModel): async def get_run_data_from_url( runId: str, - run_data_manager: RunDataManager = Depends(get_run_data_manager), + run_data_manager: Annotated[RunDataManager, Depends(get_run_data_manager)], ) -> Union[Run, BadRun]: """Get the data of a run. 
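As `get_run_controller` above shows, the `Annotated` form is not limited to path operations: dependency functions declare their own sub-dependencies the same way, and FastAPI resolves the chain recursively when an endpoint depends on them. A rough sketch of that shape, assuming illustrative names (`EngineStub`, `ControllerStub`, `get_engine`, `get_controller` are not from this codebase).

from typing import Annotated

from fastapi import Depends


class EngineStub:
    """Illustrative stand-in for a store such as RunOrchestratorStore."""


class ControllerStub:
    """Illustrative controller built from other dependencies."""

    def __init__(self, engine: EngineStub) -> None:
        self.engine = engine


def get_engine() -> EngineStub:
    """Illustrative provider."""
    return EngineStub()


def get_controller(
    engine: Annotated[EngineStub, Depends(get_engine)],
) -> ControllerStub:
    # A dependency function uses the same Annotated syntax as an endpoint;
    # FastAPI resolves the whole chain when something depends on get_controller.
    return ControllerStub(engine=engine)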
@@ -155,22 +154,22 @@ async def get_run_data_from_url( }, ) async def create_run( # noqa: C901 + run_data_manager: Annotated[RunDataManager, Depends(get_run_data_manager)], + protocol_store: Annotated[ProtocolStore, Depends(get_protocol_store)], + run_id: Annotated[str, Depends(get_unique_id)], + created_at: Annotated[datetime, Depends(get_current_time)], + run_auto_deleter: Annotated[RunAutoDeleter, Depends(get_run_auto_deleter)], + data_files_directory: Annotated[Path, Depends(get_data_files_directory)], + data_files_store: Annotated[DataFilesStore, Depends(get_data_files_store)], + quick_transfer_run_auto_deleter: Annotated[ + RunAutoDeleter, Depends(get_quick_transfer_run_auto_deleter) + ], + check_estop: Annotated[bool, Depends(require_estop_in_good_state)], + deck_configuration_store: Annotated[ + DeckConfigurationStore, Depends(get_deck_configuration_store) + ], + notify_publishers: Annotated[Callable[[], None], Depends(get_pe_notify_publishers)], request_body: Optional[RequestModel[RunCreate]] = None, - run_data_manager: RunDataManager = Depends(get_run_data_manager), - protocol_store: ProtocolStore = Depends(get_protocol_store), - run_id: str = Depends(get_unique_id), - created_at: datetime = Depends(get_current_time), - run_auto_deleter: RunAutoDeleter = Depends(get_run_auto_deleter), - quick_transfer_run_auto_deleter: RunAutoDeleter = Depends( - get_quick_transfer_run_auto_deleter - ), - data_files_directory: Path = Depends(get_data_files_directory), - data_files_store: DataFilesStore = Depends(get_data_files_store), - check_estop: bool = Depends(require_estop_in_good_state), - deck_configuration_store: DeckConfigurationStore = Depends( - get_deck_configuration_store - ), - notify_publishers: Callable[[], None] = Depends(get_pe_notify_publishers), ) -> PydanticResponse[SimpleBody[Union[Run, BadRun]]]: """Create a new run. @@ -270,16 +269,18 @@ async def create_run( # noqa: C901 }, ) async def get_runs( - pageLength: Optional[int] = Query( - None, - description=( - "The maximum number of runs to return." - " If this is less than the total number of runs," - " the most-recently created runs will be returned." - " If this is omitted or `null`, all runs will be returned." + run_data_manager: Annotated[RunDataManager, Depends(get_run_data_manager)], + pageLength: Annotated[ + Optional[int], + Query( + description=( + "The maximum number of runs to return." + " If this is less than the total number of runs," + " the most-recently created runs will be returned." + " If this is omitted or `null`, all runs will be returned." + ), ), - ), - run_data_manager: RunDataManager = Depends(get_run_data_manager), + ] = None, ) -> PydanticResponse[MultiBody[Union[Run, BadRun], AllRunsLinks]]: """Get all runs, in order from least-recently to most-recently created. @@ -313,7 +314,7 @@ async def get_runs( }, ) async def get_run( - run_data: Run = Depends(get_run_data_from_url), + run_data: Annotated[Run, Depends(get_run_data_from_url)], ) -> PydanticResponse[SimpleBody[Union[Run, BadRun]]]: """Get a run by its ID. @@ -338,7 +339,7 @@ async def get_run( ) async def remove_run( runId: str, - run_data_manager: RunDataManager = Depends(get_run_data_manager), + run_data_manager: Annotated[RunDataManager, Depends(get_run_data_manager)], ) -> PydanticResponse[SimpleEmptyBody]: """Delete a run by its ID. 
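In the old style the first positional argument to `Query()` was the default value (`Query(None, ...)`, `Query(_DEFAULT_COMMAND_ERROR_LIST_LENGTH, ...)`); in the `Annotated` form `Query()` carries only metadata such as `description`, `gt`, or `alias`, and the default moves outside the bracket as an ordinary Python default. A minimal sketch; the function name and `_DEFAULT_PAGE_LENGTH` constant are illustrative, not from this patch.

from typing import Annotated, Optional

from fastapi import Query

_DEFAULT_PAGE_LENGTH = 20  # illustrative constant


async def list_records(
    pageLength: Annotated[
        int,
        Query(description="The maximum number of records to return."),
    ] = _DEFAULT_PAGE_LENGTH,
    cursor: Annotated[
        Optional[int],
        Query(description="Starting index; omit to let the server pick one."),
    ] = None,
) -> int:
    # The literal defaults are now ordinary Python defaults, so a direct call
    # to this function (outside FastAPI) also sees them.
    return pageLength if cursor is None else cursor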
@@ -375,7 +376,7 @@ async def remove_run( async def update_run( runId: str, request_body: RequestModel[RunUpdate], - run_data_manager: RunDataManager = Depends(get_run_data_manager), + run_data_manager: Annotated[RunDataManager, Depends(get_run_data_manager)], ) -> PydanticResponse[SimpleBody[Union[Run, BadRun]]]: """Update a run by its ID. @@ -420,7 +421,7 @@ async def update_run( async def put_error_recovery_policy( runId: str, request_body: RequestModel[ErrorRecoveryPolicy], - run_data_manager: RunDataManager = Depends(get_run_data_manager), + run_data_manager: Annotated[RunDataManager, Depends(get_run_data_manager)], ) -> PydanticResponse[SimpleEmptyBody]: """Create run polices. @@ -461,20 +462,24 @@ async def put_error_recovery_policy( }, ) async def get_run_commands_error( + run_data_manager: Annotated[RunDataManager, Depends(get_run_data_manager)], runId: str, - cursor: Optional[int] = Query( - None, - description=( - "The starting index of the desired first command error in the list." - " If unspecified, a cursor will be selected automatically" - " based on the last error added." + pageLength: Annotated[ + int, + Query( + description="The maximum number of command errors in the list to return.", ), - ), - pageLength: int = Query( - _DEFAULT_COMMAND_ERROR_LIST_LENGTH, - description="The maximum number of command errors in the list to return.", - ), - run_data_manager: RunDataManager = Depends(get_run_data_manager), + ] = _DEFAULT_COMMAND_ERROR_LIST_LENGTH, + cursor: Annotated[ + Optional[int], + Query( + description=( + "The starting index of the desired first command error in the list." + " If unspecified, a cursor will be selected automatically" + " based on the last error added." + ), + ), + ] = None, ) -> PydanticResponse[SimpleMultiBody[pe_errors.ErrorOccurrence]]: """Get a summary of a set of command errors in a run. diff --git a/robot-server/robot_server/runs/router/commands_router.py b/robot-server/robot_server/runs/router/commands_router.py index fdabab6778f..56ff466bf84 100644 --- a/robot-server/robot_server/runs/router/commands_router.py +++ b/robot-server/robot_server/runs/router/commands_router.py @@ -1,7 +1,6 @@ """Router for /runs commands endpoints.""" import textwrap -from typing import Optional, Union -from typing_extensions import Final, Literal +from typing import Annotated, Final, Literal, Optional, Union from fastapi import APIRouter, Depends, Query, status @@ -83,8 +82,10 @@ class PreSerializedCommandsNotAvailable(ErrorDetails): async def get_current_run_from_url( runId: str, - run_orchestrator_store: RunOrchestratorStore = Depends(get_run_orchestrator_store), - run_store: RunStore = Depends(get_run_store), + run_orchestrator_store: Annotated[ + RunOrchestratorStore, Depends(get_run_orchestrator_store) + ], + run_store: Annotated[RunStore, Depends(get_run_store)], ) -> str: """Get run from url. @@ -155,43 +156,51 @@ async def get_current_run_from_url( ) async def create_run_command( request_body: RequestModelWithCommandCreate, - waitUntilComplete: bool = Query( - default=False, - description=( - "If `false`, return immediately, while the new command is still queued." - " If `true`, only return once the new command succeeds or fails," - " or when the timeout is reached. See the `timeout` query parameter." 
+ run_orchestrator_store: Annotated[ + RunOrchestratorStore, Depends(get_run_orchestrator_store) + ], + check_estop: Annotated[bool, Depends(require_estop_in_good_state)], + run_id: Annotated[str, Depends(get_current_run_from_url)], + waitUntilComplete: Annotated[ + bool, + Query( + description=( + "If `false`, return immediately, while the new command is still queued." + " If `true`, only return once the new command succeeds or fails," + " or when the timeout is reached. See the `timeout` query parameter." + ), ), - ), - timeout: Optional[int] = Query( - default=None, - gt=0, - description=( - "If `waitUntilComplete` is `true`," - " the maximum time in milliseconds to wait before returning." - " The default is infinite." - "\n\n" - "The timer starts as soon as you enqueue the new command with this request," - " *not* when the new command starts running. So if there are other commands" - " in the queue before the new one, they will also count towards the" - " timeout." - "\n\n" - "If the timeout elapses before the command succeeds or fails," - " the command will be returned with its current status." - "\n\n" - "Compatibility note: on robot software v6.2.0 and older," - " the default was 30 seconds, not infinite." + ] = False, + timeout: Annotated[ + Optional[int], + Query( + gt=0, + description=( + "If `waitUntilComplete` is `true`," + " the maximum time in milliseconds to wait before returning." + " The default is infinite." + "\n\n" + "The timer starts as soon as you enqueue the new command with this request," + " *not* when the new command starts running. So if there are other commands" + " in the queue before the new one, they will also count towards the" + " timeout." + "\n\n" + "If the timeout elapses before the command succeeds or fails," + " the command will be returned with its current status." + "\n\n" + "Compatibility note: on robot software v6.2.0 and older," + " the default was 30 seconds, not infinite." + ), ), - ), - failedCommandId: Optional[str] = Query( - default=None, - description=( - "FIXIT command use only. Reference of the failed command id we are trying to fix." + ] = None, + failedCommandId: Annotated[ + Optional[str], + Query( + description=( + "FIXIT command use only. Reference of the failed command id we are trying to fix." + ), ), - ), - run_orchestrator_store: RunOrchestratorStore = Depends(get_run_orchestrator_store), - check_estop: bool = Depends(require_estop_in_good_state), - run_id: str = Depends(get_current_run_from_url), + ] = None, ) -> PydanticResponse[SimpleBody[pe_commands.Command]]: """Enqueue a protocol command. @@ -259,19 +268,23 @@ async def create_run_command( ) async def get_run_commands( runId: str, - cursor: Optional[int] = Query( - None, - description=( - "The starting index of the desired first command in the list." - " If unspecified, a cursor will be selected automatically" - " based on the currently running or most recently executed command." + run_data_manager: Annotated[RunDataManager, Depends(get_run_data_manager)], + cursor: Annotated[ + Optional[int], + Query( + description=( + "The starting index of the desired first command in the list." + " If unspecified, a cursor will be selected automatically" + " based on the currently running or most recently executed command." 
+ ), ), - ), - pageLength: int = Query( - _DEFAULT_COMMAND_LIST_LENGTH, - description="The maximum number of commands in the list to return.", - ), - run_data_manager: RunDataManager = Depends(get_run_data_manager), + ] = None, + pageLength: Annotated[ + int, + Query( + description="The maximum number of commands in the list to return.", + ), + ] = _DEFAULT_COMMAND_LIST_LENGTH, ) -> PydanticResponse[MultiBody[RunCommandSummary, CommandCollectionLinks]]: """Get a summary of a set of commands in a run. @@ -354,7 +367,7 @@ async def get_run_commands( ) async def get_run_commands_as_pre_serialized_list( runId: str, - run_data_manager: RunDataManager = Depends(get_run_data_manager), + run_data_manager: Annotated[RunDataManager, Depends(get_run_data_manager)], ) -> PydanticResponse[SimpleMultiBody[str]]: """Get all commands of a completed run as a list of pre-serialized (string encoded) commands. @@ -395,7 +408,7 @@ async def get_run_commands_as_pre_serialized_list( async def get_run_command( runId: str, commandId: str, - run_data_manager: RunDataManager = Depends(get_run_data_manager), + run_data_manager: Annotated[RunDataManager, Depends(get_run_data_manager)], ) -> PydanticResponse[SimpleBody[pe_commands.Command]]: """Get a specific command from a run. diff --git a/robot-server/robot_server/runs/router/labware_router.py b/robot-server/robot_server/runs/router/labware_router.py index bb03fa6f0bb..7eba96afa0e 100644 --- a/robot-server/robot_server/runs/router/labware_router.py +++ b/robot-server/robot_server/runs/router/labware_router.py @@ -1,6 +1,6 @@ """Router for /runs endpoints dealing with labware offsets and definitions.""" import logging -from typing import Union +from typing import Annotated, Union from fastapi import APIRouter, Depends, status @@ -49,8 +49,10 @@ ) async def add_labware_offset( request_body: RequestModel[LabwareOffsetCreate], - run_orchestrator_store: RunOrchestratorStore = Depends(get_run_orchestrator_store), - run: Run = Depends(get_run_data_from_url), + run_orchestrator_store: Annotated[ + RunOrchestratorStore, Depends(get_run_orchestrator_store) + ], + run: Annotated[Run, Depends(get_run_data_from_url)], ) -> PydanticResponse[SimpleBody[LabwareOffset]]: """Add a labware offset to a run. @@ -91,8 +93,10 @@ async def add_labware_offset( ) async def add_labware_definition( request_body: RequestModel[LabwareDefinition], - run_orchestrator_store: RunOrchestratorStore = Depends(get_run_orchestrator_store), - run: Run = Depends(get_run_data_from_url), + run_orchestrator_store: Annotated[ + RunOrchestratorStore, Depends(get_run_orchestrator_store) + ], + run: Annotated[Run, Depends(get_run_data_from_url)], ) -> PydanticResponse[SimpleBody[LabwareDefinitionSummary]]: """Add a labware offset to a run. @@ -136,7 +140,7 @@ async def add_labware_definition( ) async def get_run_loaded_labware_definitions( runId: str, - run_data_manager: RunDataManager = Depends(get_run_data_manager), + run_data_manager: Annotated[RunDataManager, Depends(get_run_data_manager)], ) -> PydanticResponse[SimpleBody[ResponseList[SD_LabwareDefinition]]]: """Get a run's loaded labware definition by the run ID. 
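One practical consequence, visible in the test updates near the end of this patch: parameters that previously defaulted to `= Depends(...)` now have no default at all, so calling an endpoint or dependency function directly (as the unit tests do, bypassing FastAPI's injector) must pass every dependency explicitly. A rough sketch under that assumption; `StoreStub`, `get_store`, and `remove_item` are illustrative names only.

from typing import Annotated

from fastapi import Depends


class StoreStub:
    """Illustrative stand-in for an injected store."""

    def remove(self, item_id: str) -> None:
        pass


def get_store() -> StoreStub:
    """Illustrative provider."""
    return StoreStub()


async def remove_item(
    item_id: str,
    store: Annotated[StoreStub, Depends(get_store)],
) -> None:
    store.remove(item_id)


# In a unit test the dependency must now be supplied by the caller, e.g.:
#     await remove_item(item_id="item-1", store=StoreStub())
# whereas the old `store: StoreStub = Depends(get_store)` signature would let
# the call omit it and silently pass the Depends marker object instead.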
diff --git a/robot-server/robot_server/service/dependencies.py b/robot-server/robot_server/service/dependencies.py index 0cfb9736b5d..b27e014a3e9 100644 --- a/robot-server/robot_server/service/dependencies.py +++ b/robot-server/robot_server/service/dependencies.py @@ -1,3 +1,5 @@ +from typing import Annotated + from datetime import datetime, timezone from uuid import uuid4 from fastapi import Depends @@ -21,8 +23,8 @@ async def get_motion_lock() -> ThreadedAsyncLock: @call_once async def get_session_manager( - hardware_api: HardwareControlAPI = Depends(get_hardware), - motion_lock: ThreadedAsyncLock = Depends(get_motion_lock), + hardware_api: Annotated[HardwareControlAPI, Depends(get_hardware)], + motion_lock: Annotated[ThreadedAsyncLock, Depends(get_motion_lock)], ) -> SessionManager: """The single session manager instance""" return SessionManager( diff --git a/robot-server/robot_server/service/labware/router.py b/robot-server/robot_server/service/labware/router.py index 930a6c91360..a90ed1ca867 100644 --- a/robot-server/robot_server/service/labware/router.py +++ b/robot-server/robot_server/service/labware/router.py @@ -3,7 +3,7 @@ As of the v5 software release, these endpoints do not function. All labware offsets are set via `/run` endpoints. """ -from typing import Optional +from typing import Annotated, Optional from typing_extensions import Literal, NoReturn from fastapi import APIRouter, Depends, status @@ -44,11 +44,11 @@ class LabwareCalibrationEndpointsRemoved(ErrorDetails): }, ) async def get_all_labware_calibrations( + requested_version: Annotated[int, Depends(get_requested_version)], loadName: Optional[str] = None, namespace: Optional[str] = None, version: Optional[int] = None, parent: Optional[str] = None, - requested_version: int = Depends(get_requested_version), ) -> lw_models.MultipleCalibrationsResponse: if requested_version <= 3: return lw_models.MultipleCalibrationsResponse(data=[], links=None) @@ -72,7 +72,7 @@ async def get_all_labware_calibrations( ) async def get_specific_labware_calibration( calibrationId: str, - requested_version: int = Depends(get_requested_version), + requested_version: Annotated[int, Depends(get_requested_version)], ) -> NoReturn: if requested_version <= 3: raise RobotServerError( @@ -99,7 +99,8 @@ async def get_specific_labware_calibration( }, ) async def delete_specific_labware_calibration( - calibrationId: str, requested_version: int = Depends(get_requested_version) + calibrationId: str, + requested_version: Annotated[int, Depends(get_requested_version)], ) -> NoReturn: if requested_version <= 3: raise RobotServerError( diff --git a/robot-server/robot_server/service/legacy/routers/control.py b/robot-server/robot_server/service/legacy/routers/control.py index d3713b81bee..5beb430512d 100644 --- a/robot-server/robot_server/service/legacy/routers/control.py +++ b/robot-server/robot_server/service/legacy/routers/control.py @@ -1,3 +1,5 @@ +from typing import Annotated + import asyncio from fastapi import APIRouter, Query, Depends @@ -28,8 +30,8 @@ description="Blink the gantry lights so you can pick it out of a crowd", ) async def post_identify( - seconds: int = Query(..., description="Time to blink the lights for"), - hardware: HardwareControlAPI = Depends(get_hardware), + seconds: Annotated[int, Query(..., description="Time to blink the lights for")], + hardware: Annotated[HardwareControlAPI, Depends(get_hardware)], ) -> V1BasicResponse: identify = hardware.identify asyncio.ensure_future(identify(seconds)) @@ -85,8 +87,8 @@ async def 
get_robot_positions() -> control.RobotPositionsResponse: ) async def post_move_robot( robot_move_target: control.RobotMoveTarget, - hardware: HardwareControlAPI = Depends(get_hardware), - motion_lock: ThreadedAsyncLock = Depends(get_motion_lock), + hardware: Annotated[HardwareControlAPI, Depends(get_hardware)], + motion_lock: Annotated[ThreadedAsyncLock, Depends(get_motion_lock)], ) -> V1BasicResponse: """Move the robot""" try: @@ -109,8 +111,8 @@ async def post_move_robot( ) async def post_home_robot( robot_home_target: control.RobotHomeTarget, - hardware: HardwareControlAPI = Depends(get_hardware), - motion_lock: ThreadedAsyncLock = Depends(get_motion_lock), + hardware: Annotated[HardwareControlAPI, Depends(get_hardware)], + motion_lock: Annotated[ThreadedAsyncLock, Depends(get_motion_lock)], ) -> V1BasicResponse: """Home the robot or one of the pipettes""" try: @@ -146,7 +148,7 @@ async def post_home_robot( response_model=control.RobotLightState, ) async def get_robot_light_state( - hardware: HardwareControlAPI = Depends(get_hardware), + hardware: Annotated[HardwareControlAPI, Depends(get_hardware)], ) -> control.RobotLightState: light_state = await hardware.get_lights() return control.RobotLightState(on=light_state.get("rails", False)) @@ -160,7 +162,7 @@ async def get_robot_light_state( ) async def post_robot_light_state( robot_light_state: control.RobotLightState, - hardware: HardwareControlAPI = Depends(get_hardware), + hardware: Annotated[HardwareControlAPI, Depends(get_hardware)], ) -> control.RobotLightState: await hardware.set_lights(rails=robot_light_state.on) return robot_light_state diff --git a/robot-server/robot_server/service/legacy/routers/deck_calibration.py b/robot-server/robot_server/service/legacy/routers/deck_calibration.py index c66ca3f4873..eb505b5cab8 100644 --- a/robot-server/robot_server/service/legacy/routers/deck_calibration.py +++ b/robot-server/robot_server/service/legacy/routers/deck_calibration.py @@ -1,3 +1,5 @@ +from typing import Annotated + from fastapi import APIRouter, Depends from opentrons.hardware_control import HardwareControlAPI @@ -25,7 +27,7 @@ response_model=CalibrationStatus, ) async def get_calibration_status( - hardware: HardwareControlAPI = Depends(get_hardware), + hardware: Annotated[HardwareControlAPI, Depends(get_hardware)], ) -> CalibrationStatus: # TODO: AA 12-01-2020 Instrument offset has been deprecated. 
We should # exclude instrument calibration in a future refactor diff --git a/robot-server/robot_server/service/legacy/routers/logs.py b/robot-server/robot_server/service/legacy/routers/logs.py index 589413181fb..69b92d5263c 100644 --- a/robot-server/robot_server/service/legacy/routers/logs.py +++ b/robot-server/robot_server/service/legacy/routers/logs.py @@ -1,5 +1,5 @@ from fastapi import APIRouter, Query, Response -from typing import Dict +from typing import Annotated, Dict from opentrons.system import log_control @@ -32,13 +32,15 @@ async def get_logs( log_identifier: LogIdentifier, response: Response, - format: LogFormat = Query(LogFormat.text, title="Log format type"), - records: int = Query( - log_control.DEFAULT_RECORDS, - title="Number of records to retrieve", - gt=0, - le=log_control.MAX_RECORDS, - ), + format: Annotated[LogFormat, Query(title="Log format type")] = LogFormat.text, + records: Annotated[ + int, + Query( + title="Number of records to retrieve", + gt=0, + le=log_control.MAX_RECORDS, + ), + ] = log_control.DEFAULT_RECORDS, ) -> Response: syslog_id = IDENTIFIER_TO_SYSLOG_ID[log_identifier] modes = { diff --git a/robot-server/robot_server/service/legacy/routers/modules.py b/robot-server/robot_server/service/legacy/routers/modules.py index 4ba44418540..69fa07ecb38 100644 --- a/robot-server/robot_server/service/legacy/routers/modules.py +++ b/robot-server/robot_server/service/legacy/routers/modules.py @@ -26,7 +26,7 @@ # NOTE(mc, 2022-03-22): replaced by robot_server.modules.router.get_attached_modules async def get_modules( - hardware: HardwareControlAPI = Depends(get_hardware), + hardware: typing.Annotated[HardwareControlAPI, Depends(get_hardware)], ) -> Modules: attached_modules = hardware.attached_modules module_data = [ @@ -78,9 +78,9 @@ async def get_modules( ) async def post_serial_command( command: SerialCommand, - serial: str = Path(..., description="Serial number of the module"), - hardware: HardwareControlAPI = Depends(get_hardware), - requested_version: int = Depends(get_requested_version), + serial: typing.Annotated[str, Path(..., description="Serial number of the module")], + hardware: typing.Annotated[HardwareControlAPI, Depends(get_hardware)], + requested_version: typing.Annotated[int, Depends(get_requested_version)], ) -> SerialCommandResponse: """Send a command on device identified by serial""" if requested_version >= 3: @@ -144,8 +144,8 @@ async def post_serial_command( }, ) async def post_serial_update( - serial: str = Path(..., description="Serial number of the module"), - hardware: HardwareControlAPI = Depends(get_hardware), + serial: typing.Annotated[str, Path(..., description="Serial number of the module")], + hardware: typing.Annotated[HardwareControlAPI, Depends(get_hardware)], ) -> V1BasicResponse: """Update module firmware""" attached_modules = hardware.attached_modules diff --git a/robot-server/robot_server/service/legacy/routers/motors.py b/robot-server/robot_server/service/legacy/routers/motors.py index 463315bec8e..d142ab2466f 100644 --- a/robot-server/robot_server/service/legacy/routers/motors.py +++ b/robot-server/robot_server/service/legacy/routers/motors.py @@ -1,3 +1,5 @@ +from typing import Annotated + from starlette import status from fastapi import APIRouter, Depends from pydantic import ValidationError @@ -26,7 +28,7 @@ }, ) async def get_engaged_motors( - hardware: HardwareControlAPI = Depends(get_hardware), + hardware: Annotated[HardwareControlAPI, Depends(get_hardware)], ) -> model.EngagedMotors: # TODO (spp, 2023-07-06): 
Implement fetching Flex's engaged motors # https://opentrons.atlassian.net/browse/RET-1371 @@ -51,7 +53,7 @@ async def get_engaged_motors( response_model=V1BasicResponse, ) async def post_disengage_motors( - axes: model.Axes, hardware: HardwareControlAPI = Depends(get_hardware) + axes: model.Axes, hardware: Annotated[HardwareControlAPI, Depends(get_hardware)] ) -> V1BasicResponse: input_axes = [Axis[ax.upper()] for ax in axes.axes] try: diff --git a/robot-server/robot_server/service/legacy/routers/networking.py b/robot-server/robot_server/service/legacy/routers/networking.py index 869fab1b139..17f4a3364cc 100644 --- a/robot-server/robot_server/service/legacy/routers/networking.py +++ b/robot-server/robot_server/service/legacy/routers/networking.py @@ -4,7 +4,7 @@ from starlette import status from starlette.responses import JSONResponse -from typing import Optional +from typing import Annotated, Optional from fastapi import APIRouter, HTTPException, File, Path, UploadFile, Query from opentrons_shared_data.errors import ErrorCodes @@ -66,17 +66,19 @@ async def get_networking_status() -> NetworkingStatus: response_model=WifiNetworks, ) async def get_wifi_networks( - rescan: Optional[bool] = Query( - default=False, - description=( - "If `true`, forces a rescan for beaconing Wi-Fi networks. " - "This is an expensive operation that can take ~10 seconds, " - 'so only do it based on user needs like clicking a "scan network" ' - "button, not just to poll. " - "If `false`, returns the cached Wi-Fi networks, " - "letting the system decide when to do a rescan." + rescan: Annotated[ + Optional[bool], + Query( + description=( + "If `true`, forces a rescan for beaconing Wi-Fi networks. " + "This is an expensive operation that can take ~10 seconds, " + 'so only do it based on user needs like clicking a "scan network" ' + "button, not just to poll. " + "If `false`, returns the cached Wi-Fi networks, " + "letting the system decide when to do a rescan." + ), ), - ) + ] = False ) -> WifiNetworks: networks = await nmcli.available_ssids(rescan) return WifiNetworks(list=[WifiNetworkFull(**n) for n in networks]) @@ -189,11 +191,14 @@ async def post_wifi_key(key: UploadFile = File(...)): }, ) async def delete_wifi_key( - key_uuid: str = Path( - ..., - description="The ID of key to delete, as determined by a previous" - " call to GET /wifi/keys", - ) + key_uuid: Annotated[ + str, + Path( + ..., + description="The ID of key to delete, as determined by a previous" + " call to GET /wifi/keys", + ), + ] ) -> V1BasicResponse: """Delete wifi key handler""" deleted_file = wifi.remove_key(key_uuid) diff --git a/robot-server/robot_server/service/legacy/routers/pipettes.py b/robot-server/robot_server/service/legacy/routers/pipettes.py index 8bcbc4cf1cc..fb219548b34 100644 --- a/robot-server/robot_server/service/legacy/routers/pipettes.py +++ b/robot-server/robot_server/service/legacy/routers/pipettes.py @@ -27,18 +27,20 @@ response_model=pipettes.PipettesByMount, ) async def get_pipettes( - refresh: typing.Optional[bool] = Query( - False, - description="If `false`, query a cached value. If `true`, actively scan for" - " attached pipettes." - "\n\n" - "**Warning:** Actively scanning disables the pipette motors and should only be done" - " when no protocol is running and you know it won't cause a problem." - "\n\n" - "**Warning:** Actively scanning is only valid on OT-2s. 
On Flex robots, it's" - " unnecessary, and the behavior is currently undefined.", - ), - hardware: HardwareControlAPI = Depends(get_hardware), + hardware: typing.Annotated[HardwareControlAPI, Depends(get_hardware)], + refresh: typing.Annotated[ + typing.Optional[bool], + Query( + description="If `false`, query a cached value. If `true`, actively scan for" + " attached pipettes." + "\n\n" + "**Warning:** Actively scanning disables the pipette motors and should only be done" + " when no protocol is running and you know it won't cause a problem." + "\n\n" + "**Warning:** Actively scanning is only valid on OT-2s. On Flex robots, it's" + " unnecessary, and the behavior is currently undefined.", + ), + ] = False, ) -> pipettes.PipettesByMount: """ Query robot for model strings on 'left' and 'right' mounts, and return a diff --git a/robot-server/robot_server/service/legacy/routers/settings.py b/robot-server/robot_server/service/legacy/routers/settings.py index 65e2d0c63d4..b5caa233724 100644 --- a/robot-server/robot_server/service/legacy/routers/settings.py +++ b/robot-server/robot_server/service/legacy/routers/settings.py @@ -1,7 +1,7 @@ import aiohttp import logging from dataclasses import asdict -from typing import cast, Any, Dict, List, Optional, Union +from typing import cast, Annotated, Any, Dict, List, Optional, Union from starlette import status from fastapi import APIRouter, Depends @@ -87,8 +87,8 @@ async def set_oem_mode_request(enable): ) async def post_settings( update: AdvancedSettingRequest, - hardware: HardwareControlAPI = Depends(get_hardware), - robot_type: RobotTypeEnum = Depends(get_robot_type_enum), + hardware: Annotated[HardwareControlAPI, Depends(get_hardware)], + robot_type: Annotated[RobotTypeEnum, Depends(get_robot_type_enum)], ) -> AdvancedSettingsResponse: """Update advanced setting (feature flag)""" try: @@ -121,7 +121,7 @@ async def post_settings( response_model_exclude_unset=True, ) async def get_settings( - robot_type: RobotTypeEnum = Depends(get_robot_type_enum), + robot_type: Annotated[RobotTypeEnum, Depends(get_robot_type_enum)], ) -> AdvancedSettingsResponse: """Get advanced setting (feature flags)""" return _create_settings_response(robot_type) @@ -163,7 +163,7 @@ def _create_settings_response(robot_type: RobotTypeEnum) -> AdvancedSettingsResp }, ) async def post_log_level_local( - log_level: LogLevel, hardware: HardwareControlAPI = Depends(get_hardware) + log_level: LogLevel, hardware: Annotated[HardwareControlAPI, Depends(get_hardware)] ) -> V1BasicResponse: """Update local log level""" level = log_level.log_level @@ -209,7 +209,7 @@ async def post_log_level_upstream(log_level: LogLevel) -> V1BasicResponse: response_model=FactoryResetOptions, ) async def get_settings_reset_options( - robot_type: RobotTypeEnum = Depends(get_robot_type_enum), + robot_type: Annotated[RobotTypeEnum, Depends(get_robot_type_enum)], ) -> FactoryResetOptions: reset_options = reset_util.reset_options(robot_type).items() return FactoryResetOptions( @@ -238,11 +238,13 @@ async def get_settings_reset_options( ) async def post_settings_reset_options( factory_reset_commands: Dict[reset_util.ResetOptionId, bool], - persistence_resetter: PersistenceResetter = Depends(get_persistence_resetter), - deck_configuration_store: Optional[DeckConfigurationStore] = Depends( - get_deck_configuration_store_failsafe - ), - robot_type: RobotTypeEnum = Depends(get_robot_type_enum), + persistence_resetter: Annotated[ + PersistenceResetter, Depends(get_persistence_resetter) + ], + deck_configuration_store: 
Annotated[ + Optional[DeckConfigurationStore], Depends(get_deck_configuration_store_failsafe) + ], + robot_type: Annotated[RobotTypeEnum, Depends(get_robot_type_enum)], ) -> V1BasicResponse: reset_options = reset_util.reset_options(robot_type) not_allowed_options = [ @@ -302,7 +304,7 @@ async def post_settings_reset_options( response_model=RobotConfigs, ) async def get_robot_settings( - hardware: HardwareControlAPI = Depends(get_hardware), + hardware: Annotated[HardwareControlAPI, Depends(get_hardware)], ) -> RobotConfigs: return asdict(hardware.config) @@ -315,7 +317,7 @@ async def get_robot_settings( response_model_exclude_unset=True, ) async def get_pipette_settings( - hardware: API = Depends(get_ot2_hardware), + hardware: Annotated[API, Depends(get_ot2_hardware)], ) -> MultiPipetteSettings: res = {} attached_pipettes = hardware.attached_pipettes @@ -346,7 +348,7 @@ async def get_pipette_settings( }, ) async def get_pipette_setting( - pipette_id: str, hardware: API = Depends(get_ot2_hardware) + pipette_id: str, hardware: Annotated[API, Depends(get_ot2_hardware)] ) -> PipetteSettings: attached_pipettes = hardware.attached_pipettes known_ids = mutable_configurations.known_pipettes( @@ -376,7 +378,7 @@ async def get_pipette_setting( async def patch_pipette_setting( pipette_id: str, settings_update: PipetteSettingsUpdate, - hardware: None = Depends(get_ot2_hardware), + hardware: Annotated[None, Depends(get_ot2_hardware)], ) -> PipetteSettings: # Convert fields to dict of field name to value fields = settings_update.setting_fields or {} diff --git a/robot-server/robot_server/service/notifications/notification_client.py b/robot-server/robot_server/service/notifications/notification_client.py index 81ec2b69a1d..1b706052f0e 100644 --- a/robot-server/robot_server/service/notifications/notification_client.py +++ b/robot-server/robot_server/service/notifications/notification_client.py @@ -4,7 +4,7 @@ import paho.mqtt.client as mqtt from anyio import to_thread from fastapi import Depends -from typing import Any, Dict, Optional +from typing import Annotated, Any, Dict, Optional from enum import Enum @@ -210,7 +210,7 @@ async def clean_up_notification_client(app_state: AppState) -> None: def get_notification_client( - app_state: AppState = Depends(get_app_state), + app_state: Annotated[AppState, Depends(get_app_state)], ) -> Optional[NotificationClient]: """Intended to be used by endpoint functions as a FastAPI dependency.""" notification_client = _notification_client_accessor.get_from(app_state) diff --git a/robot-server/robot_server/service/notifications/publisher_notifier.py b/robot-server/robot_server/service/notifications/publisher_notifier.py index 22b5c34b79e..1a3fc5de0d3 100644 --- a/robot-server/robot_server/service/notifications/publisher_notifier.py +++ b/robot-server/robot_server/service/notifications/publisher_notifier.py @@ -1,7 +1,7 @@ """Provides an interface for alerting notification publishers to events and related lifecycle utilities.""" import asyncio from fastapi import Depends -from typing import Optional, Callable, List, Awaitable, Union +from typing import Annotated, Optional, Callable, List, Awaitable, Union from server_utils.fastapi_utils.app_state import ( AppState, @@ -48,7 +48,7 @@ async def _wait_for_event(self) -> None: def get_pe_publisher_notifier( - app_state: AppState = Depends(get_app_state), + app_state: Annotated[AppState, Depends(get_app_state)], ) -> PublisherNotifier: """Intended for use by various publishers only. 
Intended for protocol engine.""" publisher_notifier = _pe_publisher_notifier_accessor.get_from(app_state) @@ -58,7 +58,7 @@ def get_pe_publisher_notifier( def get_pe_notify_publishers( - app_state: AppState = Depends(get_app_state), + app_state: Annotated[AppState, Depends(get_app_state)], ) -> Callable[[], None]: """Provides access to the callback used to notify publishers of changes. Intended for protocol engine.""" publisher_notifier = _pe_publisher_notifier_accessor.get_from(app_state) diff --git a/robot-server/robot_server/service/notifications/publishers/deck_configuration_publisher.py b/robot-server/robot_server/service/notifications/publishers/deck_configuration_publisher.py index 14361873a7b..261f4b16395 100644 --- a/robot-server/robot_server/service/notifications/publishers/deck_configuration_publisher.py +++ b/robot-server/robot_server/service/notifications/publishers/deck_configuration_publisher.py @@ -1,3 +1,5 @@ +from typing import Annotated + from fastapi import Depends from server_utils.fastapi_utils.app_state import ( @@ -29,8 +31,10 @@ async def publish_deck_configuration( async def get_deck_configuration_publisher( - app_state: AppState = Depends(get_app_state), - notification_client: NotificationClient = Depends(get_notification_client), + app_state: Annotated[AppState, Depends(get_app_state)], + notification_client: Annotated[ + NotificationClient, Depends(get_notification_client) + ], ) -> DeckConfigurationPublisher: """Get a singleton DeckConfigurationPublisher to publish deck configuration topics.""" deck_configuration_publisher = _deck_configuration_publisher_accessor.get_from( diff --git a/robot-server/robot_server/service/notifications/publishers/maintenance_runs_publisher.py b/robot-server/robot_server/service/notifications/publishers/maintenance_runs_publisher.py index 1c382d37102..b1b7e44675c 100644 --- a/robot-server/robot_server/service/notifications/publishers/maintenance_runs_publisher.py +++ b/robot-server/robot_server/service/notifications/publishers/maintenance_runs_publisher.py @@ -1,3 +1,5 @@ +from typing import Annotated + from fastapi import Depends from server_utils.fastapi_utils.app_state import ( @@ -31,8 +33,10 @@ async def publish_current_maintenance_run( async def get_maintenance_runs_publisher( - app_state: AppState = Depends(get_app_state), - notification_client: NotificationClient = Depends(get_notification_client), + app_state: Annotated[AppState, Depends(get_app_state)], + notification_client: Annotated[ + NotificationClient, Depends(get_notification_client) + ], ) -> MaintenanceRunsPublisher: """Get a singleton MaintenanceRunsPublisher to publish maintenance run topics.""" maintenance_runs_publisher = _maintenance_runs_publisher_accessor.get_from( diff --git a/robot-server/robot_server/service/notifications/publishers/runs_publisher.py b/robot-server/robot_server/service/notifications/publishers/runs_publisher.py index 9de9e2c7c51..f4278718416 100644 --- a/robot-server/robot_server/service/notifications/publishers/runs_publisher.py +++ b/robot-server/robot_server/service/notifications/publishers/runs_publisher.py @@ -1,6 +1,6 @@ from fastapi import Depends from dataclasses import dataclass -from typing import Callable, Optional +from typing import Annotated, Callable, Optional from opentrons.protocol_engine import CommandPointer, StateSummary, EngineStatus @@ -173,9 +173,13 @@ async def _handle_engine_status_change(self) -> None: async def get_runs_publisher( - app_state: AppState = Depends(get_app_state), - notification_client: 
NotificationClient = Depends(get_notification_client), - publisher_notifier: PublisherNotifier = Depends(get_pe_publisher_notifier), + app_state: Annotated[AppState, Depends(get_app_state)], + notification_client: Annotated[ + NotificationClient, Depends(get_notification_client) + ], + publisher_notifier: Annotated[ + PublisherNotifier, Depends(get_pe_publisher_notifier) + ], ) -> RunsPublisher: """Get a singleton RunsPublisher to publish runs topics.""" runs_publisher = _runs_publisher_accessor.get_from(app_state) diff --git a/robot-server/robot_server/service/pipette_offset/router.py b/robot-server/robot_server/service/pipette_offset/router.py index 8a41f3b1ee0..88db5a87170 100644 --- a/robot-server/robot_server/service/pipette_offset/router.py +++ b/robot-server/robot_server/service/pipette_offset/router.py @@ -1,6 +1,6 @@ from starlette import status from fastapi import APIRouter, Depends -from typing import Optional +from typing import Annotated, Optional from opentrons import types as ot_types from opentrons.calibration_storage.ot2 import pipette_offset, models @@ -48,11 +48,10 @@ def _format_calibration( response_model=pip_models.MultipleCalibrationsResponse, ) async def get_all_pipette_offset_calibrations( + _: Annotated[API, Depends(get_ot2_hardware)], pipette_id: Optional[str] = None, mount: Optional[pip_models.MountType] = None, - _: API = Depends(get_ot2_hardware), ) -> pip_models.MultipleCalibrationsResponse: - all_calibrations = pipette_offset.get_all_pipette_offset_calibrations() if not all_calibrations: return pip_models.MultipleCalibrationsResponse( @@ -80,7 +79,9 @@ async def get_all_pipette_offset_calibrations( responses={status.HTTP_404_NOT_FOUND: {"model": ErrorBody}}, ) async def delete_specific_pipette_offset_calibration( - pipette_id: str, mount: pip_models.MountType, _: API = Depends(get_ot2_hardware) + pipette_id: str, + mount: pip_models.MountType, + _: Annotated[API, Depends(get_ot2_hardware)], ): try: pipette_offset.delete_pipette_offset_file( diff --git a/robot-server/robot_server/service/session/router.py b/robot-server/robot_server/service/session/router.py index b4b77a6b06b..3b9da7ba88a 100644 --- a/robot-server/robot_server/service/session/router.py +++ b/robot-server/robot_server/service/session/router.py @@ -1,3 +1,4 @@ +from typing import Annotated, Optional import logging from starlette import status as http_status_codes @@ -57,7 +58,7 @@ def get_session(manager: SessionManager, session_id: IdentifierType) -> BaseSess ) async def create_session_handler( create_request: SessionCreateRequest, - session_manager: SessionManager = Depends(get_session_manager), + session_manager: Annotated[SessionManager, Depends(get_session_manager)], ) -> SessionResponse: session_type = create_request.data.sessionType create_params = create_request.data.createParams @@ -85,7 +86,7 @@ async def create_session_handler( ) async def delete_session_handler( sessionId: IdentifierType, - session_manager: SessionManager = Depends(get_session_manager), + session_manager: Annotated[SessionManager, Depends(get_session_manager)], ) -> SessionResponse: session_obj = get_session(manager=session_manager, session_id=sessionId) await session_manager.remove(session_obj.meta.identifier) @@ -108,7 +109,7 @@ async def delete_session_handler( ) async def get_session_handler( sessionId: IdentifierType, - session_manager: SessionManager = Depends(get_session_manager), + session_manager: Annotated[SessionManager, Depends(get_session_manager)], ) -> SessionResponse: session_obj = 
get_session(manager=session_manager, session_id=sessionId) @@ -129,10 +130,11 @@ async def get_session_handler( response_model=MultiSessionResponse, ) async def get_sessions_handler( - session_type: SessionType = Query( - None, description="Will limit the results to only this session type" - ), - session_manager: SessionManager = Depends(get_session_manager), + session_manager: Annotated[SessionManager, Depends(get_session_manager)], + session_type: Annotated[ + Optional[SessionType], + Query(description="Will limit the results to only this session type"), + ] = None, ) -> MultiSessionResponse: sessions = session_manager.get(session_type=session_type) return MultiSessionResponse( @@ -154,7 +156,7 @@ async def get_sessions_handler( async def session_command_execute_handler( sessionId: IdentifierType, command_request: CommandRequest, - session_manager: SessionManager = Depends(get_session_manager), + session_manager: Annotated[SessionManager, Depends(get_session_manager)], ) -> CommandResponse: session_obj = get_session(manager=session_manager, session_id=sessionId) if not session_manager.is_active(session_obj.meta.identifier): diff --git a/robot-server/robot_server/service/task_runner.py b/robot-server/robot_server/service/task_runner.py index da9f074bf13..4d671b9fd07 100644 --- a/robot-server/robot_server/service/task_runner.py +++ b/robot-server/robot_server/service/task_runner.py @@ -7,7 +7,7 @@ from __future__ import annotations import asyncio from logging import getLogger -from typing import Any, Awaitable, Callable, Set +from typing import Annotated, Any, Awaitable, Callable, Set from fastapi import Depends from server_utils.fastapi_utils.app_state import ( AppState, @@ -89,9 +89,11 @@ async def clean_up_task_runner(app_state: AppState) -> None: await task_runner.cancel_all_and_clean_up() -def get_task_runner(app_state: AppState = Depends(get_app_state)) -> TaskRunner: +def get_task_runner( + app_state: Annotated[AppState, Depends(get_app_state)] +) -> TaskRunner: """Intended to be used by endpoint functions as a FastAPI dependency, - like `task_runner = fastapi.Depends(get_task_runner)`. + like `Annotated[task_runner, fastapi.Depends(get_task_runner)]`. """ task_runner = _task_runner_accessor.get_from(app_state) assert task_runner, "Task runner was not initialized" diff --git a/robot-server/robot_server/service/tip_length/router.py b/robot-server/robot_server/service/tip_length/router.py index e4658f59dc7..c982bdc9d37 100644 --- a/robot-server/robot_server/service/tip_length/router.py +++ b/robot-server/robot_server/service/tip_length/router.py @@ -1,6 +1,6 @@ from starlette import status from fastapi import APIRouter, Depends, Query -from typing import Optional, cast +from typing import Annotated, Optional, cast from opentrons.calibration_storage import types as cal_types from opentrons.calibration_storage.ot2 import tip_length, models @@ -49,24 +49,29 @@ def _format_calibration( response_model=tl_models.MultipleCalibrationsResponse, ) async def get_all_tip_length_calibrations( - tiprack_hash: Optional[str] = Query( - None, - description=( - "Filter results by their `tiprack` field." - " This is deprecated because it was prone to bugs where semantically identical" - " definitions had different hashes." - " Use `tiprack_uri` instead." + _: Annotated[API, Depends(get_ot2_hardware)], + tiprack_hash: Annotated[ + Optional[str], + Query( + description=( + "Filter results by their `tiprack` field." 
+ " This is deprecated because it was prone to bugs where semantically identical" + " definitions had different hashes." + " Use `tiprack_uri` instead." + ), + deprecated=True, ), - deprecated=True, - ), - pipette_id: Optional[str] = Query( - None, description="Filter results by their `pipette` field." - ), - tiprack_uri: Optional[str] = Query( - None, - description="Filter results by their `uri` field.", - ), - _: API = Depends(get_ot2_hardware), + ] = None, + pipette_id: Annotated[ + Optional[str], + Query(description="Filter results by their `pipette` field."), + ] = None, + tiprack_uri: Annotated[ + Optional[str], + Query( + description="Filter results by their `uri` field.", + ), + ] = None, ) -> tl_models.MultipleCalibrationsResponse: all_calibrations = tip_length.get_all_tip_length_calibrations() if not all_calibrations: @@ -99,37 +104,44 @@ async def get_all_tip_length_calibrations( responses={status.HTTP_404_NOT_FOUND: {"model": ErrorBody}}, ) async def delete_specific_tip_length_calibration( - pipette_id: str = Query( - ..., - description=( - "The `pipette` field value of the calibration you want to delete." - " (See `GET /calibration/tip_length`.)" + _: Annotated[API, Depends(get_ot2_hardware)], + pipette_id: Annotated[ + str, + Query( + ..., + description=( + "The `pipette` field value of the calibration you want to delete." + " (See `GET /calibration/tip_length`.)" + ), ), - ), - tiprack_hash: Optional[str] = Query( - None, - description=( - "The `tiprack` field value of the calibration you want to delete." - " (See `GET /calibration/tip_length`.)" - "\n\n" - " This is deprecated because it was prone to bugs where semantically identical" - " definitions had different hashes." - " Use `tiprack_uri` instead." - "\n\n" - "You must supply either this or `tiprack_uri`." + ], + tiprack_hash: Annotated[ + Optional[str], + Query( + description=( + "The `tiprack` field value of the calibration you want to delete." + " (See `GET /calibration/tip_length`.)" + "\n\n" + " This is deprecated because it was prone to bugs where semantically identical" + " definitions had different hashes." + " Use `tiprack_uri` instead." + "\n\n" + "You must supply either this or `tiprack_uri`." + ), + deprecated=True, ), - deprecated=True, - ), - tiprack_uri: Optional[str] = Query( - None, - description=( - "The `uri` field value of the calibration you want to delete." - " (See `GET /calibration/tip_length`.)" - "\n\n" - " You must supply either this or `tiprack_hash`." + ] = None, + tiprack_uri: Annotated[ + Optional[str], + Query( + description=( + "The `uri` field value of the calibration you want to delete." + " (See `GET /calibration/tip_length`.)" + "\n\n" + " You must supply either this or `tiprack_hash`." 
+ ), ), - ), - _: API = Depends(get_ot2_hardware), + ] = None, ): try: tip_length.delete_tip_length_calibration( diff --git a/robot-server/robot_server/subsystems/router.py b/robot-server/robot_server/subsystems/router.py index bf0ca0edac8..e64e7390063 100644 --- a/robot-server/robot_server/subsystems/router.py +++ b/robot-server/robot_server/subsystems/router.py @@ -1,7 +1,7 @@ """The router for the /subsystems endpoints.""" from datetime import datetime -from typing import Optional, TYPE_CHECKING +from typing import Annotated, Optional, TYPE_CHECKING from fastapi import APIRouter, status, Depends, Response, Request from typing_extensions import Literal @@ -117,7 +117,7 @@ class NoOngoingUpdate(ErrorDetails): }, ) async def get_attached_subsystems( - thread_manager: ThreadManagedHardware = Depends(get_thread_manager), + thread_manager: Annotated[ThreadManagedHardware, Depends(get_thread_manager)], ) -> PydanticResponse[SimpleMultiBody[PresentSubsystem]]: """Return all subsystems currently present on the machine.""" hardware = get_ot3_hardware(thread_manager) @@ -151,7 +151,7 @@ async def get_attached_subsystems( ) async def get_attached_subsystem( subsystem: SubSystem, - thread_manager: ThreadManagedHardware = Depends(get_thread_manager), + thread_manager: Annotated[ThreadManagedHardware, Depends(get_thread_manager)], ) -> PydanticResponse[SimpleBody[PresentSubsystem]]: """Return the status of a single attached subsystem. @@ -190,7 +190,9 @@ async def get_attached_subsystem( responses={status.HTTP_200_OK: {"model": SimpleMultiBody[UpdateProgressSummary]}}, ) async def get_subsystem_updates( - update_manager: FirmwareUpdateManager = Depends(get_firmware_update_manager), + update_manager: Annotated[ + FirmwareUpdateManager, Depends(get_firmware_update_manager) + ], ) -> PydanticResponse[SimpleMultiBody[UpdateProgressSummary]]: """Return all currently-running firmware update process summaries.""" handles = await update_manager.all_ongoing_processes() @@ -221,7 +223,9 @@ async def get_subsystem_updates( ) async def get_subsystem_update( subsystem: SubSystem, - update_manager: FirmwareUpdateManager = Depends(get_firmware_update_manager), + update_manager: Annotated[ + FirmwareUpdateManager, Depends(get_firmware_update_manager) + ], ) -> PydanticResponse[SimpleBody[UpdateProgressData]]: """Return full data about a specific currently-running update process.""" try: @@ -262,7 +266,9 @@ async def get_subsystem_update( responses={status.HTTP_200_OK: {"model": SimpleMultiBody[UpdateProgressData]}}, ) async def get_update_processes( - update_manager: FirmwareUpdateManager = Depends(get_firmware_update_manager), + update_manager: Annotated[ + FirmwareUpdateManager, Depends(get_firmware_update_manager) + ], ) -> PydanticResponse[SimpleMultiBody[UpdateProgressSummary]]: """Return summaries of all past (since robot boot) or present update processes.""" data = [ @@ -289,7 +295,9 @@ async def get_update_processes( ) async def get_update_process( id: str, - update_manager: FirmwareUpdateManager = Depends(get_firmware_update_manager), + update_manager: Annotated[ + FirmwareUpdateManager, Depends(get_firmware_update_manager) + ], ) -> PydanticResponse[SimpleBody[UpdateProgressData]]: """Return the progress of a specific past or present update process.""" try: @@ -330,9 +338,11 @@ async def begin_subsystem_update( subsystem: SubSystem, response: Response, request: Request, - update_manager: FirmwareUpdateManager = Depends(get_firmware_update_manager), - update_process_id: str = Depends(get_unique_id), - 
created_at: datetime = Depends(get_current_time), + update_manager: Annotated[ + FirmwareUpdateManager, Depends(get_firmware_update_manager) + ], + update_process_id: Annotated[str, Depends(get_unique_id)], + created_at: Annotated[datetime, Depends(get_current_time)], ) -> PydanticResponse[SimpleBody[UpdateProgressData]]: """Update the firmware of the OT3 instrument on the specified mount.""" try: diff --git a/robot-server/tests/commands/test_get_default_engine.py b/robot-server/tests/commands/test_get_default_engine.py index afc55efabf1..a3476cd562a 100644 --- a/robot-server/tests/commands/test_get_default_engine.py +++ b/robot-server/tests/commands/test_get_default_engine.py @@ -85,7 +85,10 @@ async def test_get_default_orchestrator( async def test_raises_conflict( - decoy: Decoy, run_orchestrator_store: RunOrchestratorStore + decoy: Decoy, + run_orchestrator_store: RunOrchestratorStore, + hardware_api: HardwareControlAPI, + module_identifier: ModuleIdentifier, ) -> None: """It should raise a 409 conflict if the default engine is not availble.""" decoy.when(await run_orchestrator_store.get_default_orchestrator()).then_raise( @@ -93,7 +96,11 @@ async def test_raises_conflict( ) with pytest.raises(ApiError) as exc_info: - await get_default_orchestrator(run_orchestrator_store=run_orchestrator_store) + await get_default_orchestrator( + run_orchestrator_store=run_orchestrator_store, + hardware_api=hardware_api, + module_identifier=module_identifier, + ) assert exc_info.value.status_code == 409 assert exc_info.value.content["errors"][0]["id"] == "RunActive" diff --git a/robot-server/tests/data_files/test_router.py b/robot-server/tests/data_files/test_router.py index 7437af48c33..ffd77a2e27a 100644 --- a/robot-server/tests/data_files/test_router.py +++ b/robot-server/tests/data_files/test_router.py @@ -106,6 +106,7 @@ async def test_upload_existing_data_file( data_files_store: DataFilesStore, file_reader_writer: FileReaderWriter, file_hasher: FileHasher, + file_auto_deleter: DataFileAutoDeleter, ) -> None: """It should return the existing file info.""" data_files_directory = Path("/dev/null") @@ -135,6 +136,7 @@ async def test_upload_existing_data_file( data_files_store=data_files_store, file_reader_writer=file_reader_writer, file_hasher=file_hasher, + data_file_auto_deleter=file_auto_deleter, file_id="data-file-id", created_at=datetime(year=2024, month=6, day=18), ) @@ -201,6 +203,7 @@ async def test_upload_non_existent_file_path( data_files_store: DataFilesStore, file_reader_writer: FileReaderWriter, file_hasher: FileHasher, + file_auto_deleter: DataFileAutoDeleter, ) -> None: """It should store the data file from path to persistent storage & update the database.""" data_files_directory = Path("/dev/null") @@ -216,6 +219,7 @@ async def test_upload_non_existent_file_path( data_files_store=data_files_store, file_reader_writer=file_reader_writer, file_hasher=file_hasher, + data_file_auto_deleter=file_auto_deleter, file_id="data-file-id", created_at=datetime(year=2024, month=6, day=18), ) @@ -228,6 +232,7 @@ async def test_upload_non_csv_file( data_files_store: DataFilesStore, file_reader_writer: FileReaderWriter, file_hasher: FileHasher, + file_auto_deleter: DataFileAutoDeleter, ) -> None: """It should store the data file from path to persistent storage & update the database.""" data_files_directory = Path("/dev/null") @@ -245,6 +250,7 @@ async def test_upload_non_csv_file( data_files_store=data_files_store, file_reader_writer=file_reader_writer, file_hasher=file_hasher, + 
data_file_auto_deleter=file_auto_deleter, file_id="data-file-id", created_at=datetime(year=2024, month=6, day=18), ) diff --git a/robot-server/tests/maintenance_runs/router/test_base_router.py b/robot-server/tests/maintenance_runs/router/test_base_router.py index b363cd1e6ac..35fb6da06c1 100644 --- a/robot-server/tests/maintenance_runs/router/test_base_router.py +++ b/robot-server/tests/maintenance_runs/router/test_base_router.py @@ -101,6 +101,7 @@ async def test_create_run( is_ok_to_create_maintenance_run=True, deck_configuration_store=mock_deck_configuration_store, notify_publishers=mock_notify_publishers, + check_estop=True, ) assert result.content.data == expected_response @@ -125,6 +126,8 @@ async def test_create_maintenance_run_with_protocol_run_conflict( run_data_manager=mock_maintenance_run_data_manager, is_ok_to_create_maintenance_run=False, deck_configuration_store=mock_deck_configuration_store, + check_estop=True, + notify_publishers=mock_notify_publishers, ) assert exc_info.value.status_code == 409 assert exc_info.value.content["errors"][0]["id"] == "ProtocolRunIsActive" diff --git a/robot-server/tests/maintenance_runs/router/test_commands_router.py b/robot-server/tests/maintenance_runs/router/test_commands_router.py index 43ad3c6fa30..19415f080f9 100644 --- a/robot-server/tests/maintenance_runs/router/test_commands_router.py +++ b/robot-server/tests/maintenance_runs/router/test_commands_router.py @@ -108,10 +108,12 @@ async def test_create_run_command( ).then_return(command_once_added) result = await create_run_command( + run_id="run-id", request_body=RequestModelWithCommandCreate(data=command_request), waitUntilComplete=False, run_orchestrator_store=mock_maintenance_run_orchestrator_store, timeout=None, + check_estop=True, ) assert result.content.data == command_once_added @@ -148,10 +150,12 @@ async def test_create_run_command_blocking_completion( ).then_return(command_once_completed) result = await create_run_command( + run_id="run-id", request_body=RequestModelWithCommandCreate(data=command_request), waitUntilComplete=True, timeout=999, run_orchestrator_store=mock_maintenance_run_orchestrator_store, + check_estop=True, ) assert result.content.data == command_once_completed diff --git a/robot-server/tests/modules/test_router.py b/robot-server/tests/modules/test_router.py index f2392672109..287041f17cf 100644 --- a/robot-server/tests/modules/test_router.py +++ b/robot-server/tests/modules/test_router.py @@ -67,6 +67,8 @@ async def test_get_modules_empty( result = await get_attached_modules( requested_version=_HTTP_API_VERSION, hardware=hardware_api, + module_identifier=decoy.mock(cls=ModuleIdentifier), + module_data_mapper=decoy.mock(cls=ModuleDataMapper), ) assert result.content.data == [] diff --git a/robot-server/tests/protocols/test_protocols_router.py b/robot-server/tests/protocols/test_protocols_router.py index 89c41341941..a2ad10dbda0 100644 --- a/robot-server/tests/protocols/test_protocols_router.py +++ b/robot-server/tests/protocols/test_protocols_router.py @@ -122,9 +122,25 @@ def data_files_store(decoy: Decoy) -> DataFilesStore: return decoy.mock(cls=DataFilesStore) +@pytest.fixture +def data_files_directory(decoy: Decoy) -> Path: + """Get a mocked out data files directory. + + We could use Path("/dev/null") for this but I worry something will accidentally + try to use it as an actual path and then we'll get confusing errors on Windows. 
+ """ + return decoy.mock(cls=Path) + + @pytest.fixture def protocol_auto_deleter(decoy: Decoy) -> ProtocolAutoDeleter: - """Get a mocked out AutoDeleter.""" + """Get a mocked out ProtocolAutoDeleter.""" + return decoy.mock(cls=ProtocolAutoDeleter) + + +@pytest.fixture +def quick_transfer_protocol_auto_deleter(decoy: Decoy) -> ProtocolAutoDeleter: + """Get a mocked out quick-transfer ProtocolAutoDeleter.""" return decoy.mock(cls=ProtocolAutoDeleter) @@ -135,7 +151,9 @@ async def test_get_protocols_no_protocols( """It should return an empty collection response with no protocols loaded.""" decoy.when(protocol_store.get_all()).then_return([]) - result = await get_protocols(protocol_store=protocol_store) + result = await get_protocols( + protocol_store=protocol_store, analysis_store=decoy.mock(cls=AnalysisStore) + ) assert result.content.data == [] assert result.content.meta == MultiBodyMeta(cursor=0, totalLength=0) @@ -390,6 +408,7 @@ async def test_get_protocol_not_found( await get_protocol_by_id( "protocol-id", protocol_store=protocol_store, + analysis_store=decoy.mock(cls=AnalysisStore), ) assert exc_info.value.status_code == 404 @@ -399,11 +418,14 @@ async def test_create_existing_protocol( decoy: Decoy, protocol_store: ProtocolStore, analysis_store: AnalysisStore, + data_files_store: DataFilesStore, + data_files_directory: Path, protocol_reader: ProtocolReader, file_reader_writer: FileReaderWriter, file_hasher: FileHasher, analyses_manager: AnalysesManager, protocol_auto_deleter: ProtocolAutoDeleter, + quick_transfer_protocol_auto_deleter: ProtocolAutoDeleter, ) -> None: """It should return the existing protocol info from database.""" protocol_directory = Path("/dev/null") @@ -480,11 +502,14 @@ async def test_create_existing_protocol( protocol_directory=protocol_directory, protocol_store=protocol_store, analysis_store=analysis_store, + data_files_store=data_files_store, + data_files_directory=data_files_directory, file_reader_writer=file_reader_writer, protocol_reader=protocol_reader, file_hasher=file_hasher, analyses_manager=analyses_manager, protocol_auto_deleter=protocol_auto_deleter, + quick_transfer_protocol_auto_deleter=quick_transfer_protocol_auto_deleter, robot_type="OT-2 Standard", protocol_id="protocol-id", analysis_id="analysis-id", @@ -510,11 +535,14 @@ async def test_create_protocol( decoy: Decoy, protocol_store: ProtocolStore, analysis_store: AnalysisStore, + data_files_store: DataFilesStore, + data_files_directory: Path, protocol_reader: ProtocolReader, file_reader_writer: FileReaderWriter, file_hasher: FileHasher, analyses_manager: AnalysesManager, protocol_auto_deleter: ProtocolAutoDeleter, + quick_transfer_protocol_auto_deleter: ProtocolAutoDeleter, ) -> None: """It should store an uploaded protocol file.""" protocol_directory = Path("/dev/null") @@ -596,12 +624,14 @@ async def test_create_protocol( protocol_directory=protocol_directory, protocol_store=protocol_store, analysis_store=analysis_store, + data_files_store=data_files_store, + data_files_directory=data_files_directory, file_reader_writer=file_reader_writer, protocol_reader=protocol_reader, file_hasher=file_hasher, analyses_manager=analyses_manager, protocol_auto_deleter=protocol_auto_deleter, - quick_transfer_protocol_auto_deleter=protocol_auto_deleter, + quick_transfer_protocol_auto_deleter=quick_transfer_protocol_auto_deleter, robot_type="OT-2 Standard", protocol_id="protocol-id", analysis_id="analysis-id", @@ -638,6 +668,7 @@ async def test_create_new_protocol_with_run_time_params( file_hasher: FileHasher, 
analyses_manager: AnalysesManager, protocol_auto_deleter: ProtocolAutoDeleter, + quick_transfer_protocol_auto_deleter: ProtocolAutoDeleter, ) -> None: """It should handle the run time parameter overrides correctly.""" protocol_directory = Path("/dev/null") @@ -745,6 +776,7 @@ async def test_create_new_protocol_with_run_time_params( file_hasher=file_hasher, analyses_manager=analyses_manager, protocol_auto_deleter=protocol_auto_deleter, + quick_transfer_protocol_auto_deleter=quick_transfer_protocol_auto_deleter, robot_type="OT-2 Standard", protocol_id="protocol-id", analysis_id="analysis-id", @@ -762,11 +794,14 @@ async def test_create_existing_protocol_with_no_previous_analysis( decoy: Decoy, protocol_store: ProtocolStore, analysis_store: AnalysisStore, + data_files_store: DataFilesStore, + data_files_directory: Path, protocol_reader: ProtocolReader, file_reader_writer: FileReaderWriter, file_hasher: FileHasher, analyses_manager: AnalysesManager, protocol_auto_deleter: ProtocolAutoDeleter, + quick_transfer_protocol_auto_deleter: ProtocolAutoDeleter, ) -> None: """It should re-trigger analysis of the existing protocol resource.""" protocol_directory = Path("/dev/null") @@ -854,11 +889,14 @@ async def test_create_existing_protocol_with_no_previous_analysis( protocol_directory=protocol_directory, protocol_store=protocol_store, analysis_store=analysis_store, + data_files_store=data_files_store, + data_files_directory=data_files_directory, file_reader_writer=file_reader_writer, protocol_reader=protocol_reader, file_hasher=file_hasher, analyses_manager=analyses_manager, protocol_auto_deleter=protocol_auto_deleter, + quick_transfer_protocol_auto_deleter=quick_transfer_protocol_auto_deleter, robot_type="OT-2 Standard", protocol_id="protocol-id", analysis_id="analysis-id", @@ -890,6 +928,7 @@ async def test_create_existing_protocol_with_different_run_time_params( file_hasher: FileHasher, analyses_manager: AnalysesManager, protocol_auto_deleter: ProtocolAutoDeleter, + quick_transfer_protocol_auto_deleter: ProtocolAutoDeleter, ) -> None: """It should re-trigger analysis of the existing protocol resource.""" protocol_directory = Path("/dev/null") @@ -1007,6 +1046,7 @@ async def test_create_existing_protocol_with_different_run_time_params( file_hasher=file_hasher, analyses_manager=analyses_manager, protocol_auto_deleter=protocol_auto_deleter, + quick_transfer_protocol_auto_deleter=quick_transfer_protocol_auto_deleter, robot_type="OT-2 Standard", protocol_id="protocol-id", analysis_id="analysis-id", @@ -1032,11 +1072,14 @@ async def test_create_existing_protocol_with_same_run_time_params( decoy: Decoy, protocol_store: ProtocolStore, analysis_store: AnalysisStore, + data_files_store: DataFilesStore, + data_files_directory: Path, protocol_reader: ProtocolReader, file_reader_writer: FileReaderWriter, file_hasher: FileHasher, analyses_manager: AnalysesManager, protocol_auto_deleter: ProtocolAutoDeleter, + quick_transfer_protocol_auto_deleter: ProtocolAutoDeleter, ) -> None: """It should re-trigger analysis of the existing protocol resource.""" protocol_directory = Path("/dev/null") @@ -1126,11 +1169,14 @@ async def test_create_existing_protocol_with_same_run_time_params( protocol_directory=protocol_directory, protocol_store=protocol_store, analysis_store=analysis_store, + data_files_store=data_files_store, + data_files_directory=data_files_directory, file_reader_writer=file_reader_writer, protocol_reader=protocol_reader, file_hasher=file_hasher, analyses_manager=analyses_manager, 
protocol_auto_deleter=protocol_auto_deleter, + quick_transfer_protocol_auto_deleter=quick_transfer_protocol_auto_deleter, robot_type="OT-2 Standard", protocol_id="protocol-id", analysis_id="analysis-id", @@ -1156,11 +1202,14 @@ async def test_create_existing_protocol_with_pending_analysis_raises( decoy: Decoy, protocol_store: ProtocolStore, analysis_store: AnalysisStore, + data_files_store: DataFilesStore, + data_files_directory: Path, protocol_reader: ProtocolReader, file_reader_writer: FileReaderWriter, file_hasher: FileHasher, analyses_manager: AnalysesManager, protocol_auto_deleter: ProtocolAutoDeleter, + quick_transfer_protocol_auto_deleter: ProtocolAutoDeleter, ) -> None: """It should raise an error if protocol has existing pending analysis.""" protocol_directory = Path("/dev/null") @@ -1252,11 +1301,14 @@ async def test_create_existing_protocol_with_pending_analysis_raises( protocol_directory=protocol_directory, protocol_store=protocol_store, analysis_store=analysis_store, + data_files_store=data_files_store, + data_files_directory=data_files_directory, file_reader_writer=file_reader_writer, protocol_reader=protocol_reader, file_hasher=file_hasher, analyses_manager=analyses_manager, protocol_auto_deleter=protocol_auto_deleter, + quick_transfer_protocol_auto_deleter=quick_transfer_protocol_auto_deleter, robot_type="OT-2 Standard", protocol_id="protocol-id", analysis_id="analysis-id", @@ -1299,6 +1351,15 @@ async def test_create_protocol_not_readable( file_hasher=file_hasher, protocol_id="protocol-id", maximum_quick_transfer_protocols=20, + analysis_store=decoy.mock(cls=AnalysisStore), + analyses_manager=decoy.mock(cls=AnalysesManager), + protocol_auto_deleter=decoy.mock(cls=ProtocolAutoDeleter), + quick_transfer_protocol_auto_deleter=decoy.mock(cls=ProtocolAutoDeleter), + data_files_store=decoy.mock(cls=DataFilesStore), + data_files_directory=Path("/dev/null"), + robot_type="OT-2 Standard", + analysis_id="analysis-id", + created_at=datetime.now(), ) assert exc_info.value.status_code == 422 @@ -1352,6 +1413,15 @@ async def test_create_protocol_different_robot_type( file_hasher=file_hasher, protocol_id="protocol-id", maximum_quick_transfer_protocols=20, + analysis_store=decoy.mock(cls=AnalysisStore), + analyses_manager=decoy.mock(cls=AnalysesManager), + protocol_auto_deleter=decoy.mock(cls=ProtocolAutoDeleter), + quick_transfer_protocol_auto_deleter=decoy.mock(cls=ProtocolAutoDeleter), + data_files_store=decoy.mock(cls=DataFilesStore), + data_files_directory=Path("/dev/null"), + robot_type="OT-3 Standard", + analysis_id="analysis-id", + created_at=datetime.now(), ) assert exc_info.value.status_code == 422 @@ -1592,6 +1662,8 @@ async def test_create_protocol_analyses_with_same_rtp_values( decoy: Decoy, protocol_store: ProtocolStore, analysis_store: AnalysisStore, + data_files_store: DataFilesStore, + data_files_directory: Path, analyses_manager: AnalysesManager, ) -> None: """It should not start a new analysis for the new rtp values.""" @@ -1669,6 +1741,8 @@ async def test_create_protocol_analyses_with_same_rtp_values( ), protocol_store=protocol_store, analysis_store=analysis_store, + data_files_store=data_files_store, + data_files_directory=data_files_directory, analyses_manager=analyses_manager, analysis_id="analysis-id-2", ) @@ -1809,6 +1883,8 @@ async def test_update_protocol_analyses_with_forced_reanalysis( decoy: Decoy, protocol_store: ProtocolStore, analysis_store: AnalysisStore, + data_files_store: DataFilesStore, + data_files_directory: Path, analyses_manager: AnalysesManager, 
) -> None: """It should start a new analysis for the protocol, regardless of rtp values.""" @@ -1868,6 +1944,8 @@ async def test_update_protocol_analyses_with_forced_reanalysis( request_body=RequestModel(data=AnalysisRequest(forceReAnalyze=True)), protocol_store=protocol_store, analysis_store=analysis_store, + data_files_store=data_files_store, + data_files_directory=data_files_directory, analyses_manager=analyses_manager, analysis_id="analysis-id-2", ) @@ -1882,11 +1960,14 @@ async def test_create_protocol_kind_quick_transfer( decoy: Decoy, protocol_store: ProtocolStore, analysis_store: AnalysisStore, + data_files_store: DataFilesStore, + data_files_directory: Path, protocol_reader: ProtocolReader, file_reader_writer: FileReaderWriter, file_hasher: FileHasher, analyses_manager: AnalysesManager, protocol_auto_deleter: ProtocolAutoDeleter, + quick_transfer_protocol_auto_deleter: ProtocolAutoDeleter, ) -> None: """It should store an uploaded protocol file marked as quick-transfer.""" protocol_directory = Path("/dev/null") @@ -1977,11 +2058,14 @@ async def test_create_protocol_kind_quick_transfer( protocol_directory=protocol_directory, protocol_store=protocol_store, analysis_store=analysis_store, + data_files_store=data_files_store, + data_files_directory=data_files_directory, file_reader_writer=file_reader_writer, protocol_reader=protocol_reader, file_hasher=file_hasher, analyses_manager=analyses_manager, - quick_transfer_protocol_auto_deleter=protocol_auto_deleter, + protocol_auto_deleter=protocol_auto_deleter, + quick_transfer_protocol_auto_deleter=quick_transfer_protocol_auto_deleter, robot_type="OT-3 Standard", protocol_kind=ProtocolKind.QUICK_TRANSFER, protocol_id="protocol-id", @@ -1991,7 +2075,7 @@ async def test_create_protocol_kind_quick_transfer( ) decoy.verify( - protocol_auto_deleter.make_room_for_new_protocol(), + quick_transfer_protocol_auto_deleter.make_room_for_new_protocol(), protocol_store.insert(protocol_resource), ) @@ -2013,10 +2097,13 @@ async def test_create_protocol_maximum_quick_transfer_protocols_exceeded( decoy: Decoy, protocol_store: ProtocolStore, analysis_store: AnalysisStore, + data_files_store: DataFilesStore, + data_files_directory: Path, protocol_reader: ProtocolReader, file_reader_writer: FileReaderWriter, file_hasher: FileHasher, protocol_auto_deleter: ProtocolAutoDeleter, + quick_transfer_protocol_auto_deleter: ProtocolAutoDeleter, ) -> None: """It should throw a 409 error if the quick transfer protocols maximum is exceeded.""" protocol_directory = Path("/dev/null") @@ -2056,10 +2143,14 @@ async def test_create_protocol_maximum_quick_transfer_protocols_exceeded( protocol_directory=protocol_directory, protocol_store=protocol_store, analysis_store=analysis_store, + data_files_store=data_files_store, + data_files_directory=data_files_directory, + analyses_manager=decoy.mock(cls=AnalysesManager), file_reader_writer=file_reader_writer, protocol_reader=protocol_reader, file_hasher=file_hasher, protocol_auto_deleter=protocol_auto_deleter, + quick_transfer_protocol_auto_deleter=quick_transfer_protocol_auto_deleter, robot_type="OT-3 Standard", protocol_id="protocol-id", analysis_id="analysis-id", diff --git a/robot-server/tests/runs/router/test_actions_router.py b/robot-server/tests/runs/router/test_actions_router.py index c638c243eaf..336a645b58b 100644 --- a/robot-server/tests/runs/router/test_actions_router.py +++ b/robot-server/tests/runs/router/test_actions_router.py @@ -64,6 +64,7 @@ async def test_create_run_action( created_at=created_at, 
maintenance_run_orchestrator_store=mock_maintenance_run_orchestrator_store, deck_configuration_store=mock_deck_configuration_store, + check_estop=True, ) assert result.content.data == expected_result @@ -110,6 +111,7 @@ async def test_play_action_clears_maintenance_run( created_at=created_at, maintenance_run_orchestrator_store=mock_maintenance_run_orchestrator_store, deck_configuration_store=mock_deck_configuration_store, + check_estop=True, ) decoy.verify(await mock_maintenance_run_orchestrator_store.clear(), times=1) @@ -162,6 +164,7 @@ async def test_create_play_action_not_allowed( created_at=created_at, maintenance_run_orchestrator_store=mock_maintenance_run_orchestrator_store, deck_configuration_store=mock_deck_configuration_store, + check_estop=True, ) assert exc_info.value.status_code == expected_status_code diff --git a/robot-server/tests/runs/router/test_base_router.py b/robot-server/tests/runs/router/test_base_router.py index df778420a07..5f23d459e2a 100644 --- a/robot-server/tests/runs/router/test_base_router.py +++ b/robot-server/tests/runs/router/test_base_router.py @@ -64,6 +64,16 @@ def mock_data_files_store(decoy: Decoy) -> DataFilesStore: return decoy.mock(cls=DataFilesStore) +@pytest.fixture +def mock_data_files_directory(decoy: Decoy) -> Path: + """Get a mocked out data files directory. + + We could use Path("/dev/null") for this but I worry something will accidentally + try to use it as an actual path and then we'll get confusing errors on Windows. + """ + return decoy.mock(cls=Path) + + @pytest.fixture def labware_offset_create() -> LabwareOffsetCreate: """Get a labware offset create request value object.""" @@ -80,6 +90,7 @@ async def test_create_run( mock_run_auto_deleter: RunAutoDeleter, labware_offset_create: pe_types.LabwareOffsetCreate, mock_deck_configuration_store: DeckConfigurationStore, + mock_protocol_store: ProtocolStore, mock_data_files_store: DataFilesStore, ) -> None: """It should be able to create a basic run.""" @@ -127,8 +138,11 @@ async def test_create_run( run_id=run_id, created_at=run_created_at, run_auto_deleter=mock_run_auto_deleter, + quick_transfer_run_auto_deleter=mock_run_auto_deleter, deck_configuration_store=mock_deck_configuration_store, notify_publishers=mock_notify_publishers, + protocol_store=mock_protocol_store, + check_estop=True, ) assert result.content.data == expected_response @@ -224,8 +238,10 @@ async def test_create_protocol_run( run_id=run_id, created_at=run_created_at, run_auto_deleter=mock_run_auto_deleter, + quick_transfer_run_auto_deleter=mock_run_auto_deleter, deck_configuration_store=mock_deck_configuration_store, notify_publishers=mock_notify_publishers, + check_estop=True, ) assert result.content.data == expected_response @@ -238,6 +254,10 @@ async def test_create_protocol_run_bad_protocol_id( decoy: Decoy, mock_protocol_store: ProtocolStore, mock_deck_configuration_store: DeckConfigurationStore, + mock_run_data_manager: RunDataManager, + mock_run_auto_deleter: RunAutoDeleter, + mock_data_files_store: DataFilesStore, + mock_data_files_directory: Path, ) -> None: """It should 404 if a protocol for a run does not exist.""" error = ProtocolNotFoundError("protocol-id") @@ -251,6 +271,15 @@ async def test_create_protocol_run_bad_protocol_id( request_body=RequestModel(data=RunCreate(protocolId="protocol-id")), protocol_store=mock_protocol_store, deck_configuration_store=mock_deck_configuration_store, + run_data_manager=mock_run_data_manager, + data_files_store=mock_data_files_store, + 
data_files_directory=mock_data_files_directory, + run_id="run-id", + created_at=datetime.now(), + run_auto_deleter=mock_run_auto_deleter, + quick_transfer_run_auto_deleter=mock_run_auto_deleter, + check_estop=True, + notify_publishers=mock_notify_publishers, ) assert exc_info.value.status_code == 404 @@ -262,6 +291,9 @@ async def test_create_run_conflict( mock_run_data_manager: RunDataManager, mock_run_auto_deleter: RunAutoDeleter, mock_deck_configuration_store: DeckConfigurationStore, + mock_protocol_store: ProtocolStore, + mock_data_files_store: DataFilesStore, + mock_data_files_directory: Path, ) -> None: """It should respond with a conflict error if multiple engines are created.""" created_at = datetime(year=2021, month=1, day=1) @@ -287,10 +319,15 @@ async def test_create_run_conflict( run_id="run-id", created_at=created_at, request_body=None, + protocol_store=mock_protocol_store, run_data_manager=mock_run_data_manager, run_auto_deleter=mock_run_auto_deleter, + quick_transfer_run_auto_deleter=mock_run_auto_deleter, deck_configuration_store=mock_deck_configuration_store, + data_files_store=mock_data_files_store, + data_files_directory=mock_data_files_directory, notify_publishers=mock_notify_publishers, + check_estop=True, ) assert exc_info.value.status_code == 409 diff --git a/robot-server/tests/runs/router/test_commands_router.py b/robot-server/tests/runs/router/test_commands_router.py index 4b49ffe5d3b..147a5535556 100644 --- a/robot-server/tests/runs/router/test_commands_router.py +++ b/robot-server/tests/runs/router/test_commands_router.py @@ -128,11 +128,13 @@ def _stub_queued_command_state(*_a: object, **_k: object) -> pe_commands.Command ).then_do(_stub_queued_command_state) result = await create_run_command( + run_id="run-id", request_body=RequestModelWithCommandCreate(data=command_request), waitUntilComplete=False, run_orchestrator_store=mock_run_orchestrator_store, failedCommandId=None, timeout=12, + check_estop=True, ) assert result.content.data == command_once_added @@ -158,11 +160,13 @@ async def test_create_command_with_failed_command_raises( with pytest.raises(ApiError): await create_run_command( - RequestModelWithCommandCreate(data=command_create), + run_id="run-id", + request_body=RequestModelWithCommandCreate(data=command_create), waitUntilComplete=False, timeout=42, run_orchestrator_store=mock_run_orchestrator_store, failedCommandId="123", + check_estop=True, ) @@ -199,14 +203,15 @@ async def test_create_run_command_blocking_completion( ) result = await create_run_command( + run_id="run-id", request_body=RequestModelWithCommandCreate(data=command_request), waitUntilComplete=True, timeout=999, run_orchestrator_store=mock_run_orchestrator_store, failedCommandId=None, + check_estop=True, ) - print(result.content.data) assert result.content.data == command_once_completed assert result.status_code == 201 @@ -229,10 +234,12 @@ async def test_add_conflicting_setup_command( with pytest.raises(ApiError) as exc_info: await create_run_command( + run_id="run-id", request_body=RequestModelWithCommandCreate(data=command_request), waitUntilComplete=False, run_orchestrator_store=mock_run_orchestrator_store, failedCommandId=None, + check_estop=True, ) assert exc_info.value.status_code == 409 @@ -260,10 +267,12 @@ async def test_add_command_to_stopped_engine( with pytest.raises(ApiError) as exc_info: await create_run_command( + run_id="run-id", request_body=RequestModelWithCommandCreate(data=command_request), waitUntilComplete=False, run_orchestrator_store=mock_run_orchestrator_store, 
failedCommandId=None, + check_estop=True, ) assert exc_info.value.status_code == 409 diff --git a/robot-server/tests/test_versioning.py b/robot-server/tests/test_versioning.py index 59f00d3476d..a141d1572e3 100644 --- a/robot-server/tests/test_versioning.py +++ b/robot-server/tests/test_versioning.py @@ -6,7 +6,7 @@ import pytest from fastapi import FastAPI, APIRouter, Request, Depends from fastapi.testclient import TestClient -from typing import Dict +from typing import Annotated, Dict from robot_server.errors.exception_handlers import exception_handlers from robot_server.versioning import API_VERSION, check_version_header @@ -30,7 +30,7 @@ def test_check_version_header(app: FastAPI, client: TestClient) -> None: @app.get("/foobar") def _get_foobar( request: Request, - _: None = Depends(check_version_header), + _: Annotated[None, Depends(check_version_header)], ) -> Dict[str, str]: assert request.state.api_version == 2 return {"hello": "world"} @@ -64,7 +64,7 @@ def test_check_version_header_fallback(app: FastAPI, client: TestClient) -> None @app.get("/foobar") def _get_foobar( request: Request, - _: None = Depends(check_version_header), + _: Annotated[None, Depends(check_version_header)], ) -> Dict[str, str]: assert request.state.api_version == API_VERSION return {"hello": "world"}
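
For reference, the sketch below shows the parameter style this patch standardizes on, using hypothetical names rather than real robot-server code. With `Annotated`, the `Depends(...)` / `Query(...)` metadata moves into the type annotation, so the `=` position is reserved for an actual default value; parameters without defaults (such as plain dependencies) therefore have to come before defaulted ones, which is why several handlers in the hunks above reorder their parameter lists.

    from typing import Annotated, Optional

    from fastapi import Depends, FastAPI, Query

    app = FastAPI()


    class Settings:
        """A hypothetical dependency value, standing in for the stores/managers above."""

        greeting: str = "hello"


    def get_settings() -> Settings:
        """A hypothetical dependency provider."""
        return Settings()


    @app.get("/items")
    async def list_items(
        # New style: the dependency metadata lives inside Annotated, so no fake
        # default value is needed and the parameter is genuinely required.
        settings: Annotated[Settings, Depends(get_settings)],
        # Query metadata also moves into Annotated, leaving `= None` as the real default.
        name_filter: Annotated[
            Optional[str],
            Query(description="Filter results by name."),
        ] = None,
    ) -> dict:
        # Old style, for comparison (still accepted by FastAPI):
        #     settings: Settings = Depends(get_settings),
        #     name_filter: Optional[str] = Query(None, description="Filter results by name."),
        return {"greeting": settings.greeting, "nameFilter": name_filter}

A side effect visible in the test updates above: because `Depends(...)` objects are no longer Python-level default values, tests that call a handler function directly must now pass every dependency explicitly (hence the added `check_estop=True`, `quick_transfer_protocol_auto_deleter=...`, and similar keyword arguments). As a possible follow-up not attempted in this patch, repeated annotations such as `Annotated[FirmwareUpdateManager, Depends(get_firmware_update_manager)]` could be bound to a module-level type alias and reused, which FastAPI supports.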