diff --git a/mula/scheduler/server/handlers/schedules.py b/mula/scheduler/server/handlers/schedules.py
index 391ea2e21a0..b7605f9975f 100644
--- a/mula/scheduler/server/handlers/schedules.py
+++ b/mula/scheduler/server/handlers/schedules.py
@@ -5,7 +5,7 @@
 import fastapi
 import pydantic
 import structlog
-from fastapi import status
+from fastapi import Body, status
 
 from scheduler import context, models, schedulers, storage
 from scheduler.server import serializers, utils
@@ -65,6 +65,15 @@ def __init__(
             description="Delete a schedule",
         )
 
+        self.api.add_api_route(
+            path="/schedules/search",
+            endpoint=self.search,
+            methods=["POST"],
+            response_model=utils.PaginatedResponse,
+            status_code=200,
+            description="Search schedules",
+        )
+
     def list(
         self,
         request: fastapi.Request,
@@ -255,6 +264,39 @@ def patch(self, schedule_id: uuid.UUID, schedule: serializers.SchedulePatch) ->
 
         return updated_schedule
 
+    def search(
+        self,
+        request: fastapi.Request,
+        offset: int = 0,
+        limit: int = 10,
+        filters: storage.filters.FilterRequest | None = Body(...),
+    ) -> utils.PaginatedResponse:
+        if filters is None:
+            raise fastapi.HTTPException(
+                status_code=fastapi.status.HTTP_400_BAD_REQUEST, detail="missing search filters"
+            )
+
+        try:
+            results, count = self.ctx.datastores.schedule_store.get_schedules(
+                offset=offset, limit=limit, filters=filters
+            )
+        except storage.filters.errors.FilterError as exc:
+            raise fastapi.HTTPException(
+                status_code=fastapi.status.HTTP_400_BAD_REQUEST, detail=f"invalid filter(s) [exception: {exc}]"
+            ) from exc
+        except storage.errors.StorageError as exc:
+            raise fastapi.HTTPException(
+                status_code=fastapi.status.HTTP_500_INTERNAL_SERVER_ERROR,
+                detail=f"error occurred while accessing the database [exception: {exc}]",
+            ) from exc
+        except Exception as exc:
+            self.logger.exception(exc)
+            raise fastapi.HTTPException(
+                status_code=fastapi.status.HTTP_500_INTERNAL_SERVER_ERROR, detail="failed to search schedules"
+            ) from exc
+
+        return utils.paginate(request, results, count, offset, limit)
+
     def delete(self, schedule_id: uuid.UUID) -> None:
         try:
             self.ctx.datastores.schedule_store.delete_schedule(schedule_id)
@@ -266,8 +308,5 @@ def delete(self, schedule_id: uuid.UUID) -> None:
         except Exception as exc:
             self.logger.exception(exc)
             raise fastapi.HTTPException(
-                status_code=fastapi.status.HTTP_500_INTERNAL_SERVER_ERROR,
-                detail=f"failed to delete schedule [exception: {exc}]",
+                status_code=fastapi.status.HTTP_500_INTERNAL_SERVER_ERROR, detail="failed to delete schedule"
             ) from exc
-
-        return None
diff --git a/mula/tests/integration/test_api.py b/mula/tests/integration/test_api.py
index e48dd544362..16d028b3536 100644
--- a/mula/tests/integration/test_api.py
+++ b/mula/tests/integration/test_api.py
@@ -729,22 +729,22 @@ class APIScheduleEndpointTestCase(APITemplateTestCase):
     def setUp(self):
         super().setUp()
 
-        first_item = functions.create_item(self.scheduler.scheduler_id, 1)
+        self.first_item = functions.create_item(self.scheduler.scheduler_id, 1)
         self.first_schedule = self.mock_ctx.datastores.schedule_store.create_schedule(
             models.Schedule(
                 scheduler_id=self.scheduler.scheduler_id,
-                hash=first_item.hash,
-                data=first_item.data,
+                hash=self.first_item.hash,
+                data=self.first_item.data,
                 deadline_at=datetime.now(timezone.utc) + timedelta(days=1),
             )
         )
 
-        second_item = functions.create_item(self.scheduler.scheduler_id, 1)
+        self.second_item = functions.create_item(self.scheduler.scheduler_id, 1)
         self.second_schedule = self.mock_ctx.datastores.schedule_store.create_schedule(
             models.Schedule(
                 scheduler_id=self.scheduler.scheduler_id,
-                hash=second_item.hash,
-                data=second_item.data,
+                hash=self.second_item.hash,
+                data=self.second_item.data,
                 deadline_at=datetime.now(timezone.utc) + timedelta(days=2),
             )
         )
@@ -954,6 +954,28 @@ def test_patch_schedule_validate_malformed_schedule(self):
         self.assertEqual(400, response.status_code)
         self.assertIn("validation error", response.json().get("detail"))
 
+    def test_search_schedule(self):
+        response = self.client.post(
+            "/schedules/search",
+            json={
+                "filters": [
+                    {"column": "data", "field": "name", "operator": "eq", "value": self.first_item.data.get("name")}
+                ]
+            },
+        )
+        self.assertEqual(200, response.status_code)
+        self.assertEqual(1, response.json()["count"])
+        self.assertEqual(1, len(response.json()["results"]))
+
+    def test_search_schedule_with_pagination(self):
+        response = self.client.post(
+            "/schedules/search?limit=1",
+            json={"filters": [{"column": "scheduler_id", "operator": "eq", "value": self.scheduler.scheduler_id}]},
+        )
+        self.assertEqual(200, response.status_code)
+        self.assertEqual(2, response.json()["count"])
+        self.assertEqual(1, len(response.json()["results"]))
+
     def test_delete_schedule(self):
         response = self.client.delete(f"/schedules/{self.first_schedule.id}")
         self.assertEqual(204, response.status_code)
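
For reviewers, a minimal usage sketch of the search endpoint added above (not part of the patch). It assumes a running scheduler API; the base URL/port, scheduler id, and the use of the requests library are illustrative placeholders, not taken from the diff:

    # Illustrative client call for POST /schedules/search; URL and filter value are placeholders.
    import requests

    response = requests.post(
        "http://localhost:8004/schedules/search",  # assumed local scheduler API address
        params={"offset": 0, "limit": 10},         # pagination goes in the query string, as in the tests
        json={"filters": [{"column": "scheduler_id", "operator": "eq", "value": "my-scheduler-id"}]},
    )
    response.raise_for_status()
    page = response.json()
    print(page["count"], len(page["results"]))     # total matches vs. items in this page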