From 75566bb59460dbba56d00e8e70ba23ea6002c180 Mon Sep 17 00:00:00 2001
From: DonHaul
Date: Mon, 12 Aug 2024 11:18:08 +0200
Subject: [PATCH] author submissions: add author accept curate action

* ref: cern-sis/issues-inspire/issues/522
---
 backoffice/backoffice/workflows/api/views.py | 29 ++------------------
 1 file changed, 2 insertions(+), 27 deletions(-)

diff --git a/backoffice/backoffice/workflows/api/views.py b/backoffice/backoffice/workflows/api/views.py
index 0d8a16d8..ca78ae8e 100644
--- a/backoffice/backoffice/workflows/api/views.py
+++ b/backoffice/backoffice/workflows/api/views.py
@@ -104,8 +104,8 @@ class AuthorWorkflowViewSet(viewsets.ViewSet):
     serializer_class = WorkflowAuthorSerializer
 
     @extend_schema(
-        summary="Create/Update an Author",
-        description="Creates/Updates an author, launches the required airflow dags.",
+        summary="Create a New Author",
+        description="Creates a new author, launches the required airflow dags.",
         request=serializer_class,
     )
     def create(self, request):
@@ -127,31 +127,6 @@ def create(self, request):
             workflow.data,
         )
 
-    @extend_schema(
-        summary="Updates an Author",
-        description="Updates an author, launches the required airflow dag.",
-        request=serializer_class,
-    )
-    def update(self, request, pk=None):
-        logger.info("Creating workflow with data: %s", request.data)
-        serializer = self.serializer_class(data=request.data)
-        if serializer.is_valid(raise_exception=True):
-            workflow = Workflow.objects.create(
-                data=serializer.validated_data["data"],
-                workflow_type=WorkflowType.AUTHOR_UPDATE,
-            )
-
-            logger.info(
-                "Trigger Airflow DAG: %s for %s",
-                WORKFLOW_DAGS[workflow.workflow_type].initialize,
-                workflow.id,
-            )
-            return airflow_utils.trigger_airflow_dag(
-                WORKFLOW_DAGS[workflow.workflow_type].initialize,
-                str(workflow.id),
-                workflow.data,
-            )
-
     @extend_schema(
         summary="Partially Updates Author",
         description="Updates specific fields of the author.",
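
For context beyond the patch itself, here is a minimal sketch of how a client might exercise the remaining create endpoint after this change, using DRF's test client. The route, payload shape, and authentication handling are assumptions made for illustration, not taken from this patch; adjust them to the project's actual router configuration.

    from rest_framework.test import APIClient

    client = APIClient()
    # Authentication is omitted here; the real viewset likely enforces permissions.
    response = client.post(
        "/api/workflows/authors/",  # hypothetical route for AuthorWorkflowViewSet
        {"data": {"name": {"value": "Doe, John"}}},  # hypothetical author payload
        format="json",
    )
    # create() validates the payload with WorkflowAuthorSerializer, stores a
    # Workflow record, and triggers that workflow type's "initialize" Airflow DAG.
    print(response.status_code)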