From 771648e5e7ab586bb8849396c92062ff833a2497 Mon Sep 17 00:00:00 2001 From: Adrien Coulier Date: Thu, 2 Jun 2022 14:25:25 +0200 Subject: [PATCH 1/2] Get auth_token through API The authentication token is then written in a temporary file that is deleted as soon as delivery is completed. --- delivery/handlers/dds_handlers.py | 4 +- delivery/handlers/delivery_handlers.py | 8 +- delivery/services/dds_service.py | 112 ++++++++++-------- .../integration_tests/test_integration_dds.py | 10 +- tests/unit_tests/services/test_dds.py | 36 ++++-- 5 files changed, 95 insertions(+), 75 deletions(-) diff --git a/delivery/handlers/dds_handlers.py b/delivery/handlers/dds_handlers.py index 4197bd7..78bab7e 100644 --- a/delivery/handlers/dds_handlers.py +++ b/delivery/handlers/dds_handlers.py @@ -37,13 +37,13 @@ async def post(self, project_name): "researchers": ["robin@doe.com", "kim@doe.com"], "owners": ["alex@doe.com"], "non-sensitive": False, - "token_path": "/foo/bar" + "auth_token": "1234" } response = requests.request("POST", url, json=payload) """ - required_members = ["token_path"] + required_members = ["auth_token"] project_metadata = self.body_as_object(required_members=required_members) dds_project_id = await self.dds_service.create_dds_project(project_name, project_metadata) diff --git a/delivery/handlers/delivery_handlers.py b/delivery/handlers/delivery_handlers.py index 7e254fc..db81721 100644 --- a/delivery/handlers/delivery_handlers.py +++ b/delivery/handlers/delivery_handlers.py @@ -24,16 +24,16 @@ def initialize(self, **kwargs): def post(self, staging_id): required_members = ["delivery_project_id"] if self.dds: - required_members += ["token_path"] + required_members += ["auth_token"] request_data = self.body_as_object(required_members=required_members) delivery_project_id = request_data["delivery_project_id"] - token_path = request_data.get("token_path") + auth_token = request_data.get("auth_token") md5sum_file = request_data.get("md5sums_file") extra_args = {} - if token_path: - extra_args['token_path'] = token_path + if auth_token: + extra_args['auth_token'] = auth_token # This should only be used for testing purposes /JD 20170202 skip_mover_request = request_data.get("skip_mover") diff --git a/delivery/services/dds_service.py b/delivery/services/dds_service.py index 67ee913..4e7e8be 100644 --- a/delivery/services/dds_service.py +++ b/delivery/services/dds_service.py @@ -4,6 +4,7 @@ import re import json from tornado import gen +import tempfile from delivery.models.db_models import StagingStatus, DeliveryStatus from delivery.exceptions import ProjectNotFoundException, TooManyProjectsFound, InvalidStatusException, CannotParseDDSOutputException @@ -50,36 +51,41 @@ async def create_dds_project(self, project_name, project_metadata): sensitive or not. 
:return: project id in dds """ - cmd = [ - 'dds', - '--token-path', project_metadata["token_path"], - '--log-file', self.dds_conf["log_path"], - ] - - cmd += [ - 'project', 'create', - '--title', project_name, - '--description', f"\"{project_metadata['description']}\"", - '-pi', project_metadata['pi'] - ] - - cmd += [ - args - for owner in project_metadata.get('owners', []) - for args in ['--owner', owner] - ] - - cmd += [ - args - for researcher in project_metadata.get('researchers', []) - for args in ['--researcher', researcher] - ] - - if project_metadata.get('non-sensitive', False): - cmd += ['--non-sensitive'] - - log.debug(f"Running dds with command: {' '.join(cmd)}") - execution_result = await self.external_program_service.run_and_wait(cmd) + + with tempfile.NamedTemporaryFile(mode='w', delete=True) as token_file: + token_file.write(project_metadata["auth_token"]) + token_file.flush() + + cmd = [ + 'dds', + '--token-path', token_file.name, + '--log-file', self.dds_conf["log_path"], + ] + + cmd += [ + 'project', 'create', + '--title', project_name, + '--description', f"\"{project_metadata['description']}\"", + '-pi', project_metadata['pi'] + ] + + cmd += [ + args + for owner in project_metadata.get('owners', []) + for args in ['--owner', owner] + ] + + cmd += [ + args + for researcher in project_metadata.get('researchers', []) + for args in ['--researcher', researcher] + ] + + if project_metadata.get('non-sensitive', False): + cmd += ['--non-sensitive'] + + log.debug(f"Running dds with command: {' '.join(cmd)}") + execution_result = await self.external_program_service.run_and_wait(cmd) if execution_result.status_code == 0: dds_project_id = DDSService._parse_dds_project_id(execution_result.stdout) @@ -102,7 +108,7 @@ def _run_dds_put( staging_dir, external_program_service, session_factory, - token_path, + auth_token, dds_conf): session = session_factory() @@ -111,28 +117,32 @@ def _run_dds_put( # thread, therefore it is re-materialized in here... 
delivery_order = delivery_order_repo.get_delivery_order_by_id(delivery_order_id, session) try: - cmd = [ - 'dds', - '--token-path', token_path, - '--log-file', dds_conf["log_path"], - ] + with tempfile.NamedTemporaryFile(mode='w', delete=True) as token_file: + token_file.write(auth_token) + token_file.flush() - cmd += [ - 'data', 'put', - '--mount-dir', staging_dir, - '--source', delivery_order.delivery_source, - '--project', delivery_order.delivery_project, - '--silent', - ] + cmd = [ + 'dds', + '--token-path', token_file.name, + '--log-file', dds_conf["log_path"], + ] - log.debug("Running dds with cmd: {}".format(" ".join(cmd))) + cmd += [ + 'data', 'put', + '--mount-dir', staging_dir, + '--source', delivery_order.delivery_source, + '--project', delivery_order.delivery_project, + '--silent', + ] - execution = external_program_service.run(cmd) - delivery_order.delivery_status = DeliveryStatus.delivery_in_progress - delivery_order.mover_pid = execution.pid - session.commit() + log.debug("Running dds with cmd: {}".format(" ".join(cmd))) + + execution = external_program_service.run(cmd) + delivery_order.delivery_status = DeliveryStatus.delivery_in_progress + delivery_order.mover_pid = execution.pid + session.commit() - execution_result = yield external_program_service.wait_for_execution(execution) + execution_result = yield external_program_service.wait_for_execution(execution) if execution_result.status_code == 0: delivery_order.delivery_status = DeliveryStatus.delivery_successful @@ -152,7 +162,7 @@ def _run_dds_put( session.commit() @gen.coroutine - def deliver_by_staging_id(self, staging_id, delivery_project, md5sum_file, token_path, skip_mover=False): + def deliver_by_staging_id(self, staging_id, delivery_project, md5sum_file, auth_token, skip_mover=False): stage_order = self.staging_service.get_stage_order_by_id(staging_id) if not stage_order or not stage_order.status == StagingStatus.staging_successful: @@ -172,7 +182,7 @@ def deliver_by_staging_id(self, staging_id, delivery_project, md5sum_file, token 'staging_dir': self.staging_dir, 'external_program_service': self.mover_external_program_service, 'session_factory': self.session_factory, - 'token_path': token_path, + 'auth_token': auth_token, 'dds_conf': self.dds_conf, } diff --git a/tests/integration_tests/test_integration_dds.py b/tests/integration_tests/test_integration_dds.py index ee3f46a..f33704a 100644 --- a/tests/integration_tests/test_integration_dds.py +++ b/tests/integration_tests/test_integration_dds.py @@ -60,7 +60,7 @@ def test_can_stage_and_delivery_runfolder(self): delivery_body = { 'delivery_project_id': 'fakedeliveryid2016', 'dds': True, - 'token_path': 'token_path', + 'auth_token': '1234', 'skip_mover': True, } delivery_resp = yield self.http_client.fetch(self.get_url(delivery_url), method='POST', body=json.dumps(delivery_body)) @@ -102,7 +102,7 @@ def test_can_stage_and_delivery_project_dir(self): 'delivery_project_id': 'fakedeliveryid2016', 'skip_mover': True, 'dds': True, - 'token_path': 'token_path', + 'auth_token': '1234', } delivery_resp = yield self.http_client.fetch(self.get_url(delivery_url), method='POST', body=json.dumps(delivery_body)) delivery_resp_as_json = json.loads(delivery_resp.body) @@ -214,7 +214,7 @@ def test_can_create_project(self): "researchers": ["robin@doe.com", "kim@doe.com"], "owners": ["alex@doe.com"], "non-sensitive": False, - "token_path": '/foo/bar/auth', + "auth_token": '1234', } response = yield self.http_client.fetch( @@ -234,7 +234,7 @@ def test_can_create_two_projects(self): 
"researchers": ["robin@doe.com", "kim@doe.com"], "owners": ["alex@doe.com"], "non-sensitive": False, - "token_path": '/foo/bar/auth', + "auth_token": '1234', } response = yield self.http_client.fetch( @@ -294,7 +294,7 @@ def test_can_deliver_and_respond(self): delivery_body = { 'delivery_project_id': 'fakedeliveryid2016', 'dds': True, - 'token_path': 'token_path', + 'auth_token': '1234', 'skip_mover': False, } delivery_response = self.http_client.fetch(self.get_url(delivery_url), method='POST', body=json.dumps(delivery_body)) diff --git a/tests/unit_tests/services/test_dds.py b/tests/unit_tests/services/test_dds.py index 0b725ed..f7de135 100644 --- a/tests/unit_tests/services/test_dds.py +++ b/tests/unit_tests/services/test_dds.py @@ -88,26 +88,31 @@ def wait_as_coroutine(x): def test_deliver_by_staging_id(self): source = '/foo/bar' staging_target = '/staging/dir/bar' + auth_token = '1234' staging_order = StagingOrder(source=source, staging_target=staging_target) staging_order.status = StagingStatus.staging_successful self.mock_staging_service.get_stage_order_by_id.return_value = staging_order self.mock_staging_service.get_delivery_order_by_id.return_value = self.delivery_order - with patch('shutil.rmtree') as mock_rmtree: + with patch('shutil.rmtree') as mock_rmtree,\ + patch('tempfile.NamedTemporaryFile') as mock_tempfile: + mock_tempfile.return_value.__enter__.return_value.name = 'auth_token' res = yield self.dds_service.deliver_by_staging_id( staging_id=1, delivery_project='snpseq00001', - token_path='token_path', + auth_token=auth_token, md5sum_file='md5sum_file') mock_rmtree.assert_called_once_with(staging_target) + mock_tempfile.return_value.__enter__.return_value.write.assert_called_with(auth_token) def _get_delivery_order(): return self.delivery_order.delivery_status + assert_eventually_equals(self, 1, _get_delivery_order, DeliveryStatus.delivery_successful) self.mock_mover_runner.run.assert_called_with([ 'dds', - '--token-path', 'token_path', + '--token-path', 'auth_token', '--log-file', '/foo/bar/log', 'data', 'put', '--mount-dir', '/foo/bar/staging_dir', @@ -126,7 +131,7 @@ def test_deliver_by_staging_id_raises_on_non_existent_stage_id(self): staging_id=1, delivery_project='snpseq00001', md5sum_file='md5sum_file', - token_path='token_path', + auth_token='auth_token', ) @gen_test @@ -142,7 +147,7 @@ def test_deliver_by_staging_id_raises_on_non_successful_stage_id(self): staging_id=1, delivery_project='snpseq00001', md5sum_file='md5sum_file', - token_path='token_path', + auth_token='auth_token', ) def test_delivery_order_by_id(self): @@ -168,7 +173,7 @@ def test_possible_to_delivery_by_staging_id_and_skip_mover(self): staging_id=1, delivery_project='snpseq00001', md5sum_file='md5sum_file', - token_path='token_path', + auth_token='auth_token', skip_mover=True, ) @@ -188,28 +193,33 @@ def test_parse_dds_project_id(self): @gen_test def test_create_project(self): project_name = "AA-1221" + auth_token = "1234" project_metadata = { "description": "Dummy project", "pi": "alex@doe.com", "researchers": ["robin@doe.com", "kim@doe.com"], "owners": ["alex@doe.com"], "non-sensitive": False, - "token_path": "/foo/bar/auth", + "auth_token": auth_token, } with patch( 'delivery.services.external_program_service' '.ExternalProgramService.run_and_wait', - new_callable=AsyncMock) as mock_run,\ - patch('delivery.services.dds_service.DDSService._parse_dds_project_id') as mock_parse_dds_project_id: + new_callable=AsyncMock) as mock_run, \ + patch('tempfile.NamedTemporaryFile') as mock_tempfile, \ + 
patch('delivery.services.dds_service.DDSService._parse_dds_project_id') as mock_parse_dds_project_id: mock_run.return_value.status_code = 0 mock_parse_dds_project_id.return_value = "snpseq00001" + mock_tempfile.return_value.__enter__.return_value.name = 'auth_token' yield self.dds_service.create_dds_project(project_name, project_metadata) + mock_tempfile.return_value.__enter__.return_value.write.assert_called_with(auth_token) + mock_run.assert_called_once_with([ 'dds', - '--token-path', '/foo/bar/auth', + '--token-path', 'auth_token', '--log-file', '/foo/bar/log', 'project', 'create', '--title', project_name, @@ -220,6 +230,6 @@ def test_create_project(self): '--researcher', project_metadata['researchers'][1], ]) self.mock_dds_project_repo.add_dds_project\ - .assert_called_once_with( - project_name=project_name, - dds_project_id=mock_parse_dds_project_id.return_value) + .assert_called_once_with( + project_name=project_name, + dds_project_id=mock_parse_dds_project_id.return_value) From 8519a3e3dde9fc278b399fa9ff27001bd6024877 Mon Sep 17 00:00:00 2001 From: Adrien Coulier Date: Fri, 3 Jun 2022 14:16:29 +0200 Subject: [PATCH 2/2] Get either token or token path through API --- delivery/handlers/dds_handlers.py | 23 ++++- delivery/handlers/delivery_handlers.py | 28 ++++-- delivery/services/dds_service.py | 125 +++++++++++++------------ tests/unit_tests/services/test_dds.py | 41 ++++---- 4 files changed, 122 insertions(+), 95 deletions(-) diff --git a/delivery/handlers/dds_handlers.py b/delivery/handlers/dds_handlers.py index 78bab7e..ed5e946 100644 --- a/delivery/handlers/dds_handlers.py +++ b/delivery/handlers/dds_handlers.py @@ -3,6 +3,8 @@ from delivery.handlers import * from delivery.handlers.utility_handlers import ArteriaDeliveryBaseHandler +import os +import tempfile import logging log = logging.getLogger(__name__) @@ -25,7 +27,8 @@ async def post(self, project_name): Create a new project in DDS. The project description as well as the email of its pi must be specified in the request body. Project owners, researchers, and whether the data is sensitive or not (default is yes), - can also be specified there. E.g.: + can also be specified there. "auth_token" can be either the path to + the authentication token or the token itself. 
E.g.: import requests @@ -44,9 +47,23 @@ async def post(self, project_name): """ required_members = ["auth_token"] - project_metadata = self.body_as_object(required_members=required_members) + project_metadata = self.body_as_object( + required_members=required_members) - dds_project_id = await self.dds_service.create_dds_project(project_name, project_metadata) + with tempfile.NamedTemporaryFile(mode='w', delete=True) as token_file: + if os.path.exists(project_metadata["auth_token"]): + token_path = project_metadata["auth_token"] + else: + token_file.write(project_metadata["auth_token"]) + token_file.flush() + + token_path = token_file.name + + dds_project_id = await self.dds_service.create_dds_project( + project_name, + project_metadata, + token_path, + ) self.set_status(ACCEPTED) self.write_json({'dds_project_id': dds_project_id}) diff --git a/delivery/handlers/delivery_handlers.py b/delivery/handlers/delivery_handlers.py index db81721..606636b 100644 --- a/delivery/handlers/delivery_handlers.py +++ b/delivery/handlers/delivery_handlers.py @@ -1,6 +1,8 @@ +import os import json import logging +import tempfile from tornado.gen import coroutine @@ -32,8 +34,6 @@ def post(self, staging_id): md5sum_file = request_data.get("md5sums_file") extra_args = {} - if auth_token: - extra_args['auth_token'] = auth_token # This should only be used for testing purposes /JD 20170202 skip_mover_request = request_data.get("skip_mover") @@ -44,12 +44,24 @@ def post(self, staging_id): log.debug("Will not skip running mover!") skip_mover = False - delivery_id = yield self.delivery_service.deliver_by_staging_id( - staging_id=staging_id, - delivery_project=delivery_project_id, - md5sum_file=md5sum_file, - skip_mover=skip_mover, - **extra_args) + with tempfile.NamedTemporaryFile(mode='w', delete=True) as token_file: + if auth_token: + if os.path.exists(auth_token): + token_path = auth_token + else: + token_file.write(auth_token) + token_file.flush() + + token_path = token_file.name + + extra_args['token_path'] = token_path + + delivery_id = yield self.delivery_service.deliver_by_staging_id( + staging_id=staging_id, + delivery_project=delivery_project_id, + md5sum_file=md5sum_file, + skip_mover=skip_mover, + **extra_args) status_end_point = "{0}://{1}{2}".format(self.request.protocol, self.request.host, diff --git a/delivery/services/dds_service.py b/delivery/services/dds_service.py index 4e7e8be..5d19708 100644 --- a/delivery/services/dds_service.py +++ b/delivery/services/dds_service.py @@ -4,7 +4,6 @@ import re import json from tornado import gen -import tempfile from delivery.models.db_models import StagingStatus, DeliveryStatus from delivery.exceptions import ProjectNotFoundException, TooManyProjectsFound, InvalidStatusException, CannotParseDDSOutputException @@ -42,50 +41,50 @@ def _parse_dds_project_id(dds_output): else: raise CannotParseDDSOutputException(f"Could not parse DDS project ID from: {dds_output}") - async def create_dds_project(self, project_name, project_metadata): + async def create_dds_project( + self, + project_name, + project_metadata, + token_path): """ Create a new project in dds :param project_name: Project name from Clarity :param project_metadata: dictionnary containing pi email, project description, owner and researcher emails as well as whether the data is sensitive or not. + :param token_path: path to DDS authentication token. 
:return: project id in dds """ - - with tempfile.NamedTemporaryFile(mode='w', delete=True) as token_file: - token_file.write(project_metadata["auth_token"]) - token_file.flush() - - cmd = [ - 'dds', - '--token-path', token_file.name, - '--log-file', self.dds_conf["log_path"], - ] - - cmd += [ - 'project', 'create', - '--title', project_name, - '--description', f"\"{project_metadata['description']}\"", - '-pi', project_metadata['pi'] - ] - - cmd += [ - args - for owner in project_metadata.get('owners', []) - for args in ['--owner', owner] - ] - - cmd += [ - args - for researcher in project_metadata.get('researchers', []) - for args in ['--researcher', researcher] - ] - - if project_metadata.get('non-sensitive', False): - cmd += ['--non-sensitive'] - - log.debug(f"Running dds with command: {' '.join(cmd)}") - execution_result = await self.external_program_service.run_and_wait(cmd) + cmd = [ + 'dds', + '--token-path', token_path, + '--log-file', self.dds_conf["log_path"], + ] + + cmd += [ + 'project', 'create', + '--title', project_name, + '--description', f"\"{project_metadata['description']}\"", + '-pi', project_metadata['pi'] + ] + + cmd += [ + args + for owner in project_metadata.get('owners', []) + for args in ['--owner', owner] + ] + + cmd += [ + args + for researcher in project_metadata.get('researchers', []) + for args in ['--researcher', researcher] + ] + + if project_metadata.get('non-sensitive', False): + cmd += ['--non-sensitive'] + + log.debug(f"Running dds with command: {' '.join(cmd)}") + execution_result = await self.external_program_service.run_and_wait(cmd) if execution_result.status_code == 0: dds_project_id = DDSService._parse_dds_project_id(execution_result.stdout) @@ -108,7 +107,7 @@ def _run_dds_put( staging_dir, external_program_service, session_factory, - auth_token, + token_path, dds_conf): session = session_factory() @@ -117,32 +116,28 @@ def _run_dds_put( # thread, therefore it is re-materialized in here... 
delivery_order = delivery_order_repo.get_delivery_order_by_id(delivery_order_id, session) try: - with tempfile.NamedTemporaryFile(mode='w', delete=True) as token_file: - token_file.write(auth_token) - token_file.flush() - - cmd = [ - 'dds', - '--token-path', token_file.name, - '--log-file', dds_conf["log_path"], - ] + cmd = [ + 'dds', + '--token-path', token_path, + '--log-file', dds_conf["log_path"], + ] - cmd += [ - 'data', 'put', - '--mount-dir', staging_dir, - '--source', delivery_order.delivery_source, - '--project', delivery_order.delivery_project, - '--silent', - ] + cmd += [ + 'data', 'put', + '--mount-dir', staging_dir, + '--source', delivery_order.delivery_source, + '--project', delivery_order.delivery_project, + '--silent', + ] - log.debug("Running dds with cmd: {}".format(" ".join(cmd))) + log.debug("Running dds with cmd: {}".format(" ".join(cmd))) - execution = external_program_service.run(cmd) - delivery_order.delivery_status = DeliveryStatus.delivery_in_progress - delivery_order.mover_pid = execution.pid - session.commit() + execution = external_program_service.run(cmd) + delivery_order.delivery_status = DeliveryStatus.delivery_in_progress + delivery_order.mover_pid = execution.pid + session.commit() - execution_result = yield external_program_service.wait_for_execution(execution) + execution_result = yield external_program_service.wait_for_execution(execution) if execution_result.status_code == 0: delivery_order.delivery_status = DeliveryStatus.delivery_successful @@ -162,7 +157,13 @@ def _run_dds_put( session.commit() @gen.coroutine - def deliver_by_staging_id(self, staging_id, delivery_project, md5sum_file, auth_token, skip_mover=False): + def deliver_by_staging_id( + self, + staging_id, + delivery_project, + md5sum_file, + token_path, + skip_mover=False): stage_order = self.staging_service.get_stage_order_by_id(staging_id) if not stage_order or not stage_order.status == StagingStatus.staging_successful: @@ -182,7 +183,7 @@ def deliver_by_staging_id(self, staging_id, delivery_project, md5sum_file, auth_ 'staging_dir': self.staging_dir, 'external_program_service': self.mover_external_program_service, 'session_factory': self.session_factory, - 'auth_token': auth_token, + 'token_path': token_path, 'dds_conf': self.dds_conf, } diff --git a/tests/unit_tests/services/test_dds.py b/tests/unit_tests/services/test_dds.py index f7de135..44a952f 100644 --- a/tests/unit_tests/services/test_dds.py +++ b/tests/unit_tests/services/test_dds.py @@ -88,31 +88,26 @@ def wait_as_coroutine(x): def test_deliver_by_staging_id(self): source = '/foo/bar' staging_target = '/staging/dir/bar' - auth_token = '1234' staging_order = StagingOrder(source=source, staging_target=staging_target) staging_order.status = StagingStatus.staging_successful self.mock_staging_service.get_stage_order_by_id.return_value = staging_order self.mock_staging_service.get_delivery_order_by_id.return_value = self.delivery_order - with patch('shutil.rmtree') as mock_rmtree,\ - patch('tempfile.NamedTemporaryFile') as mock_tempfile: - mock_tempfile.return_value.__enter__.return_value.name = 'auth_token' + with patch('shutil.rmtree') as mock_rmtree: res = yield self.dds_service.deliver_by_staging_id( staging_id=1, delivery_project='snpseq00001', - auth_token=auth_token, + token_path='token_path', md5sum_file='md5sum_file') mock_rmtree.assert_called_once_with(staging_target) - mock_tempfile.return_value.__enter__.return_value.write.assert_called_with(auth_token) def _get_delivery_order(): return 
self.delivery_order.delivery_status - assert_eventually_equals(self, 1, _get_delivery_order, DeliveryStatus.delivery_successful) self.mock_mover_runner.run.assert_called_with([ 'dds', - '--token-path', 'auth_token', + '--token-path', 'token_path', '--log-file', '/foo/bar/log', 'data', 'put', '--mount-dir', '/foo/bar/staging_dir', @@ -131,7 +126,7 @@ def test_deliver_by_staging_id_raises_on_non_existent_stage_id(self): staging_id=1, delivery_project='snpseq00001', md5sum_file='md5sum_file', - auth_token='auth_token', + token_path='token_path', ) @gen_test @@ -147,7 +142,7 @@ def test_deliver_by_staging_id_raises_on_non_successful_stage_id(self): staging_id=1, delivery_project='snpseq00001', md5sum_file='md5sum_file', - auth_token='auth_token', + token_path='token_path', ) def test_delivery_order_by_id(self): @@ -173,7 +168,7 @@ def test_possible_to_delivery_by_staging_id_and_skip_mover(self): staging_id=1, delivery_project='snpseq00001', md5sum_file='md5sum_file', - auth_token='auth_token', + token_path='token_path', skip_mover=True, ) @@ -191,35 +186,37 @@ def test_parse_dds_project_id(self): self.assertEqual(DDSService._parse_dds_project_id(dds_output), "snpseq00003") @gen_test - def test_create_project(self): + def test_create_project_token_file(self): project_name = "AA-1221" - auth_token = "1234" project_metadata = { "description": "Dummy project", "pi": "alex@doe.com", "researchers": ["robin@doe.com", "kim@doe.com"], "owners": ["alex@doe.com"], "non-sensitive": False, - "auth_token": auth_token, } + token_path = "/foo/bar/auth" + with patch( 'delivery.services.external_program_service' '.ExternalProgramService.run_and_wait', - new_callable=AsyncMock) as mock_run, \ - patch('tempfile.NamedTemporaryFile') as mock_tempfile, \ - patch('delivery.services.dds_service.DDSService._parse_dds_project_id') as mock_parse_dds_project_id: + new_callable=AsyncMock) as mock_run,\ + patch( + 'delivery.services.dds_service' + '.DDSService._parse_dds_project_id' + ) as mock_parse_dds_project_id: mock_run.return_value.status_code = 0 mock_parse_dds_project_id.return_value = "snpseq00001" - mock_tempfile.return_value.__enter__.return_value.name = 'auth_token' - - yield self.dds_service.create_dds_project(project_name, project_metadata) - mock_tempfile.return_value.__enter__.return_value.write.assert_called_with(auth_token) + yield self.dds_service.create_dds_project( + project_name, + project_metadata, + token_path) mock_run.assert_called_once_with([ 'dds', - '--token-path', 'auth_token', + '--token-path', token_path, '--log-file', '/foo/bar/log', 'project', 'create', '--title', project_name,
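
For reference, a minimal standalone sketch of the token handling that the second patch folds into both handlers: `auth_token` may be either a path to an existing token file or the raw token itself, and in the latter case it is staged in a temporary file that disappears once the dds call returns. The `run_dds` parameter is a hypothetical stand-in for the actual dds invocation and is not part of the patched code.

```python
import os
import tempfile


def with_token_path(auth_token, run_dds):
    """Resolve `auth_token` to a path on disk and call `run_dds(token_path)`.

    `auth_token` is either a filesystem path to an existing token file or
    the raw token string. A raw token is written to a temporary file that
    is deleted as soon as the context manager exits, i.e. right after
    `run_dds` returns.
    """
    with tempfile.NamedTemporaryFile(mode='w', delete=True) as token_file:
        if os.path.exists(auth_token):
            # The caller passed a path to an existing token file; use it as-is.
            token_path = auth_token
        else:
            # The caller passed the token itself; stage it in a temporary
            # file that only lives for the duration of the dds call.
            token_file.write(auth_token)
            token_file.flush()
            token_path = token_file.name

        # The dds call must happen inside the `with` block so the temporary
        # token file still exists while dds reads it.
        return run_dds(token_path)
```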