diff --git a/VERSION b/VERSION
index 0b1f1edf..fd9d1a5a 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.2.13
+1.2.14
diff --git a/src/requirements.txt b/src/requirements.txt
index 4c2b9248..1448bfad 100644
--- a/src/requirements.txt
+++ b/src/requirements.txt
@@ -10,7 +10,7 @@ requests==2.25.1
 # Use the branch name of commons from github for testing new changes made in commons from different branch
 # Default is main branch specified in docker-compose.development.yml if not set
 # git+https://github.com/hubmapconsortium/commons.git@${COMMONS_BRANCH}#egg=hubmap-commons
-hubmap-commons==2.1.9
+hubmap-commons==2.1.12
 atlas-consortia-commons==1.0.5
 
 # Testing
diff --git a/src/routes/entity_CRUD/__init__.py b/src/routes/entity_CRUD/__init__.py
index e05e5a7c..ec5026c3 100644
--- a/src/routes/entity_CRUD/__init__.py
+++ b/src/routes/entity_CRUD/__init__.py
@@ -81,6 +81,65 @@ def create_dataset():
         return Response("Unexpected error while creating a dataset: " + str(e) + " Check the logs", 500)
 
 
+@entity_CRUD_blueprint.route('/datasets/components', methods=['POST'])
+def multiple_components():
+    if not request.is_json:
+        return Response("json request required", 400)
+    entity_type = 'dataset'
+    try:
+        component_request = request.json
+        auth_helper = AuthHelper.configured_instance(current_app.config['APP_CLIENT_ID'], current_app.config['APP_CLIENT_SECRET'])
+        auth_tokens = auth_helper.getAuthorizationTokens(request.headers)
+        if isinstance(auth_tokens, Response):
+            return auth_tokens
+        elif isinstance(auth_tokens, str):
+            token = auth_tokens
+        elif 'nexus_token' in auth_tokens:
+            token = auth_tokens['nexus_token']
+        else:
+            return Response("Valid nexus auth token required", 401)
+
+        # Check that `dataset_link_abs_dir` is present for each dataset and that the path exists on disk
+        json_data_dict = request.get_json()
+        for dataset in json_data_dict.get('datasets'):
+            if 'dataset_link_abs_dir' in dataset:
+                if not os.path.exists(dataset['dataset_link_abs_dir']):
+                    return Response(f"The filepath specified with 'dataset_link_abs_dir' does not exist: {dataset['dataset_link_abs_dir']}", 500)
+            else:
+                return Response("Required field 'dataset_link_abs_dir' is missing from dataset", 500)
+
+        requested_group_uuid = None
+        if 'group_uuid' in component_request:
+            requested_group_uuid = component_request['group_uuid']
+
+        ingest_helper = IngestFileHelper(current_app.config)
+        requested_group_uuid = auth_helper.get_write_group_uuid(token, requested_group_uuid)
+        component_request['group_uuid'] = requested_group_uuid
+        post_url = commons_file_helper.ensureTrailingSlashURL(current_app.config['ENTITY_WEBSERVICE_URL']) + 'datasets/components'
+        response = requests.post(post_url, json=component_request, headers={'Authorization': 'Bearer ' + token, 'X-SenNet-Application': 'ingest-api'}, verify=False)
+        if response.status_code != 200:
+            return Response(response.text, response.status_code)
+        new_datasets_list = response.json()
+
+        for dataset in new_datasets_list:
+            # The property `dataset_link_abs_dir` contains the filepath to the existing directory located inside the
+            # primary multi-assay directory. Create a symlink to that directory at the path for each newly created dataset.
+            if 'dataset_link_abs_dir' in dataset:
+                new_directory_path = ingest_helper.get_dataset_directory_absolute_path(dataset, requested_group_uuid, dataset['uuid'])
+                logger.info(
+                    f"Creating a directory as: {new_directory_path} with a symbolic link to: {dataset['dataset_link_abs_dir']}")
+                os.symlink(dataset['dataset_link_abs_dir'], new_directory_path, target_is_directory=True)
+            else:
+                return Response("Required field 'dataset_link_abs_dir' is missing from dataset", 500)
+
+        return jsonify(new_datasets_list)
+    except HTTPException as hte:
+        return Response(hte.get_description(), hte.get_status_code())
+    except Exception as e:
+        logger.error(e, exc_info=True)
+        return Response("Unexpected error while creating a dataset: " + str(e) + " Check the logs", 500)
+
+
 @entity_CRUD_blueprint.route('/sources/bulk/validate', methods=['POST'])
 def bulk_sources_upload_and_validate():
     return _bulk_upload_and_validate(Ontology.ops().entities().SOURCE)
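
For reviewers, a minimal client sketch of how the new endpoint would be exercised. The payload shape follows only what the handler above validates: a `datasets` list whose entries each carry `dataset_link_abs_dir`, plus an optional top-level `group_uuid`. The base URL, token, and paths are hypothetical placeholders, and each entry would additionally carry whatever dataset-creation fields entity-api's `datasets/components` endpoint requires.

    import requests

    # Hypothetical values throughout -- not taken from this repo's configs.
    INGEST_API = "https://ingest.api.example.org"
    TOKEN = "<globus-nexus-token>"

    component_request = {
        "group_uuid": "<optional-write-group-uuid>",  # resolved/validated server-side
        "datasets": [
            # Each entry must include dataset_link_abs_dir, pointing at an
            # existing directory inside the primary multi-assay dataset.
            {"dataset_link_abs_dir": "/data/primary-multiassay/component-a"},
            {"dataset_link_abs_dir": "/data/primary-multiassay/component-b"},
        ],
    }

    resp = requests.post(
        f"{INGEST_API}/datasets/components",
        json=component_request,
        headers={"Authorization": f"Bearer {TOKEN}"},
    )
    resp.raise_for_status()  # non-200 bodies are passed through from entity-api
    for ds in resp.json():
        print(ds["uuid"], ds["dataset_link_abs_dir"])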
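
And a standalone illustration of the symlink step, since the three-argument os.symlink call is easy to misread: the new dataset directory is not created as a real directory, it becomes a link that resolves to the existing component directory inside the primary dataset. The temp paths below are throwaway stand-ins for the handler's variables.

    import os
    import tempfile

    existing_dir = tempfile.mkdtemp()              # stands in for dataset['dataset_link_abs_dir']
    new_directory_path = existing_dir + "-linked"  # stands in for new_directory_path

    # Same call the handler makes; target_is_directory only matters on Windows,
    # but the keyword form documents intent on POSIX too.
    os.symlink(existing_dir, new_directory_path, target_is_directory=True)

    assert os.path.islink(new_directory_path)
    assert os.path.realpath(new_directory_path) == os.path.realpath(existing_dir)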