Skip to content

Commit

Permalink
Merge pull request #171 from sennetconsortium/maxsibilla/issue-192
Browse files Browse the repository at this point in the history
Maxsibilla/issue 192
  • Loading branch information
maxsibilla authored Oct 18, 2023
2 parents 6221596 + 152ed80 commit c91cd4e
Show file tree
Hide file tree
Showing 3 changed files with 61 additions and 2 deletions.
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
1.2.13
1.2.14
2 changes: 1 addition & 1 deletion src/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ requests==2.25.1
# Use the branch name of commons from github for testing new changes made in commons from different branch
# Default is main branch specified in docker-compose.development.yml if not set
# git+https://github.com/hubmapconsortium/commons.git@${COMMONS_BRANCH}#egg=hubmap-commons
hubmap-commons==2.1.9
hubmap-commons==2.1.12
atlas-consortia-commons==1.0.5

# Testing
Expand Down
59 changes: 59 additions & 0 deletions src/routes/entity_CRUD/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,65 @@ def create_dataset():
return Response("Unexpected error while creating a dataset: " + str(e) + " Check the logs", 500)


@entity_CRUD_blueprint.route('/datasets/components', methods=['POST'])
def multiple_components():
    """Create the component datasets of a multi-assay dataset.

    Expects a JSON body with a 'datasets' list; each entry must carry a
    'dataset_link_abs_dir' path pointing at an existing directory inside the
    primary multi-assay dataset. The datasets are created through entity-api
    and, for each newly created dataset, a symbolic link to that existing
    directory is created at the new dataset's own directory path.

    Returns:
        200 with the JSON list of newly created datasets on success;
        400 for a non-JSON request or a missing/empty 'datasets' list;
        401 when no valid auth token is supplied;
        500 for missing 'dataset_link_abs_dir', nonexistent paths, or
        unexpected errors (entity-api error responses are passed through).
    """
    if not request.is_json:
        return Response("json request required", 400)
    try:
        component_request = request.json
        auth_helper = AuthHelper.configured_instance(current_app.config['APP_CLIENT_ID'], current_app.config['APP_CLIENT_SECRET'])
        auth_tokens = auth_helper.getAuthorizationTokens(request.headers)
        if isinstance(auth_tokens, Response):
            return auth_tokens
        elif isinstance(auth_tokens, str):
            token = auth_tokens
        elif 'nexus_token' in auth_tokens:
            token = auth_tokens['nexus_token']
        else:
            return Response("Valid nexus auth token required", 401)

        # Validate up front that every dataset names an existing
        # `dataset_link_abs_dir` before anything is created. Reuse
        # `component_request` rather than parsing the body a second time,
        # and guard against a missing/empty 'datasets' list (previously a
        # TypeError caught by the generic 500 handler).
        datasets = component_request.get('datasets')
        if not datasets:
            return Response("Required field 'datasets' is missing or empty", 400)
        for dataset in datasets:
            if 'dataset_link_abs_dir' in dataset:
                if not os.path.exists(dataset['dataset_link_abs_dir']):
                    return Response(f"The filepath specified with 'dataset_link_abs_dir' does not exist: {dataset['dataset_link_abs_dir']}", 500)
            else:
                return Response("Required field 'dataset_link_abs_dir' is missing from dataset", 500)

        requested_group_uuid = None
        if 'group_uuid' in component_request:
            requested_group_uuid = component_request['group_uuid']

        ingest_helper = IngestFileHelper(current_app.config)
        # Resolve to a group uuid the caller actually has write access to.
        requested_group_uuid = auth_helper.get_write_group_uuid(token, requested_group_uuid)
        component_request['group_uuid'] = requested_group_uuid
        post_url = commons_file_helper.ensureTrailingSlashURL(current_app.config['ENTITY_WEBSERVICE_URL']) + 'datasets/components'
        # NOTE(review): verify=False disables TLS certificate verification on
        # the entity-api call — confirm this is intentional for this deployment.
        response = requests.post(post_url, json = component_request, headers = {'Authorization': 'Bearer ' + token, 'X-SenNet-Application':'ingest-api' }, verify = False)
        if response.status_code != 200:
            return Response(response.text, response.status_code)
        new_datasets_list = response.json()

        for dataset in new_datasets_list:
            # The property `dataset_link_abs_dir` will contain the filepath to the existing directory located inside the primary multi-assay
            # directory. We need to create a symlink to the aforementioned directory at the path for the newly created datasets.
            if 'dataset_link_abs_dir' in dataset:
                new_directory_path = ingest_helper.get_dataset_directory_absolute_path(dataset, requested_group_uuid, dataset['uuid'])
                logger.info(
                    f"Creating a directory as: {new_directory_path} with a symbolic link to: {dataset['dataset_link_abs_dir']}")
                os.symlink(dataset['dataset_link_abs_dir'], new_directory_path, True)
            else:
                return Response("Required field 'dataset_link_abs_dir' is missing from dataset", 500)

        return jsonify(new_datasets_list)
    except HTTPException as hte:
        return Response(hte.get_description(), hte.get_status_code())
    except Exception as e:
        logger.error(e, exc_info=True)
        return Response("Unexpected error while creating a dataset: " + str(e) + " Check the logs", 500)


@entity_CRUD_blueprint.route('/sources/bulk/validate', methods=['POST'])
def bulk_sources_upload_and_validate():
    """Validate a bulk upload of sources.

    Delegates to the shared `_bulk_upload_and_validate` helper, passing the
    SOURCE entity type from the ontology so the helper applies source-specific
    validation rules.
    """
    return _bulk_upload_and_validate(Ontology.ops().entities().SOURCE)
Expand Down

0 comments on commit c91cd4e

Please sign in to comment.