Move writing of metadata.json to the end of dataset publish endpoint #497

Merged · 1 commit · Feb 19, 2024
src/app.py: 29 changes (16 additions & 13 deletions)
@@ -945,19 +945,8 @@ def publish_datastage(identifier):
            dataset_contributors = dataset_contributors.replace("'", '"')
            if len(json.loads(dataset_contacts)) < 1 or len(json.loads(dataset_contributors)) < 1:
                return jsonify({"error": f"{dataset_uuid} missing contacts or contributors. Must have at least one of each"}), 400

            ingest_helper: IngestFileHelper = IngestFileHelper(app.config)
-            ds_path = ingest_helper.dataset_directory_absolute_path(dataset_data_access_level,
-                                                                     dataset_group_uuid, dataset_uuid, False)
-            md_file = os.path.join(ds_path, "metadata.json")
-            json_object = entity_json_dumps(entity_instance, dataset_uuid)
-            logger.info(f"publish_datastage; writing metadata.json file: '{md_file}'; "
-                        f"containing: '{json_object}'")
-            try:
-                with open(md_file, "w") as outfile:
-                    outfile.write(json_object)
-            except Exception as e:
-                logger.exception(f"Fatal error while writing md_file {md_file}; {str(e)}")
-                return jsonify({"error": f"{dataset_uuid} problem writing metadata.json file."}), 500

            data_access_level = dataset_data_access_level
            #if consortium access level convert to public dataset, if protected access leave it protected
@@ -1038,6 +1027,20 @@ def publish_datastage(identifier):
            for e_id in uuids_for_public:
                out = entity_instance.clear_cache(e_id)

+            # Write out the metadata.json file after all processing has been done...
+            ds_path = ingest_helper.dataset_directory_absolute_path(dataset_data_access_level,
+                                                                     dataset_group_uuid, dataset_uuid, False)
+            md_file = os.path.join(ds_path, "metadata.json")
+            json_object = entity_json_dumps(entity_instance, dataset_uuid)
+            logger.info(f"publish_datastage; writing metadata.json file: '{md_file}'; "
+                        f"containing: '{json_object}'")
+            try:
+                with open(md_file, "w") as outfile:
+                    outfile.write(json_object)
+            except Exception as e:
+                logger.exception(f"Fatal error while writing md_file {md_file}; {str(e)}")
+                return jsonify({"error": f"{dataset_uuid} problem writing metadata.json file."}), 500
+
            if no_indexing_and_acls:
                r_val = {'acl_cmd': acls_cmd, 'donors_for_indexing': donors_to_reindex}
            else:
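
The net effect of the two hunks above is an ordering change: metadata.json is no longer written near the top of the publish endpoint, but only after the Neo4j updates, ACL handling and entity-cache clearing have run, so the serialized entity reflects the post-publish state and an earlier failure in the endpoint does not leave a metadata file behind (this reading of the intent is inferred from the diff, not stated in the PR). Below is a minimal standalone sketch of that ordering; publish_dataset and run_publish_steps are hypothetical names, and plain json.dumps stands in for the repository's entity_json_dumps helper.

import json
import logging
import os

logger = logging.getLogger(__name__)


def run_publish_steps(dataset_uuid: str) -> None:
    # Hypothetical stand-in for the real work the endpoint does first:
    # updating the dataset in Neo4j, setting ACLs and clearing entity caches.
    pass


def publish_dataset(dataset_uuid: str, ds_path: str, entity: dict) -> None:
    # 1. Do all of the state-changing publish work first.
    run_publish_steps(dataset_uuid)

    # 2. Only then serialize the (now fully updated) entity next to the data files,
    #    mirroring the ordering introduced by this PR.
    md_file = os.path.join(ds_path, "metadata.json")
    json_object = json.dumps(entity, indent=2)
    logger.info("writing metadata.json file: '%s'", md_file)
    try:
        with open(md_file, "w") as outfile:
            outfile.write(json_object)
    except OSError:
        logger.exception("Fatal error while writing %s", md_file)
        raise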
@@ -2516,7 +2519,7 @@ def update_datasets_datastatus():
        dataset['has_dataset_metadata'] = has_dataset_metadata

        for prop in dataset:
-            if isinstance(dataset[prop], list) and prop is not 'descendant_datasets':
+            if isinstance(dataset[prop], list) and prop != 'descendant_datasets':
                dataset[prop] = ", ".join(dataset[prop])
            if isinstance(dataset[prop], (bool, int)):
                dataset[prop] = str(dataset[prop])
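
The one-line fix in the last hunk swaps an identity check for an equality check: prop is not 'descendant_datasets' asks whether prop is the same object as the literal, not whether it has the same value. CPython happens to intern many short literals, so the old code could work by accident, and since Python 3.8 the interpreter flags the pattern with a SyntaxWarning ("is not" with a literal). A quick illustration follows; the join is just a way to build a string that is equal in value but a distinct object.

# Equality vs. identity for strings.
prop = "".join(["descendant_", "datasets"])   # equal in value to the literal, but a distinct object

print(prop == "descendant_datasets")          # True  - value comparison, what the code intends
print(prop != "descendant_datasets")          # False - the corrected guard behaves as expected
print(prop is not "descendant_datasets")      # typically True in CPython: identity differs, so the old guard misfired
# Running the last line on Python 3.8+ also emits: SyntaxWarning: "is not" with a literal. Did you mean "!="?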