From a02618d03a969e8818085dac1fd42c41b9a37f6f Mon Sep 17 00:00:00 2001
From: ChuckKollar
Date: Mon, 19 Feb 2024 12:40:32 -0500
Subject: [PATCH] Move writing of metadata.json to the end of dataset publish

---
 src/app.py | 29 ++++++++++++++++-------------
 1 file changed, 16 insertions(+), 13 deletions(-)

diff --git a/src/app.py b/src/app.py
index 4b40b89d..aae9d4d5 100644
--- a/src/app.py
+++ b/src/app.py
@@ -945,19 +945,8 @@ def publish_datastage(identifier):
         dataset_contributors = dataset_contributors.replace("'", '"')
         if len(json.loads(dataset_contacts)) < 1 or len(json.loads(dataset_contributors)) < 1:
             return jsonify({"error": f"{dataset_uuid} missing contacts or contributors. Must have at least one of each"}), 400
+
         ingest_helper: IngestFileHelper = IngestFileHelper(app.config)
-        ds_path = ingest_helper.dataset_directory_absolute_path(dataset_data_access_level,
-                                                                dataset_group_uuid, dataset_uuid, False)
-        md_file = os.path.join(ds_path, "metadata.json")
-        json_object = entity_json_dumps(entity_instance, dataset_uuid)
-        logger.info(f"publish_datastage; writing metadata.json file: '{md_file}'; "
-                    f"containing: '{json_object}'")
-        try:
-            with open(md_file, "w") as outfile:
-                outfile.write(json_object)
-        except Exception as e:
-            logger.exception(f"Fatal error while writing md_file {md_file}; {str(e)}")
-            return jsonify({"error": f"{dataset_uuid} problem writing metadata.json file."}), 500
 
         data_access_level = dataset_data_access_level
         #if consortium access level convert to public dataset, if protected access leave it protected
@@ -1038,6 +1027,20 @@ def publish_datastage(identifier):
             for e_id in uuids_for_public:
                 out = entity_instance.clear_cache(e_id)
 
+        # Write out the metadata.json file after all processing has been done...
+        ds_path = ingest_helper.dataset_directory_absolute_path(dataset_data_access_level,
+                                                                dataset_group_uuid, dataset_uuid, False)
+        md_file = os.path.join(ds_path, "metadata.json")
+        json_object = entity_json_dumps(entity_instance, dataset_uuid)
+        logger.info(f"publish_datastage; writing metadata.json file: '{md_file}'; "
+                    f"containing: '{json_object}'")
+        try:
+            with open(md_file, "w") as outfile:
+                outfile.write(json_object)
+        except Exception as e:
+            logger.exception(f"Fatal error while writing md_file {md_file}; {str(e)}")
+            return jsonify({"error": f"{dataset_uuid} problem writing metadata.json file."}), 500
+
         if no_indexing_and_acls:
             r_val = {'acl_cmd': acls_cmd, 'donors_for_indexing': donors_to_reindex}
         else:
@@ -2516,7 +2519,7 @@ def update_datasets_datastatus():
             dataset['has_dataset_metadata'] = has_dataset_metadata
 
         for prop in dataset:
-            if isinstance(dataset[prop], list) and prop is not 'descendant_datasets':
+            if isinstance(dataset[prop], list) and prop != 'descendant_datasets':
                 dataset[prop] = ", ".join(dataset[prop])
             if isinstance(dataset[prop], (bool, int)):
                 dataset[prop] = str(dataset[prop])
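
Notes on the change (not part of the patch):

The reordering means metadata.json is no longer written before the
access-level conversion, ACL commands, and cache clearing run; the write
becomes the last step of publish_datastage, so the file captures the fully
published entity state. A minimal, self-contained sketch of that ordering
(the function and step names below are hypothetical stand-ins, not the
ingest-api code):

    import json
    import logging
    import os
    import tempfile

    logger = logging.getLogger(__name__)

    def publish(ds_path, entity, steps):
        # Run every other publish step first (access-level update, ACLs,
        # cache clearing, ...); these names are placeholders for illustration.
        for step in steps:
            step(entity)
        # Only then write metadata.json, so it reflects the post-publish state.
        md_file = os.path.join(ds_path, "metadata.json")
        try:
            with open(md_file, "w") as outfile:
                outfile.write(json.dumps(entity))
        except Exception as e:
            logger.exception(f"Fatal error while writing md_file {md_file}; {str(e)}")
            raise

    # Example usage with a throwaway directory and a trivial publish step.
    ds_path = tempfile.mkdtemp()
    publish(ds_path, {"uuid": "1234"}, [lambda e: e.update(status="Published")])

The final hunk also swaps "is not" for "!=": "is" compares object identity
rather than value, so matching a string against the literal
'descendant_datasets' with "is not" depends on string interning and emits a
SyntaxWarning on Python 3.8+, while "!=" performs the intended value
comparison.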