From f7bc149a6b03dbd7aa4966bda468330bf920bdd6 Mon Sep 17 00:00:00 2001 From: fnets Date: Wed, 1 May 2024 12:21:31 -0500 Subject: [PATCH 1/3] task/DES-2638 - Recon Portal Header Update (#1222) * Added validators to recon portal event creator to make sure that lat and long values are within possible bounds. * Added Recon Portal header, the logo should take 35% of the header row at max and the text should take the rest. It is responsive, too * Incorporating feedback from PIs to fill up white space and improve text * Fixing Typo Accidentally removed a css property --------- Co-authored-by: Sal Tijerina --- designsafe/apps/rapid/forms.py | 5 +-- .../designsafe/apps/rapid/index.html | 22 ++++++++++--- .../static/scripts/rapid/html/index.html | 3 -- .../static/scripts/rapid/styles/rapid.css | 31 ++++++++++++++++++- 4 files changed, 50 insertions(+), 11 deletions(-) diff --git a/designsafe/apps/rapid/forms.py b/designsafe/apps/rapid/forms.py index cccc76bf67..b1bc5c174b 100644 --- a/designsafe/apps/rapid/forms.py +++ b/designsafe/apps/rapid/forms.py @@ -1,4 +1,5 @@ from django import forms +from django.core.validators import MinValueValidator, MaxValueValidator from designsafe.apps.rapid.models import RapidNHEvent, RapidNHEventType import logging @@ -13,8 +14,8 @@ class RapidNHEventForm(forms.Form): title = forms.CharField(label="Event Title") event_type = forms.ChoiceField(label="Hazard Event Type", required=True) location_description = forms.CharField(label="Brief location description", required=True) - lat = forms.FloatField(label="Latitude", required=True) - lon = forms.FloatField(label="Longitude", required=True) + lat = forms.FloatField(validators=[MinValueValidator(-90), MaxValueValidator(90)], label="Latitude", required=True) + lon = forms.FloatField(validators=[MinValueValidator(-180), MaxValueValidator(180)],label="Longitude", required=True) image = forms.FileField(label="Banner image for detail", required=False) # def clean(self): diff --git 
a/designsafe/apps/rapid/templates/designsafe/apps/rapid/index.html b/designsafe/apps/rapid/templates/designsafe/apps/rapid/index.html index 6c3bd04a6d..dd4d6cfa72 100644 --- a/designsafe/apps/rapid/templates/designsafe/apps/rapid/index.html +++ b/designsafe/apps/rapid/templates/designsafe/apps/rapid/index.html @@ -7,11 +7,23 @@ {% block main %} -
-
-
-
-
+
+ +
+

+ The Recon Portal is an interactive world map of natural hazard events with associated datasets. View available datasets by clicking on a marker, browsing the listing of events, or searching by clicking on "Show filter options". +
+ Learn how to contribute your datasets. +

+
+
+
+
+
+
+
diff --git a/designsafe/static/scripts/rapid/html/index.html b/designsafe/static/scripts/rapid/html/index.html index 37ea1272d6..ccfb1956ff 100644 --- a/designsafe/static/scripts/rapid/html/index.html +++ b/designsafe/static/scripts/rapid/html/index.html @@ -14,9 +14,6 @@

Select License

    diff --git a/client/modules/datafiles/src/projects/ProjectPipeline/ProjectPipeline.tsx b/client/modules/datafiles/src/projects/ProjectPipeline/ProjectPipeline.tsx index 4c8a4b8121..3ea46d3a60 100644 --- a/client/modules/datafiles/src/projects/ProjectPipeline/ProjectPipeline.tsx +++ b/client/modules/datafiles/src/projects/ProjectPipeline/ProjectPipeline.tsx @@ -1,22 +1,35 @@ -import React, { useCallback, useMemo, useState } from 'react'; +import React, { useCallback, useEffect, useMemo, useState } from 'react'; import { Steps } from 'antd'; import { PipelineSelectForPublish } from './PipelineSelectForPublish'; import { PipelineProofreadProjectStep } from './PipelineProofreadProjectStep'; -import { useProjectDetail } from '@client/hooks'; +import { TBaseProjectValue, useProjectDetail } from '@client/hooks'; import { PipelineOtherSelectFiles } from './PipelineOtherSelectFiles'; import { PipelineOrderAuthors } from './PipelineOrderAuthors'; import { PipelineProofreadPublications } from './PipelineProofreadPublications'; import { PipelineProofreadCategories } from './PipelineProofreadCategories'; import { PipelineSelectLicense } from './PipelineSelectLicense'; +import { useSearchParams } from 'react-router-dom'; const getSteps = ( projectId: string, - projectType: string, + projectType: TBaseProjectValue['projectType'], next: () => void, prev: () => void ) => { + const proofreadStepMapping: Partial< + Record + > = { + experimental: 'Experiments', + field_recon: 'Missions', + hybrid_simulation: 'Hybrid Simulations', + simulation: 'Simulations', + }; + switch (projectType) { case 'hybrid_simulation': + case 'experimental': + case 'field_recon': + case 'simulation': return [ { title: 'Selection', @@ -35,7 +48,7 @@ const getSteps = ( ), }, { - title: 'Proofread Experiments', + title: `Proofread ${proofreadStepMapping[projectType]}`, content: ( + ), }, ]; default: @@ -122,10 +141,24 @@ export const ProjectPipeline: React.FC<{ projectId: string }> = ({ projectId, }) => { 
const [current, setCurrent] = useState(0); + const [, setSearchParams] = useSearchParams(); const { data } = useProjectDetail(projectId); const projectType = data?.baseProject.value.projectType; + // type Other doesn't support entity selection, so we select the base project here. + useEffect(() => { + if (projectType === 'other' && data?.baseProject.uuid) { + setSearchParams( + (e) => { + e.set('selected', data?.baseProject.uuid); + return e; + }, + { replace: true } + ); + } + }, [projectType, setSearchParams, data]); + const next = useCallback(() => { setCurrent(current + 1); }, [current, setCurrent]); @@ -134,10 +167,10 @@ export const ProjectPipeline: React.FC<{ projectId: string }> = ({ setCurrent(current - 1); }, [current, setCurrent]); - const steps = useMemo( - () => getSteps(projectId, projectType ?? '', next, prev), - [projectId, projectType, next, prev] - ); + const steps = useMemo(() => { + if (!projectType) return []; + return getSteps(projectId, projectType, next, prev); + }, [projectId, projectType, next, prev]); const items = steps.map((item) => ({ key: item.title, title: item.title })); if (!data) return null; diff --git a/client/modules/datafiles/src/projects/ProjectPreview/ProjectPreview.tsx b/client/modules/datafiles/src/projects/ProjectPreview/ProjectPreview.tsx index 947b98ed8a..80f7dc2290 100644 --- a/client/modules/datafiles/src/projects/ProjectPreview/ProjectPreview.tsx +++ b/client/modules/datafiles/src/projects/ProjectPreview/ProjectPreview.tsx @@ -3,6 +3,7 @@ import { TPreviewTreeData, useProjectPreview, usePublicationDetail, + usePublicationVersions, useSelectedFiles, } from '@client/hooks'; import { Button, Collapse } from 'antd'; @@ -235,6 +236,8 @@ export const PublicationView: React.FC<{ useEffect(() => unsetSelections(), [projectId, unsetSelections]); const { children } = data?.tree ?? {}; + const { selectedVersion } = usePublicationVersions(projectId); + const sortedChildren = useMemo( () => [...(children ?? 
[])].sort((a, b) => (a.order ?? 0) - (b.order ?? 0)), [children] @@ -246,7 +249,8 @@ export const PublicationView: React.FC<{ {sortedChildren .filter( (child) => - child.version === version && child.name !== 'designsafe.project' + child.version === selectedVersion && + child.name !== 'designsafe.project' ) .map((child, idx) => ( - Facility + Simulation Type {entityValue.simulationType?.name} @@ -61,14 +61,14 @@ export const PublishedEntityDetails: React.FC<{ {entityValue.authors ?.filter((a) => a.authorship !== false) .map((u, i) => ( - <> + {i !== (entityValue.authors?.filter( (a) => a.authorship !== false ).length ?? 0) - 1 && '; '} - + ))} @@ -85,7 +85,7 @@ export const PublishedEntityDetails: React.FC<{ {entityValue.experimentType && ( - Facility + Experiment Type {entityValue.experimentType?.name} diff --git a/client/src/datafiles/layouts/projects/ProjectPipelineSelectLayout.tsx b/client/src/datafiles/layouts/projects/ProjectPipelineSelectLayout.tsx index 17c4eb9b24..14e79732ed 100644 --- a/client/src/datafiles/layouts/projects/ProjectPipelineSelectLayout.tsx +++ b/client/src/datafiles/layouts/projects/ProjectPipelineSelectLayout.tsx @@ -1,9 +1,16 @@ +import { useProjectDetail } from '@client/hooks'; import React from 'react'; import { NavLink, useParams } from 'react-router-dom'; export const ProjectPipelineSelectLayout: React.FC = () => { const { projectId } = useParams(); - if (!projectId) return null; + const { data } = useProjectDetail(projectId ?? ''); + if (!projectId || !data) return null; + + const has_published_entities = !!data.entities.find( + (e) => e.value.dois && e.value.dois.length > 0 + ); + return (
    @@ -42,7 +49,12 @@ export const ProjectPipelineSelectLayout: React.FC = () => {
- +
@@ -59,7 +71,12 @@ export const ProjectPipelineSelectLayout: React.FC = () => { natural hazard event. - +

Versioning

@@ -83,7 +100,12 @@ export const ProjectPipelineSelectLayout: React.FC = () => { - +
diff --git a/client/src/datafiles/layouts/published/PublishedDetailLayout.tsx b/client/src/datafiles/layouts/published/PublishedDetailLayout.tsx index dbbf61ad24..d6ceea8309 100644 --- a/client/src/datafiles/layouts/published/PublishedDetailLayout.tsx +++ b/client/src/datafiles/layouts/published/PublishedDetailLayout.tsx @@ -1,5 +1,5 @@ import { BaseProjectDetails, DatafilesToolbar } from '@client/datafiles'; -import { usePublicationDetail } from '@client/hooks'; +import { usePublicationDetail, usePublicationVersions } from '@client/hooks'; import React, { useEffect } from 'react'; import { Button, Form, Input } from 'antd'; import { Navigate, Outlet, useParams, useSearchParams } from 'react-router-dom'; @@ -35,6 +35,7 @@ export const PublishedDetailLayout: React.FC = () => { const { projectId, path } = useParams(); const [searchParams, setSearchParams] = useSearchParams(); const { data } = usePublicationDetail(projectId ?? ''); + const { allVersions } = usePublicationVersions(projectId ?? ''); const version = (projectId ?? '').split('v')[1]; useEffect(() => { @@ -74,6 +75,7 @@ export const PublishedDetailLayout: React.FC = () => {
diff --git a/client/src/datafiles/layouts/published/PublishedEntityListingLayout.tsx b/client/src/datafiles/layouts/published/PublishedEntityListingLayout.tsx index 46298a5fa5..aba9849096 100644 --- a/client/src/datafiles/layouts/published/PublishedEntityListingLayout.tsx +++ b/client/src/datafiles/layouts/published/PublishedEntityListingLayout.tsx @@ -1,12 +1,12 @@ import { FileListing, PublicationView } from '@client/datafiles'; -import { usePublicationDetail } from '@client/hooks'; +import { usePublicationDetail, usePublicationVersions } from '@client/hooks'; import React from 'react'; import { useParams } from 'react-router-dom'; export const PublishedEntityListingLayout: React.FC = () => { const { projectId } = useParams(); const { data } = usePublicationDetail(projectId ?? ''); - + const { selectedVersion } = usePublicationVersions(projectId ?? ''); if (!projectId || !data) return null; return ( @@ -19,7 +19,10 @@ export const PublishedEntityListingLayout: React.FC = () => { scroll={{ y: 500 }} api="tapis" system="designsafe.storage.published" - path={data.baseProject.projectId} + path={encodeURIComponent( + data.tree.children.find((c) => c.version === selectedVersion) + ?.basePath ?? '' + )} baseRoute="." 
/> )} diff --git a/designsafe/apps/api/projects_v2/operations/_tests/publish_unit_test.py b/designsafe/apps/api/projects_v2/operations/_tests/publish_unit_test.py index 61adbbb1a0..c8a222faea 100644 --- a/designsafe/apps/api/projects_v2/operations/_tests/publish_unit_test.py +++ b/designsafe/apps/api/projects_v2/operations/_tests/publish_unit_test.py @@ -8,23 +8,17 @@ create_project_metdata, create_entity_metadata, add_file_associations, - remove_file_associations, - add_file_tags, set_file_tags, - remove_file_tags, - ProjectMetadata, FileObj, - FileTag, ) from designsafe.apps.api.projects_v2.operations.graph_operations import ( initialize_project_graph, add_node_to_project, - remove_nodes_from_project, - reorder_project_nodes, ) from designsafe.apps.api.projects_v2.operations.project_publish_operations import ( get_publication_subtree, + get_publication_full_tree, ) @@ -75,15 +69,15 @@ def project_with_associations(): set_file_tags(model_config.uuid, "/path/to/dir1/nested/file", ["test_tag"]) set_file_tags(model_config.uuid, "/path/to/other/file1", ["test_tag"]) - yield (project, experiment.uuid) + yield (project, experiment.uuid, project.uuid) @pytest.mark.django_db -def test_fixture_works(project_with_associations): - (project, exp_uuid) = project_with_associations +def test_publication_subtree(project_with_associations): + (project, exp_uuid, project_uuid) = project_with_associations assert project.name == "designsafe.project" - subtree = get_publication_subtree("PRJ-1234", exp_uuid) + subtree, path_mapping = get_publication_subtree("PRJ-1234", exp_uuid) assert len(subtree) == 3 mc_data = next( @@ -93,25 +87,112 @@ def test_fixture_works(project_with_associations): ) entity_file_paths = [f["path"] for f in mc_data["value"]["fileObjs"]] - expected_file_name = ( - "/PRJ-1234/Experiment--test-experiment/data/Model-config--test-entity/file1" - ) - expected_dupe_file_name = ( - "/PRJ-1234/Experiment--test-experiment/data/Model-config--test-entity/file1(1)" - ) - 
expected_dir_name = ( - "/PRJ-1234/Experiment--test-experiment/data/Model-config--test-entity/dir1" - ) + expected_file_name = "/PRJ-1234/path/to/file1" + expected_dupe_file_name = "/PRJ-1234/path/to/other/file1" - assert expected_dir_name in entity_file_paths assert expected_file_name in entity_file_paths assert expected_dupe_file_name in entity_file_paths entity_tag_paths = [f["path"] for f in mc_data["value"]["fileTags"]] - expected_tag_path_1 = ( - "/PRJ-1234/Experiment--test-experiment/data/Model-config--test-entity/file1" + expected_tag_path_1 = "/PRJ-1234/path/to/file1" + expected_tag_path_2 = "/PRJ-1234/path/to/dir1/nested/file" + assert expected_tag_path_1 in entity_tag_paths + assert expected_tag_path_2 in entity_tag_paths + assert path_mapping == { + f"/corral-repl/tacc/NHERI/projects/{project_uuid}/path/to/file1": "/corral-repl/tacc/NHERI/published/PRJ-1234/path/to/file1", + f"/corral-repl/tacc/NHERI/projects/{project_uuid}/path/to/other/file1": "/corral-repl/tacc/NHERI/published/PRJ-1234/path/to/other/file1", + f"/corral-repl/tacc/NHERI/projects/{project_uuid}/path/to/dir1": "/corral-repl/tacc/NHERI/published/PRJ-1234/path/to/dir1", + } + + +@pytest.mark.django_db +def test_publication_subtree_with_version(project_with_associations): + (project, exp_uuid, project_uuid) = project_with_associations + assert project.name == "designsafe.project" + + subtree, path_mapping = get_publication_subtree("PRJ-1234", exp_uuid, version=2) + assert len(subtree) == 3 + + mc_data = next( + subtree.nodes[node] + for node in subtree + if subtree.nodes[node]["name"] == constants.EXPERIMENT_MODEL_CONFIG ) - expected_tag_path_2 = "/PRJ-1234/Experiment--test-experiment/data/Model-config--test-entity/dir1/nested/file" + + entity_file_paths = [f["path"] for f in mc_data["value"]["fileObjs"]] + expected_file_name = "/PRJ-1234v2/path/to/file1" + expected_dupe_file_name = "/PRJ-1234v2/path/to/other/file1" + + assert expected_file_name in entity_file_paths + assert 
expected_dupe_file_name in entity_file_paths + + entity_tag_paths = [f["path"] for f in mc_data["value"]["fileTags"]] + expected_tag_path_1 = "/PRJ-1234v2/path/to/file1" + expected_tag_path_2 = "/PRJ-1234v2/path/to/dir1/nested/file" assert expected_tag_path_1 in entity_tag_paths assert expected_tag_path_2 in entity_tag_paths - assert expected_dupe_file_name in entity_tag_paths + + assert path_mapping == { + f"/corral-repl/tacc/NHERI/projects/{project_uuid}/path/to/file1": "/corral-repl/tacc/NHERI/published/PRJ-1234v2/path/to/file1", + f"/corral-repl/tacc/NHERI/projects/{project_uuid}/path/to/other/file1": "/corral-repl/tacc/NHERI/published/PRJ-1234v2/path/to/other/file1", + f"/corral-repl/tacc/NHERI/projects/{project_uuid}/path/to/dir1": "/corral-repl/tacc/NHERI/published/PRJ-1234v2/path/to/dir1", + } + + +@pytest.mark.django_db +def test_tree_multiple_experiments(project_with_associations): + """ + Set up a project with 2 experiments, and assert that get_publication_full_tree + returns the correct tree and file mapping. + """ + (_, exp_uuid, project_uuid) = project_with_associations + + # Add a new experiment with an additional model config, and add it to the tree. 
+ experiment2_value = {"title": "Test Experiment 2", "description": "Experiment test"} + model_config2_value = { + "title": "Test Entity 2", + "description": "Entity with file associations", + } + + experiment2 = create_entity_metadata( + "PRJ-1234", name=constants.EXPERIMENT, value=experiment2_value + ) + model_config2 = create_entity_metadata( + "PRJ-1234", name=constants.EXPERIMENT_MODEL_CONFIG, value=model_config2_value + ) + + experiment2_node = add_node_to_project( + "PRJ-1234", "NODE_ROOT", experiment2.uuid, experiment2.name + ) + add_node_to_project( + "PRJ-1234", experiment2_node, model_config2.uuid, model_config2.name + ) + + file_objs = [ + FileObj( + system="project.system", name="file1", path="/path/to/file3", type="file" + ), + FileObj( + system="project.system", + name="file3", + path="/path/to/other/file3", + type="file", + ), + FileObj(system="project.system", name="dir2", path="/path/to/dir2", type="dir"), + ] + add_file_associations(model_config2.uuid, file_objs) + + full_tree, full_path_mapping = get_publication_full_tree( + "PRJ-1234", [exp_uuid, experiment2.uuid] + ) + + assert len(full_tree) == 5 + + assert full_path_mapping == { + f"/corral-repl/tacc/NHERI/projects/{project_uuid}/path/to/file1": "/corral-repl/tacc/NHERI/published/PRJ-1234/path/to/file1", + f"/corral-repl/tacc/NHERI/projects/{project_uuid}/path/to/other/file1": "/corral-repl/tacc/NHERI/published/PRJ-1234/path/to/other/file1", + f"/corral-repl/tacc/NHERI/projects/{project_uuid}/path/to/dir1": "/corral-repl/tacc/NHERI/published/PRJ-1234/path/to/dir1", + f"/corral-repl/tacc/NHERI/projects/{project_uuid}/path/to/file3": "/corral-repl/tacc/NHERI/published/PRJ-1234/path/to/file3", + f"/corral-repl/tacc/NHERI/projects/{project_uuid}/path/to/other/file3": "/corral-repl/tacc/NHERI/published/PRJ-1234/path/to/other/file3", + f"/corral-repl/tacc/NHERI/projects/{project_uuid}/path/to/dir2": "/corral-repl/tacc/NHERI/published/PRJ-1234/path/to/dir2", + } diff --git 
a/designsafe/apps/api/projects_v2/operations/datacite_operations.py b/designsafe/apps/api/projects_v2/operations/datacite_operations.py new file mode 100644 index 0000000000..21e4b48937 --- /dev/null +++ b/designsafe/apps/api/projects_v2/operations/datacite_operations.py @@ -0,0 +1,247 @@ +"""Operations to format and manage Datacite DOIs""" +import datetime +from typing import Optional +import json +import requests +import networkx as nx +from django.conf import settings +from designsafe.apps.api.projects_v2 import constants +from designsafe.apps.api.projects_v2.schema_models import PATH_SLUGS + + +# pylint: disable=too-many-locals, too-many-branches, too-many-statements +def get_datacite_json( + pub_graph: nx.DiGraph, entity_uuid: str, version: Optional[int] = 1 +): + """ + Generate datacite payload for a publishable entity. `pub_graph` is the output of + either `get_publication_subtree` or `get_publication_full_tree`. + """ + + datacite_json = {} + is_other = pub_graph.nodes["NODE_ROOT"].get("projectType", None) == "other" + if is_other: + base_meta_node = next( + ( + node + for node in pub_graph + if pub_graph.nodes[node]["name"] == constants.PROJECT + and pub_graph.nodes[node]["version"] == version + ), + None, + ) + else: + base_meta_node = "NODE_ROOT" + + base_meta = pub_graph.nodes[base_meta_node]["value"] + + entity_node = base_meta_node = next( + ( + node + for node in pub_graph + if pub_graph.nodes[node]["uuid"] == entity_uuid + and pub_graph.nodes[node]["version"] == version + ), + None, + ) + + author_attr = [] + institutions = [] + entity_meta = pub_graph.nodes[entity_node]["value"] + for author in entity_meta.get("authors", []): + author_attr.append( + { + "nameType": "Personal", + "givenName": author.get("fname", ""), + "familyName": author.get("lname", ""), + } + ) + institutions.append(author.get("inst", "")) + + datacite_json["contributors"] = [ + { + "contributorType": "HostingInstitution", + "nameType": "Organizational", + "name": institution, + 
} + for institution in list(set(institutions)) + ] + datacite_json["creators"] = author_attr + datacite_json["titles"] = [ + {"title": title} for title in set([entity_meta["title"]]) + ] + if not is_other: + datacite_json["titles"].append( + {"title": base_meta["title"], "titleType": "Subtitle"} + ) + datacite_json["publisher"] = "Designsafe-CI" + + if version == 1 or not version: + initial_pub_date = pub_graph.nodes[entity_node]["publicationDate"] + datacite_json["publicationYear"] = datetime.datetime.fromisoformat( + initial_pub_date + ).year + + datacite_json["types"] = {} + + datacite_json["types"]["resourceType"] = PATH_SLUGS.get( + pub_graph.nodes[entity_node]["name"] + ) + if data_type := entity_meta.get("dataType", None): + datacite_json["types"]["resourceType"] += f"/{data_type['name']}" + if exp_type := entity_meta.get("experimentType", None): + datacite_json["types"]["resourceType"] += f"/{exp_type['name']}" + if sim_type := entity_meta.get("simulationType", None): + datacite_json["types"]["resourceType"] += f"/{sim_type['name']}" + if location := entity_meta.get("location", None): + datacite_json["types"]["resourceType"] += f"/{location}" + + datacite_json["types"]["resourceTypeGeneral"] = "Dataset" + + datacite_json["descriptions"] = [ + { + "descriptionType": "Abstract", + "description": desc, + "lang": "en-Us", + } + for desc in set([base_meta["description"], entity_meta["description"]]) + ] + + datacite_json["subjects"] = [ + {"subject": keyword} for keyword in base_meta.get("keywords", []) + ] + + facilities = entity_meta.get("facilities", []) + if exp_facility := entity_meta.get("facility", None): + facilities.append(exp_facility) + + for facility in facilities: + datacite_json["subjects"].append(facility["name"]) + datacite_json["contributors"].append( + { + "contributorType": "HostingInstitution", + "nameType": "Organizational", + "name": facility["name"], + } + ) + + datacite_json["language"] = "English" + datacite_json["identifiers"] = [ + { + 
"identifierType": "Project ID", + "identifier": base_meta["projectId"], + } + ] + + datacite_json["fundingReferences"] = [ + { + "awardTitle": award["name"], + "awardNumber": award["number"], + "funderName": award.get("fundingSource", "N/A") or "N/A", + } + for award in base_meta["awardNumbers"] + ] + + datacite_json["relatedIdentifiers"] = [] + relation_mapping = { + "Linked Project": "IsPartOf", + "Linked Dataset": "IsPartOf", + "Cited By": "IsCitedBy", + "Context": "IsDocumentedBy", + } + for a_proj in entity_meta.get("associatedProjects", []): # relatedwork + identifier = {} + if {"type", "href", "hrefType"} <= a_proj.keys(): + identifier["relationType"] = relation_mapping[a_proj["type"]] + identifier["relatedIdentifierType"] = a_proj["hrefType"] + identifier["relatedIdentifier"] = a_proj["href"] + datacite_json["relatedIdentifiers"].append(identifier) + + for r_data in entity_meta.get("referencedData", []): + identifier = {} + if {"doi", "hrefType"} <= r_data.keys(): + identifier["relationType"] = "References" + identifier["relatedIdentifier"] = r_data["doi"] + identifier["relatedIdentifierType"] = r_data["hrefType"] + datacite_json["relatedIdentifiers"].append(identifier) + + project_id = base_meta["projectId"] + datacite_url = f"https://www.designsafe-ci.org/data/browser/public/designsafe.storage.published/{project_id}" + if not is_other: + datacite_url += f"/#detail-{entity_uuid}" + if version and version > 1: + datacite_url += f"/?version={version}" + + datacite_json["url"] = datacite_url + datacite_json["prefix"] = settings.DATACITE_SHOULDER + + return datacite_json + + +def upsert_datacite_json(datacite_json: dict, doi: Optional[str] = None): + """ + Create a draft DOI in datacite with the specified metadata. If a DOI is specified, + the metadata for that DOI is updated instead. 
+ """ + if doi: + datacite_json.pop("publicationYear", None) + + datacite_payload = { + "data": { + "type": "dois", + "relationships": { + "client": {"data": {"type": "clients", "id": "tdl.tacc"}} + }, + "attributes": datacite_json, + } + } + if not doi: + res = requests.post( + f"{settings.DATACITE_URL.strip('/')}/dois", + auth=(settings.DATACITE_USER, settings.DATACITE_PASS), + data=json.dumps(datacite_payload), + headers={"Content-Type": "application/vnd.api+json"}, + timeout=30, + ) + else: + res = requests.put( + f"{settings.DATACITE_URL.strip('/')}/dois/{doi}", + auth=(settings.DATACITE_USER, settings.DATACITE_PASS), + data=json.dumps(datacite_payload), + headers={"Content-Type": "application/vnd.api+json"}, + timeout=30, + ) + + return res.json() + + +def publish_datacite_doi(doi: str): + """ + Set a DOI's status to `Findable` in Datacite. + """ + payload = {"data": {"type": "dois", "attributes": {"event": "publish"}}} + + res = requests.put( + f"{settings.DATACITE_URL.strip('/')}/dois/{doi}", + auth=(settings.DATACITE_USER, settings.DATACITE_PASS), + data=json.dumps(payload), + headers={"Content-Type": "application/vnd.api+json"}, + timeout=30, + ) + return res.json() + + +def hide_datacite_doi(doi: str): + """ + Remove a Datacite DOI from public consumption. 
+ """ + payload = {"data": {"type": "dois", "attributes": {"event": "hide"}}} + + res = requests.put( + f"{settings.DATACITE_URL.strip('/')}/dois/{doi}", + auth=(settings.DATACITE_USER, settings.DATACITE_PASS), + data=json.dumps(payload), + headers={"Content-Type": "application/vnd.api+json"}, + timeout=30, + ) + return res.json() diff --git a/designsafe/apps/api/projects_v2/operations/path_operations.py b/designsafe/apps/api/projects_v2/operations/path_operations.py new file mode 100644 index 0000000000..1afa5f0b3f --- /dev/null +++ b/designsafe/apps/api/projects_v2/operations/path_operations.py @@ -0,0 +1,124 @@ +"""Operations for updating file and tag paths so that the entity relations are represented +in the directory structure. UNUSED for now, pending stakeholder approval. +""" + +from typing import Optional +from pathlib import Path +import networkx as nx +from django.utils.text import slugify +from designsafe.apps.api.projects_v2.schema_models import PATH_SLUGS + + +def construct_entity_filepaths(pub_graph: nx.DiGraph, version: Optional[int] = None): + """ + Walk the publication graph and construct base file paths for each node. + The file path for a node contains the titles of each entity above it in the + hierarchy. Returns the publication graph with basePath data added for each node. + """ + for parent_node, child_node in nx.bfs_edges(pub_graph, "NODE_ROOT"): + # Construct paths based on the entity hierarchy + parent_base_path = pub_graph.nodes[parent_node]["basePath"] + entity_name_slug = PATH_SLUGS.get(pub_graph.nodes[child_node]["name"]) + entity_title = pub_graph.nodes[child_node]["value"]["title"] + + entity_dirname = f"{entity_name_slug}--{slugify(entity_title)}" + + if version and version > 1 and child_node in pub_graph.successors("NODE_ROOT"): + # Version datasets if the containing publication is versioned. 
+ child_path = Path(parent_base_path) / f"{entity_dirname}--v{version}" + elif parent_node in pub_graph.successors("NODE_ROOT"): + # Publishable entities have a "data" folder in Bagit ontology. + child_path = Path(parent_base_path) / "data" / entity_dirname + else: + child_path = Path(parent_base_path) / entity_dirname + + pub_graph.nodes[child_node]["basePath"] = str(child_path) + return pub_graph + + +def map_project_paths_to_published( + file_objs: list[dict], base_path: str +) -> dict[str, str]: + """Construct a mapping of project paths to paths in the published archive.""" + path_mapping = {} + duplicate_counts = {} + for file_obj in file_objs: + pub_path = str(Path(base_path) / Path(file_obj["path"]).name) + if pub_path in path_mapping.values(): + duplicate_counts[pub_path] = duplicate_counts.get(pub_path, 0) + 1 + # splice dupe count into name, e.g. "myfile(1).txt" + [base_name, *ext] = Path(pub_path).name.split(".", 1) + deduped_name = f"{base_name}({duplicate_counts[pub_path]})" + pub_path = str(Path(base_path) / ".".join([deduped_name, *ext])) + + path_mapping[file_obj["path"]] = pub_path + + return path_mapping + + +def construct_published_path_mappings( + pub_graph: nx.DiGraph, +) -> dict[str, dict[str, str]]: + """ + For each node in the publication graph, get the mapping of file paths in the + PROJECT system to file paths in the PUBLICATION system. 
Returns a dict of form: + {"NODE_ID": {"PROJECT_PATH": "PUBLICATION_PATH"}} + """ + path_mappings = {} + for node in pub_graph: + node_data = pub_graph.nodes[node] + if not node_data.get("value"): + continue + path_mapping = map_project_paths_to_published( + node_data["value"].get("fileObjs", []), node_data["basePath"] + ) + path_mappings[node] = path_mapping + return path_mappings + + +def update_path_mappings(pub_graph: nx.DiGraph): + """update fileObjs and fileTags to point to published paths.""" + pub_mapping = construct_published_path_mappings(pub_graph) + for node in pub_graph: + node_data = pub_graph.nodes[node] + if ( + node not in pub_mapping + or not node_data.get("value") + or not node_data["value"].get("fileObjs") + ): + continue + path_mapping = pub_mapping[node] + new_file_objs = [ + { + **file_obj, + "path": path_mapping[file_obj["path"]], + "system": "designsafe.storage.published", + } + for file_obj in node_data["value"].get("fileObjs", []) + ] + node_data["value"]["fileObjs"] = new_file_objs + + # Update file tags. If the path mapping contains: + # {"/path/to/dir1": "/entity1/dir1"} + # and the tags contain: + # {"path": "/path/to/dir1/file1", "tagName": "my_tag"} + # then we need to construct the file tag: + # {"path": "/entity1/dir1/file1", "tagName": "my_tag"} + file_tags = node_data["value"].get("fileTags", []) + updated_tags = [] + for tag in file_tags: + if not tag.get("path", None): + # If there is no path, we can't recover the tag. 
+ continue + tag_path_prefixes = [p for p in path_mapping if tag["path"].startswith(p)] + + for prefix in tag_path_prefixes: + updated_tags.append( + { + **tag, + "path": tag["path"].replace(prefix, path_mapping[prefix], 1), + } + ) + node_data["value"]["fileTags"] = updated_tags + + return pub_graph diff --git a/designsafe/apps/api/projects_v2/operations/project_meta_operations.py b/designsafe/apps/api/projects_v2/operations/project_meta_operations.py index 91fe8a64f3..45abdf3820 100644 --- a/designsafe/apps/api/projects_v2/operations/project_meta_operations.py +++ b/designsafe/apps/api/projects_v2/operations/project_meta_operations.py @@ -120,6 +120,16 @@ def add_file_associations(uuid: str, new_file_objs: list[FileObj]): return entity +def set_file_associations(uuid: str, new_file_objs: list[FileObj]): + """Replace the file associations for an entity with the specified set.""" + # Use atomic transaction here to prevent multiple calls from clobbering each other + with transaction.atomic(): + entity = ProjectMetadata.objects.select_for_update().get(uuid=uuid) + entity.value["fileObjs"] = [f.model_dump() for f in new_file_objs] + entity.save() + return entity + + def remove_file_associations(uuid: str, file_paths: list[str]): """Remove file associations from an entity by their paths.""" with transaction.atomic(): diff --git a/designsafe/apps/api/projects_v2/operations/project_publish_operations.py b/designsafe/apps/api/projects_v2/operations/project_publish_operations.py index bd8b75406d..dbcc8ef511 100644 --- a/designsafe/apps/api/projects_v2/operations/project_publish_operations.py +++ b/designsafe/apps/api/projects_v2/operations/project_publish_operations.py @@ -1,14 +1,24 @@ """Utils for generating published metadata""" from typing import Optional, Literal +import subprocess +import os +import shutil +import datetime from pathlib import Path import logging +from django.conf import settings import networkx as nx -from django.utils.text import slugify -from 
designsafe.apps.api.projects_v2.schema_models import PATH_SLUGS from designsafe.apps.api.projects_v2 import constants from designsafe.apps.api.projects_v2.models.project_metadata import ProjectMetadata +from designsafe.apps.api.projects_v2.operations.datacite_operations import ( + get_datacite_json, + publish_datacite_doi, + upsert_datacite_json, +) + +from designsafe.apps.api.publications_v2.models import Publication logger = logging.getLogger(__name__) @@ -138,128 +148,68 @@ def add_values_to_tree(project_id: str) -> nx.DiGraph: return publication_tree -def construct_entity_filepaths(pub_graph: nx.DiGraph, version: Optional[int] = None): - """ - Walk the publication graph and construct base file paths for each node. - The file path for a node contains the titles of each entity above it in the - hierarchy. Returns the publication graph with basePath data added for each node. - """ - for parent_node, child_node in nx.bfs_edges(pub_graph, "NODE_ROOT"): - # Construct paths based on the entity hierarchy - parent_base_path = pub_graph.nodes[parent_node]["basePath"] - entity_name_slug = PATH_SLUGS.get(pub_graph.nodes[child_node]["name"]) - entity_title = pub_graph.nodes[child_node]["value"]["title"] - - entity_dirname = f"{entity_name_slug}--{slugify(entity_title)}" - - if version and version > 1 and child_node in pub_graph.successors("NODE_ROOT"): - # Version datasets if the containing publication is versioned. - child_path = Path(parent_base_path) / f"{entity_dirname}--v{version}" - elif parent_node in pub_graph.successors("NODE_ROOT"): - # Publishable entities have a "data" folder in Bagit ontology. 
- child_path = Path(parent_base_path) / "data" / entity_dirname - else: - child_path = Path(parent_base_path) / entity_dirname - - pub_graph.nodes[child_node]["basePath"] = str(child_path) - return pub_graph - - -def map_project_paths_to_published( - file_objs: list[dict], base_path: str -) -> dict[str, str]: - """Construct a mapping of project paths to paths in the published archive.""" +def update_path_mappings(pub_graph: nx.DiGraph, project_uuid: str): + """update fileObjs and fileTags to point to published paths.""" path_mapping = {} - duplicate_counts = {} - for file_obj in file_objs: - pub_path = str(Path(base_path) / Path(file_obj["path"]).name) - if pub_path in path_mapping.values(): - duplicate_counts[pub_path] = duplicate_counts.get(pub_path, 0) + 1 - # splice dupe count into name, e.g. "myfile(1).txt" - [base_name, *ext] = Path(pub_path).name.split(".", 1) - deduped_name = f"{base_name}({duplicate_counts[pub_path]})" - pub_path = str(Path(base_path) / ".".join([deduped_name, *ext])) - - path_mapping[file_obj["path"]] = pub_path - - return path_mapping - - -def construct_published_path_mappings( - pub_graph: nx.DiGraph, -) -> dict[str, dict[str, str]]: - """ - For each node in the publication graph, get the mapping of file paths in the - PROJECT system to file paths in the PUBLICATION system. 
Returns a dict of form: - {"NODE_ID": {"PROJECT_PATH": "PUBLICATION_PATH"}} - """ - path_mappings = {} for node in pub_graph: - node_data = pub_graph.nodes[node] - if not node_data.get("value"): + if node == "NODE_ROOT": continue - path_mapping = map_project_paths_to_published( - node_data["value"].get("fileObjs", []), node_data["basePath"] - ) - path_mappings[node] = path_mapping - return path_mappings - - -def update_path_mappings(pub_graph: nx.DiGraph): - """update fileObjs and fileTags to point to published paths.""" - pub_mapping = construct_published_path_mappings(pub_graph) - for node in pub_graph: node_data = pub_graph.nodes[node] - if ( - node not in pub_mapping - or not node_data.get("value") - or not node_data["value"].get("fileObjs") - ): - continue - path_mapping = pub_mapping[node] - new_file_objs = [ + new_file_objs = [] + for file_obj in node_data["value"].get("fileObjs", []): + pub_path = str( + Path(settings.DESIGNSAFE_PUBLISHED_PATH) + / Path(node_data["basePath"].lstrip("/")) + / Path(file_obj["path"].lstrip("/")) + ) + project_path = str( + Path(settings.DESIGNSAFE_PROJECTS_PATH) + / Path(project_uuid) + / Path(file_obj["path"].lstrip("/")) + ) + path_mapping[project_path] = pub_path + + new_file_objs.append( + { + **file_obj, + "path": str( + Path(node_data["basePath"]) / Path(file_obj["path"].lstrip("/")) + ), + "system": "designsafe.storage.published", + } + ) + + if new_file_objs: + node_data["value"]["fileObjs"] = new_file_objs + + updated_tags = [ { - **file_obj, - "path": path_mapping[file_obj["path"]], - "system": "designsafe.storage.published", + **file_tag, + "path": str( + Path(node_data["basePath"]) / Path(file_tag["path"].lstrip("/")) + ), } - for file_obj in node_data["value"].get("fileObjs", []) + for file_tag in node_data["value"].get("fileTags", []) + if file_tag.get("path", None) ] - node_data["value"]["fileObjs"] = new_file_objs - - # Update file tags. 
If the path mapping contains: - # {"/path/to/dir1": "/entity1/dir1"} - # and the tags contain: - # {"path": "/path/to/dir1/file1", "tagName": "my_tag"} - # then we need to construct the file tag: - # {"path": "/entity1/dir1/file1", "tagName": "my_tag"} - file_tags = node_data["value"].get("fileTags", []) - updated_tags = [] - for tag in file_tags: - if not tag.get("path", None): - # If there is no path, we can't recover the tag. - continue - tag_path_prefixes = [p for p in path_mapping if tag["path"].startswith(p)] - - for prefix in tag_path_prefixes: - updated_tags.append( - { - **tag, - "path": tag["path"].replace(prefix, path_mapping[prefix], 1), - } - ) - node_data["value"]["fileTags"] = updated_tags + if updated_tags: + node_data["value"]["fileTags"] = updated_tags - return pub_graph + return pub_graph, path_mapping def get_publication_subtree( - project_id: str, entity_uuid: str, version: Optional[int] = None + project_id: str, + entity_uuid: str, + version: Optional[int] = None, + version_info: Optional[str] = None, ) -> tuple[str, nx.DiGraph]: """ Obtain the subtree for a single publishable entity (experiment/simulation/etc) and - add version information if relevant. + add version information if relevant. The subtree includes the root node and any + nodes associated with the UUID. 
""" + project_uuid = ProjectMetadata.get_project_by_id(project_id).uuid tree_with_values = add_values_to_tree(project_id) pub_root = next( ( @@ -276,9 +226,20 @@ def get_publication_subtree( ).copy() subtree.nodes[pub_root]["version"] = version or 1 + subtree.nodes[pub_root]["status"] = "published" + subtree.nodes[pub_root]["publicationDate"] = datetime.datetime.now( + datetime.UTC + ).isoformat() + base_pub_path = f"/{project_id}" + if version and version > 1: + subtree.nodes[pub_root]["versionDate"] = datetime.datetime.now( + datetime.UTC + ).isoformat() + subtree.nodes[pub_root]["versionInfo"] = version_info or "" + base_pub_path += f"v{version}" subtree.add_node( - "NODE_ROOT", basePath=f"/{project_id}", **tree_with_values.nodes["NODE_ROOT"] + "NODE_ROOT", basePath=base_pub_path, **tree_with_values.nodes["NODE_ROOT"] ) subtree.add_edge("NODE_ROOT", pub_root) if version and version > 1: @@ -288,6 +249,168 @@ def get_publication_subtree( {node: f"{node}_V{version}" for node in subtree if node != "NODE_ROOT"}, ) - subtree = construct_entity_filepaths(subtree, version) - subtree = update_path_mappings(subtree) - return subtree + for node in subtree.nodes: + subtree.nodes[node]["basePath"] = base_pub_path + subtree, path_mapping = update_path_mappings(subtree, project_uuid) + return subtree, path_mapping + + +def fix_publication_dates(existing_tree: nx.DiGraph, incoming_tree: nx.DiGraph): + """ + Update publication date on versioned pubs to match the initial publication date. 
+ """ + initial_pub_dates = {} + for published_entity in existing_tree.successors("NODE_ROOT"): + published_uuid = existing_tree.nodes[published_entity]["uuid"] + initial_pub_dates[published_uuid] = existing_tree.nodes[published_entity][ + "publicationDate" + ] + for node in incoming_tree: + if incoming_tree.nodes[node]["uuid"] in initial_pub_dates: + incoming_tree.nodes[node]["publicationDate"] = initial_pub_dates[ + incoming_tree.nodes[node]["uuid"] + ] + + return incoming_tree + + +def get_publication_full_tree( + project_id: str, + entity_uuids: list[str], + version: Optional[int] = None, + version_info: Optional[str] = None, +): + """Combine subtrees to create the full publishable metadata object.""" + full_path_mapping = {} + subtrees = [] + for uuid in entity_uuids: + subtree, path_mapping = get_publication_subtree( + project_id, uuid, version=version, version_info=version_info + ) + subtrees.append(subtree) + full_path_mapping = {**full_path_mapping, **path_mapping} + + full_tree = nx.compose_all(subtrees) + + if version and version > 1: + existing_pub = Publication.objects.get(project_id=project_id) + published_tree: nx.DiGraph = nx.node_link_graph(existing_pub.tree) + + # Update publication date on versioned pubs to match the initial publication date. + full_tree = fix_publication_dates(published_tree, full_tree) + full_tree = nx.compose(published_tree, full_tree) + + return full_tree, full_path_mapping + + +class ProjectFileNotFound(Exception): + """exception raised when attempting to copy a non-existent file for publication""" + + +def copy_publication_files( + path_mapping: dict, project_id: str, version: Optional[int] = None +): + """ + Copy files from My Projects to the published area on Corral. + `path_mapping` is a dict mapping project paths to their corresponding paths in the + published area. 
""" + pub_dirname = project_id + if version and version > 1: + pub_dirname = f"{project_id}v{version}" + + pub_root_dir = str(Path(f"{settings.DESIGNSAFE_PUBLISHED_PATH}") / pub_dirname) + os.makedirs(pub_root_dir, exist_ok=True) + + for src_path in path_mapping: + src_path_obj = Path(src_path) + if not src_path_obj.exists(): + raise ProjectFileNotFound(f"File not found: {src_path}") + + os.makedirs(Path(path_mapping[src_path]).parent, exist_ok=True) + + if src_path_obj.is_dir(): + shutil.copytree( + src_path, + path_mapping[src_path], + dirs_exist_ok=True, + symlinks=True, + copy_function=shutil.copy, + ) + else: + shutil.copy(src_path, path_mapping[src_path]) + + # Lock the publication directory so that non-root users can only read files and list directories + subprocess.run(["chmod", "-R", "a-x,a=rX", pub_root_dir], check=True) + + +# pylint: disable=too-many-locals, too-many-branches, too-many-statements +def publish_project( + project_id: str, + entity_uuids: list[str], + version: Optional[int] = None, + version_info: Optional[str] = None, + dry_run: bool = False, +): + """ + Publish a project. The following steps occur during publication: + - Create a published metadata record for the project and its entities + - Generate a doi for each published entity in draft form + - Transfer published files to the Published area on Corral. + - Publish the DOI to make it world-readable. 
(todo) + - ZIP publication files and metadata + - upload metadata/manifest to Fedora repo + """ + + pub_tree, path_mapping = get_publication_full_tree( + project_id, entity_uuids, version=version, version_info=version_info + ) + if dry_run: + return pub_tree, path_mapping + + new_dois = [] + + for entity_uuid in entity_uuids: + entity_meta = ProjectMetadata.objects.get(uuid=entity_uuid) + existing_dois = entity_meta.value.get("dois", []) + existing_doi = next(iter(existing_dois), None) + + datacite_json = get_datacite_json(pub_tree, entity_uuid) + datacite_resp = upsert_datacite_json(datacite_json, doi=existing_doi) + doi = datacite_resp["data"]["id"] + new_dois.append(doi) + + entity_meta.value["dois"] = [doi] + entity_meta.save() + + entity_nodes = [ + node + for node in pub_tree.nodes + if pub_tree.nodes[node]["uuid"] == entity_uuid + ] + for node in entity_nodes: + pub_tree.nodes[node]["value"]["dois"] = [doi] + + if not settings.DEBUG: + copy_publication_files(path_mapping, project_id, version=version) + for doi in new_dois: + publish_datacite_doi(doi) + + base_meta_node = next( + ( + node + for node in pub_tree.nodes + if pub_tree.nodes[node]["name"] == constants.PROJECT + and pub_tree.nodes[node].get("version", version) == version + ) + ) + base_meta_value = pub_tree.nodes[base_meta_node]["value"] + + pub_metadata, _ = Publication.objects.update_or_create( + project_id=project_id, + defaults={"value": base_meta_value, "tree": nx.node_link_data(pub_tree)}, + ) + pub_metadata.save() + + return pub_metadata diff --git a/designsafe/apps/api/projects_v2/schema_models/_field_models.py b/designsafe/apps/api/projects_v2/schema_models/_field_models.py index 867a98c3fb..d138b305d1 100644 --- a/designsafe/apps/api/projects_v2/schema_models/_field_models.py +++ b/designsafe/apps/api/projects_v2/schema_models/_field_models.py @@ -1,4 +1,5 @@ """Utiity models used in multiple field types""" + from datetime import datetime from functools import partial from typing import Annotated, 
Literal, Optional @@ -21,7 +22,9 @@ class MetadataModel(BaseModel): def model_dump(self, *args, **kwargs): # default by_alias to true for camelCase serialization - return partial(super().model_dump, by_alias=True)(*args, **kwargs) + return partial(super().model_dump, by_alias=True, exclude_none=True)( + *args, **kwargs + ) class ProjectUser(MetadataModel): diff --git a/designsafe/apps/api/projects_v2/tests/schema_integration.py b/designsafe/apps/api/projects_v2/tests/schema_integration.py index 2473d4c1b5..04dc4c0e1b 100644 --- a/designsafe/apps/api/projects_v2/tests/schema_integration.py +++ b/designsafe/apps/api/projects_v2/tests/schema_integration.py @@ -1,13 +1,22 @@ """Integration-type tests to confirm that Pydantic schemas are exhaustive.""" + import json from typing import Iterator +import networkx as nx from pydantic import BaseModel, ValidationError +from designsafe.apps.api.projects_v2.operations.datacite_operations import ( + get_datacite_json, +) from designsafe.apps.api.projects_v2.schema_models.base import BaseProject from designsafe.apps.api.projects_v2.migration_utils.graph_constructor import ( transform_pub_entities, ) from designsafe.apps.api.agave import service_account from designsafe.apps.api.publications.operations import listing as list_pubs +from designsafe.apps.api.projects_v2.models.project_metadata import ProjectMetadata +from designsafe.apps.api.projects_v2.operations.project_publish_operations import ( + get_publication_full_tree, +) def update_project(uuid, new_value): @@ -79,3 +88,27 @@ def validate_publications(): except ValidationError as exc: print(pub["projectId"]) print(exc) + + +def validate_datacite_json(): + """Attempt to generate datacite json for all publishable entities""" + graphs = ProjectMetadata.objects.filter(name="designsafe.project.graph") + for graph in graphs: + graph_obj = nx.node_link_graph(graph.value) + + project_type = graph.base_project.value["projectType"] + if project_type == "None": + continue + project_id 
= graph.base_project.value["projectId"] + + publishable_uuids = [ + graph_obj.nodes[node_id]["uuid"] + for node_id in graph_obj.successors("NODE_ROOT") + ] + if not publishable_uuids: + continue + + full_tree, _ = get_publication_full_tree(project_id, publishable_uuids) + + for pub_id in publishable_uuids: + get_datacite_json(full_tree, pub_id) diff --git a/designsafe/apps/api/projects_v2/views.py b/designsafe/apps/api/projects_v2/views.py index 0008aadb8a..fa41c2b5aa 100644 --- a/designsafe/apps/api/projects_v2/views.py +++ b/designsafe/apps/api/projects_v2/views.py @@ -15,6 +15,7 @@ from designsafe.apps.api.projects_v2.operations.project_meta_operations import ( patch_metadata, add_file_associations, + set_file_associations, remove_file_associations, set_file_tags, change_project_type, @@ -30,6 +31,9 @@ def get_search_filter(query_string): + """ + Construct a search filter for projects. + """ id_filter = models.Q(value__projectId__icontains=query_string) title_filter = models.Q(value__title__icontains=query_string) desc_filter = models.Q(value__description__icontains=query_string) @@ -51,7 +55,7 @@ def get(self, request: HttpRequest): if not request.user.is_authenticated: raise ApiException("Unauthenticated user", status=401) - projects = user.projects.order_by("last_updated") + projects = user.projects.order_by("-last_updated") if query_string: projects = projects.filter(get_search_filter(query_string)) total = user.projects.count() @@ -105,7 +109,7 @@ def put(self, request: HttpRequest, project_id: str): raise ApiException("Unauthenticated user", status=401) try: - project = user.projects.get( + user.projects.get( models.Q(uuid=project_id) | models.Q(value__projectId=project_id) ) except ProjectMetadata.DoesNotExist as exc: @@ -286,7 +290,7 @@ def delete(self, request: HttpRequest, project_id, node_id): class ProjectFileAssociationsView(BaseApiView): """View for managing associations between entities and data files.""" - def post(self, request: HttpRequest, 
project_id, entity_uuid): + def patch(self, request: HttpRequest, project_id, entity_uuid): """Associate one or more files to an entity""" file_obj_data: list[dict] = json.loads(request.body).get("fileObjs", []) file_objs = [ @@ -325,10 +329,51 @@ def post(self, request: HttpRequest, project_id, entity_uuid): "Entity is not part of the specified project", status=400 ) from exc - logger.debug(file_objs) add_file_associations(entity_uuid, file_objs) return JsonResponse({"result": "OK"}) + def put(self, request: HttpRequest, project_id, entity_uuid): + """Replace an entity's file associations with a new set.""" + file_obj_data: list[dict] = json.loads(request.body).get("fileObjs", []) + file_objs = [ + FileObj( + system=file_obj.get("system"), + path=file_obj.get("path"), + name=file_obj.get("name"), + type=file_obj.get("type"), + length=file_obj.get("length"), + last_modified=file_obj.get("lastModified"), + ) + for file_obj in file_obj_data + ] + + user = request.user + + if not entity_uuid: + raise ApiException("Entity UUID must be provided", status=400) + + if not request.user.is_authenticated: + raise ApiException("Unauthenticated user", status=401) + + try: + project = user.projects.get( + models.Q(uuid=project_id) | models.Q(value__projectId=project_id) + ) + except ProjectMetadata.DoesNotExist as exc: + raise ApiException( + "User does not have access to the requested project", status=403 + ) from exc + + try: + ProjectMetadata.objects.get(uuid=entity_uuid, base_project=project) + except ProjectMetadata.DoesNotExist as exc: + raise ApiException( + "Entity is not part of the specified project", status=400 + ) from exc + + set_file_associations(entity_uuid, file_objs) + return JsonResponse({"result": "OK"}) + def delete(self, request: HttpRequest, project_id, entity_uuid, file_path): """Remove the association between a file and an entity.""" user = request.user diff --git a/designsafe/apps/api/publications_v2/urls.py b/designsafe/apps/api/publications_v2/urls.py 
index f9d8e2bdd6..7684703605 100644 --- a/designsafe/apps/api/publications_v2/urls.py +++ b/designsafe/apps/api/publications_v2/urls.py @@ -9,7 +9,10 @@ urlpatterns = [ path("", PublicationListingView.as_view()), path("/", PublicationListingView.as_view()), - re_path(r'^(?P[A-Z\-]+-[0-9]+)(v(?P[0-9]+))?/?$', PublicationDetailView.as_view()), - #path("", PublicationDetailView.as_view()), - #path("/", PublicationDetailView.as_view()), + re_path( + r"^(?P[A-Z\-]+-[0-9]+)(v(?P[0-9]+))?/?$", + PublicationDetailView.as_view(), + ), + # path("", PublicationDetailView.as_view()), + # path("/", PublicationDetailView.as_view()), ] diff --git a/designsafe/settings/common_settings.py b/designsafe/settings/common_settings.py index 6c2c96760e..e37b4cd889 100644 --- a/designsafe/settings/common_settings.py +++ b/designsafe/settings/common_settings.py @@ -570,7 +570,8 @@ #FOR RAPID UPLOADS DESIGNSAFE_UPLOAD_PATH = '/corral-repl/tacc/NHERI/uploads' -DESIGNSAFE_PUBLISHED_PATH = '/corral-repl/tacc/NHERI/published/' +DESIGNSAFE_PROJECTS_PATH = os.environ.get('DESIGNSAFE_PROJECTS_PATH', '/corral-repl/tacc/NHERI/projects/') +DESIGNSAFE_PUBLISHED_PATH = os.environ.get('DESIGNSAFE_PUBLISHED_PATH', '/corral-repl/tacc/NHERI/published/') DATACITE_URL = os.environ.get('DATACITE_URL', 'https://doi.test.datacite.org/') DATACITE_USER = os.environ.get('DATACITE_USER') DATACITE_PASS = os.environ.get('DATACITE_PASS') diff --git a/designsafe/settings/test_settings.py b/designsafe/settings/test_settings.py index 44b03240c7..e2b3a73388 100644 --- a/designsafe/settings/test_settings.py +++ b/designsafe/settings/test_settings.py @@ -85,6 +85,7 @@ 'designsafe.apps.api', 'designsafe.apps.api.notifications', 'designsafe.apps.api.projects_v2', + 'designsafe.apps.api.publications_v2', 'designsafe.apps.api.filemeta', 'designsafe.apps.accounts', 'designsafe.apps.cms_plugins', @@ -488,6 +489,7 @@ #FOR RAPID UPLOADS DESIGNSAFE_UPLOAD_PATH = '/corral-repl/tacc/NHERI/uploads' +DESIGNSAFE_PROJECTS_PATH = 
'/corral-repl/tacc/NHERI/projects/' DESIGNSAFE_PUBLISHED_PATH = '/corral-repl/tacc/NHERI/published/' DATACITE_USER = os.environ.get('DATACITE_USER') DATACITE_PASS = os.environ.get('DATACITE_PASS')