diff --git a/install/data_test_api.sql b/install/data_test_api.sql
index e5e08d1d3..b58720988 100644
--- a/install/data_test_api.sql
+++ b/install/data_test_api.sql
@@ -24,7 +24,7 @@ INSERT INTO model.entity (cidoc_class_code, openatlas_class_name, name, descript
     ('E33', 'source', 'Silmarillion', NULL),
     ('E21', 'person', 'Frodo', 'That is Frodo'),
     ('E21', 'person', 'Sam', 'That is Sam'),
-    ('E32', 'external_reference', 'https://lotr.fandom.com/', NULL),
+    ('E31', 'external_reference', 'https://lotr.fandom.com/', NULL),
     ('E41', 'appellation', 'Sûza', NULL),
     ('E41', 'appellation', 'The ring bearer', NULL),
     ('E7', 'activity', 'Travel to Mordor', NULL),
diff --git a/openatlas/models/imports.py b/openatlas/models/imports.py
index 58f61a26b..c3a94d27c 100644
--- a/openatlas/models/imports.py
+++ b/openatlas/models/imports.py
@@ -8,6 +8,8 @@
 
 from openatlas.api.import_scripts.util import get_match_types, \
     get_reference_system_by_name
+from openatlas.api.resources.error import EntityDoesNotExistError
+from openatlas.api.resources.model_mapper import get_entity_by_id
 from openatlas.database import imports as db
 from openatlas.display.util2 import sanitize
 from openatlas.models.entity import Entity
@@ -142,6 +144,18 @@ def import_data(project: Project, class_: str, data: list[Any]) -> None:
                     g.types[int(value_type[0])],
                     value_type[1])
 
+        # References
+        if data := row.get('references'):
+            for references in str(data).split():
+                reference = references.split(';')
+                if len(reference) <= 2 and reference[0].isdigit():
+                    try:
+                        ref_entity = get_entity_by_id(int(reference[0]))
+                    except EntityDoesNotExistError:
+                        continue
+                    page = reference[1] if len(reference) > 1 else None
+                    ref_entity.link('P67', entity, page)
+
         # External reference systems
         match_types = get_match_types()
         reference_systems = list(set(
@@ -199,4 +213,3 @@ def import_data(project: Project, class_: str, data: list[Any]) -> None:
             entities[entry['parent_id']]['entity'].link(
                 'P46', entry['entity'])
 
-
diff --git a/openatlas/static/example.csv b/openatlas/static/example.csv
index 531fa5d09..8e478fb28 100644
--- a/openatlas/static/example.csv
+++ b/openatlas/static/example.csv
@@ -1,4 +1,4 @@
-id,name,alias,description,begin_from,begin_to,begin_comment,end_from,end_to,end_comment,wkt,type_ids,value_types,reference_system_wikidata,reference_system_geonames,administrative_unit,historical_place
-place_1,Vienna,Wien;City of Vienna,Capital of Austria,1500-01-01,1500-12-31,It was a rainy day.,2045-01-01,2049-12-31,We'll see about that.,"POLYGON((16.1203 48.30671, 16.606275 48.30671, 16.606275 48.3154, 16.1203 48.3154, 16.1203 48.30671))",80 184895,128787;-13.56,Q152419;close_match,2761369;exact_match,87,221630
-place_2,London,,,,,,,,,"POINT (-0.1290 51.5053)",,,,,,
-place_3,Rom,,,,,,,,,"LINESTRING (12.458533781141528 41.922205268362234, 12.53062334955289 41.917606998887024, 12.52169797441624 41.888476931243254)",,,,,,
+id,name,alias,description,begin_from,begin_to,begin_comment,end_from,end_to,end_comment,wkt,type_ids,value_types,references,reference_system_wikidata,reference_system_geonames,administrative_unit,historical_place
+place_1,Vienna,Wien;City of Vienna,Capital of Austria,1500-01-01,1500-12-31,It was a rainy day.,2045-01-01,2049-12-31,We'll see about that.,"POLYGON((16.1203 48.30671, 16.606275 48.30671, 16.606275 48.3154, 16.1203 48.3154, 16.1203 48.30671))",80 184895,128787;-13.56,117293;IV,Q152419;close_match,2761369;exact_match,87,221630
+place_2,London,,,,,,,,,"POINT (-0.1290 51.5053)",,,,,,,
+place_3,Rom,,,,,,,,,"LINESTRING (12.458533781141528 41.922205268362234, 12.53062334955289 41.917606998887024, 12.52169797441624 41.888476931243254)",,,,,,,
diff --git a/openatlas/views/imports.py b/openatlas/views/imports.py
index cac7c346d..111be9e5a 100644
--- a/openatlas/views/imports.py
+++ b/openatlas/views/imports.py
@@ -18,6 +18,8 @@
 from openatlas import app
 from openatlas.api.import_scripts.util import (
     get_match_types, get_reference_system_by_name)
+from openatlas.api.resources.error import EntityDoesNotExistError
+from openatlas.api.resources.model_mapper import get_entity_by_id
 from openatlas.database.connect import Transaction
 from openatlas.display.tab import Tab
 from openatlas.display.table import Table
@@ -45,6 +47,8 @@
 _('invalid value type values')
 _('invalid coordinates')
 _('invalid openatlas class')
+_('invalid references')
+_('invalid reference id')
 _('empty names')
 _('empty ids')
 _('missing name column')
@@ -394,6 +398,7 @@ def check_parent(
             return False
     return False  # pragma: no cover
 
+
 def get_clean_header(
         data_frame: DataFrame,
         class_: str,
@@ -418,7 +423,8 @@ def get_allowed_columns(class_: str) -> dict[str, list[str]]:
     if class_ not in g.view_class_mapping['reference']:
         columns.extend([
             'begin_from', 'begin_to', 'begin_comment',
-            'end_from', 'end_to', 'end_comment'])
+            'end_from', 'end_to', 'end_comment',
+            'references'])
     if class_ in ['place', 'person', 'group']:
         columns.append('alias')
     if class_ in ['place', 'artifact']:
@@ -477,6 +483,25 @@ def check_cell_value(
                     checks.set_warning('invalid_value_type_values', id_)
                 value_types.append(';'.join(values))
             value = ' '.join(value_types)
+        case 'references' if value:
+            references = []
+            for reference in str(value).split():
+                values = str(reference).split(';')
+                if len(values) > 2:
+                    references.append(error_span(reference))
+                    checks.set_warning('invalid_references', id_)
+                    continue
+                if not values[0].isdigit():
+                    values[0] = error_span(values[0])
+                    checks.set_warning('invalid_reference_id', id_)
+                else:
+                    try:
+                        get_entity_by_id(int(values[0]))
+                    except EntityDoesNotExistError:
+                        values[0] = error_span(values[0])
+                        checks.set_warning('invalid_reference_id', id_)
+                references.append(';'.join(values))
+            value = ' '.join(references)
         case 'wkt' if value:
             wkt_ = None
             try:
diff --git a/tests/base.py b/tests/base.py
index c1f9b7aae..d01bfb4da 100644
--- a/tests/base.py
+++ b/tests/base.py
@@ -64,6 +64,16 @@ def setUp(self) -> None:
             self.cursor.execute(sql_file.read())
 
 
+class ExportImportTestCase(TestBaseCase):
+
+    def setUp(self) -> None:
+        super().setUp()
+        with open(
+                Path(app.root_path).parent / 'install' / 'data_test_api.sql',
+                encoding='utf8') as sql_file:
+            self.cursor.execute(sql_file.read())
+
+
 def insert(
         class_: str,
         name: str,
diff --git a/tests/invalid_2.csv b/tests/invalid_2.csv
index dd0d3a41f..d42077ae8 100644
--- a/tests/invalid_2.csv
+++ b/tests/invalid_2.csv
@@ -1,4 +1,4 @@
-id,name,alias,description,begin_from,begin_to,begin_comment,end_from,end_to,end_comment,wkt,type_ids,value_types,reference_system_wikidata,reference_system_geon,administrative_unit,historical_place,not_existing_column
-place_1,Vienna,Wien,Capital of Austria,not_a_date,NaT,It was a rainy day.,,2049-12-31,"We'll see about that.","MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))",666,666;12B34,123;away,juhhu;4545,777,888, 
+id,name,alias,description,begin_from,begin_to,begin_comment,end_from,end_to,end_comment,wkt,type_ids,value_types,references,reference_system_wikidata,reference_system_geon,administrative_unit,historical_place,not_existing_column
+place_1,Vienna,Wien,Capital of Austria,not_a_date,NaT,It was a rainy day.,,2049-12-31,"We'll see about that.","MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))",666,666;12B34,666;213;41 12b 666;IV,123;away,juhhu;4545,777,888,
 place_1,,,,,,,,,,"MULTILINESTRING ((BLA 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))",,,,,,
 place_1,Vienna,,,,,,,,,,,,,,
diff --git a/tests/test_export_import.py b/tests/test_export_import.py
index 65629b0c7..a8b38fac9 100644
--- a/tests/test_export_import.py
+++ b/tests/test_export_import.py
@@ -8,10 +8,10 @@
 from openatlas import app
 from openatlas.api.resources.model_mapper import get_by_cidoc_classes
 from openatlas.models.export import current_date_for_filename
-from tests.base import TestBaseCase
+from tests.base import ExportImportTestCase, TestBaseCase
 
 
-class ExportImportTest(TestBaseCase):
+class ExportImportTest(ExportImportTestCase):
 
     def test_export(self) -> None:
         with app.app_context():
@@ -31,6 +31,8 @@ def test_export(self) -> None:
                     carantania = entity
                 case 'Place':
                     place_type = entity
+                case 'https://lotr.fandom.com/':
+                    reference = entity
 
         rv: Any = self.app.get(url_for('export_sql'))
         assert b'Export SQL' in rv.data
@@ -140,6 +142,8 @@ def test_export(self) -> None:
             assert b'invalid value type values' in rv.data
             assert b'invalid coordinates' in rv.data
             assert b'invalid reference system' in rv.data
+            assert b'invalid references' in rv.data
+            assert b'invalid reference id' in rv.data
             assert b'empty names' in rv.data
             assert b'double IDs in import' in rv.data
 
@@ -192,6 +196,7 @@ def test_export(self) -> None:
                 boundary_mark.id, infrastructure.id, austria.id, place_type.id]
             data_frame.at[0, 'type_ids'] = ' '.join(map(str, type_ids_list))
            data_frame.at[0, 'value_types'] = f'{height.id};42'
+            data_frame.at[0, 'references'] = f'{reference.id};IV'
             data_frame.at[0, 'wkt'] = "POLYGON((16.1203 BLA, 16.606275))"
             data_frame.to_csv(test_path / 'example.csv', index=False)
             with open(test_path / 'example.csv', 'rb') as file:
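For reviewers, a minimal sketch (not part of the patch) of the cell format the new `references` column is expected to carry, mirroring the parsing added to `openatlas/models/imports.py` above: space-separated entries, each an entity id optionally followed by `;page`. The helper name `parse_references_cell` is hypothetical and only illustrates the split logic.

# Illustrative sketch only, not part of the patch: mirrors how the import
# code above splits a 'references' cell into (reference entity id, page) pairs.
def parse_references_cell(cell: str) -> list[tuple[int, str | None]]:
    pairs = []
    for chunk in str(cell).split():      # entries are separated by spaces
        parts = chunk.split(';')         # each entry is "id" or "id;page"
        if len(parts) <= 2 and parts[0].isdigit():
            pairs.append((int(parts[0]), parts[1] if len(parts) > 1 else None))
    return pairs


# A cell like the one added to example.csv ("117293;IV") yields one pair.
assert parse_references_cell('117293;IV') == [(117293, 'IV')]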