add references to import
BernhardKoschicek committed Apr 3, 2024
1 parent f036272 commit e6b3558
Showing 7 changed files with 64 additions and 11 deletions.
2 changes: 1 addition & 1 deletion install/data_test_api.sql
@@ -24,7 +24,7 @@ INSERT INTO model.entity (cidoc_class_code, openatlas_class_name, name, descript
('E33', 'source', 'Silmarillion', NULL),
('E21', 'person', 'Frodo', 'That is Frodo'),
('E21', 'person', 'Sam', 'That is Sam'),
('E32', 'external_reference', 'https://lotr.fandom.com/', NULL),
('E31', 'external_reference', 'https://lotr.fandom.com/', NULL),
('E41', 'appellation', 'Sûza', NULL),
('E41', 'appellation', 'The ring bearer', NULL),
('E7', 'activity', 'Travel to Mordor', NULL),
15 changes: 14 additions & 1 deletion openatlas/models/imports.py
@@ -8,6 +8,8 @@

from openatlas.api.import_scripts.util import get_match_types, \
get_reference_system_by_name
from openatlas.api.resources.error import EntityDoesNotExistError
from openatlas.api.resources.model_mapper import get_entity_by_id
from openatlas.database import imports as db
from openatlas.display.util2 import sanitize
from openatlas.models.entity import Entity
@@ -142,6 +144,18 @@ def import_data(project: Project, class_: str, data: list[Any]) -> None:
g.types[int(value_type[0])],
value_type[1])

# References
if data := row.get('references'):
for references in str(data).split():
reference = references.split(';')
if len(reference) <= 2 and reference[0].isdigit():
try:
ref_entity = get_entity_by_id(int(reference[0]))
except EntityDoesNotExistError:
continue
page = reference[1] if len(reference) > 1 else None
ref_entity.link('P67', entity, page)

# External reference systems
match_types = get_match_types()
reference_systems = list(set(
@@ -199,4 +213,3 @@ def import_data(project: Project, class_: str, data: list[Any]) -> None:
entities[entry['parent_id']]['entity'].link(
'P46',
entry['entity'])
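
The block added above parses an optional references cell per row: whitespace-separated id;page tokens, where the id must be numeric and the page part is optional, and each reference that resolves to an existing entity is linked to the imported entity via CIDOC P67. A minimal standalone sketch of that parsing, assuming the same cell format as in example.csv (the real code additionally skips ids that do not resolve to an entity):

def parse_references(cell: str) -> list[tuple[int, str | None]]:
    # Split a cell like '117293;IV 118000' into (reference_id, page) pairs.
    # 118000 is an illustrative id; tokens with more than two parts or a
    # non-numeric id are skipped, mirroring the import logic above.
    pairs = []
    for token in str(cell).split():
        parts = token.split(';')
        if len(parts) <= 2 and parts[0].isdigit():
            pairs.append((int(parts[0]), parts[1] if len(parts) > 1 else None))
    return pairs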

8 changes: 4 additions & 4 deletions openatlas/static/example.csv
@@ -1,4 +1,4 @@
id,name,alias,description,begin_from,begin_to,begin_comment,end_from,end_to,end_comment,wkt,type_ids,value_types,reference_system_wikidata,reference_system_geonames,administrative_unit,historical_place
place_1,Vienna,Wien;City of Vienna,Capital of Austria,1500-01-01,1500-12-31,It was a rainy day.,2045-01-01,2049-12-31,We'll see about that.,"POLYGON((16.1203 48.30671, 16.606275 48.30671, 16.606275 48.3154, 16.1203 48.3154, 16.1203 48.30671))",80 184895,128787;-13.56,Q152419;close_match,2761369;exact_match,87,221630
place_2,London,,,,,,,,,"POINT (-0.1290 51.5053)",,,,,,
place_3,Rom,,,,,,,,,"LINESTRING (12.458533781141528 41.922205268362234, 12.53062334955289 41.917606998887024, 12.52169797441624 41.888476931243254)",,,,,,
id,name,alias,description,begin_from,begin_to,begin_comment,end_from,end_to,end_comment,wkt,type_ids,value_types,references,reference_system_wikidata,reference_system_geonames,administrative_unit,historical_place
place_1,Vienna,Wien;City of Vienna,Capital of Austria,1500-01-01,1500-12-31,It was a rainy day.,2045-01-01,2049-12-31,We'll see about that.,"POLYGON((16.1203 48.30671, 16.606275 48.30671, 16.606275 48.3154, 16.1203 48.3154, 16.1203 48.30671))",80 184895,128787;-13.56,117293;IV,Q152419;close_match,2761369;exact_match,87,221630
place_2,London,,,,,,,,,"POINT (-0.1290 51.5053)",,,,,,,
place_3,Rom,,,,,,,,,"LINESTRING (12.458533781141528 41.922205268362234, 12.53062334955289 41.917606998887024, 12.52169797441624 41.888476931243254)",,,,,,,
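
The template itself gains a references column between value_types and the reference-system columns. Each cell takes whitespace-separated id;page tokens, where the id is presumably that of an already existing reference entity and the page part is optional. The parse_references sketch above would read such a cell as follows (the second id is purely illustrative):

>>> parse_references('117293;IV 118000')
[(117293, 'IV'), (118000, None)]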
27 changes: 26 additions & 1 deletion openatlas/views/imports.py
@@ -18,6 +18,8 @@
from openatlas import app
from openatlas.api.import_scripts.util import (
get_match_types, get_reference_system_by_name)
from openatlas.api.resources.error import EntityDoesNotExistError
from openatlas.api.resources.model_mapper import get_entity_by_id
from openatlas.database.connect import Transaction
from openatlas.display.tab import Tab
from openatlas.display.table import Table
@@ -45,6 +47,8 @@
_('invalid value type values')
_('invalid coordinates')
_('invalid openatlas class')
_('invalid references')
_('invalid reference id')
_('empty names')
_('empty ids')
_('missing name column')
@@ -394,6 +398,7 @@ def check_parent(
return False
return False # pragma: no cover


def get_clean_header(
data_frame: DataFrame,
class_: str,
@@ -418,7 +423,8 @@ def get_allowed_columns(class_: str) -> dict[str, list[str]]:
if class_ not in g.view_class_mapping['reference']:
columns.extend([
'begin_from', 'begin_to', 'begin_comment',
'end_from', 'end_to', 'end_comment'])
'end_from', 'end_to', 'end_comment',
'references'])
if class_ in ['place', 'person', 'group']:
columns.append('alias')
if class_ in ['place', 'artifact']:
@@ -477,6 +483,25 @@ def check_cell_value(
checks.set_warning('invalid_value_type_values', id_)
value_types.append(';'.join(values))
value = ' '.join(value_types)
case 'references' if value:
references = []
for reference in str(value).split():
values = str(reference).split(';')
if len(values) > 2:
references.append(error_span(reference))
checks.set_warning('invalid_references', id_)
continue
if not values[0].isdigit():
values[0] = error_span(values[0])
checks.set_warning('invalid_reference_id', id_)
else:
try:
get_entity_by_id(int(values[0]))
except EntityDoesNotExistError:
values[0] = error_span(values[0])
checks.set_warning('invalid_reference_id', id_)
references.append(';'.join(values))
value = ' '.join(references)
case 'wkt' if value:
wkt_ = None
try:
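
The new references case in check_cell_value mirrors the import logic but only reports problems: a token with more than two ;-separated parts is wrapped in an error span and flagged as 'invalid references', while a token whose first part is not numeric or does not resolve to an existing entity is flagged as 'invalid reference id'. A rough sketch of that decision, with a plain callable standing in for get_entity_by_id:

def classify_reference_token(token: str, entity_exists) -> str:
    # Returns the warning a token would trigger, or 'ok' if it passes.
    parts = token.split(';')
    if len(parts) > 2:
        return 'invalid references'    # e.g. '666;213;41'
    if not parts[0].isdigit() or not entity_exists(int(parts[0])):
        return 'invalid reference id'  # e.g. '12b', or an id unknown to the database
    return 'ok'                        # e.g. '117293;IV'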
10 changes: 10 additions & 0 deletions tests/base.py
@@ -64,6 +64,16 @@ def setUp(self) -> None:
self.cursor.execute(sql_file.read())


class ExportImportTestCase(TestBaseCase):

def setUp(self) -> None:
super().setUp()
with open(
Path(app.root_path).parent / 'install' / 'data_test_api.sql',
encoding='utf8') as sql_file:
self.cursor.execute(sql_file.read())


def insert(
class_: str,
name: str,
4 changes: 2 additions & 2 deletions tests/invalid_2.csv
@@ -1,4 +1,4 @@
id,name,alias,description,begin_from,begin_to,begin_comment,end_from,end_to,end_comment,wkt,type_ids,value_types,reference_system_wikidata,reference_system_geon,administrative_unit,historical_place,not_existing_column
place_1,Vienna,Wien,Capital of Austria,not_a_date,NaT,It was a rainy day.,,2049-12-31,"We'll see about that.","MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))",666,666;12B34,123;away,juhhu;4545,777,888,
id,name,alias,description,begin_from,begin_to,begin_comment,end_from,end_to,end_comment,wkt,type_ids,value_types,references,reference_system_wikidata,reference_system_geon,administrative_unit,historical_place,not_existing_column
place_1,Vienna,Wien,Capital of Austria,not_a_date,NaT,It was a rainy day.,,2049-12-31,"We'll see about that.","MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))",666,666;12B34,666;213;41 12b 666;IV,123;away,juhhu;4545,777,888,
place_1,,,,,,,,,,"MULTILINESTRING ((BLA 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))",,,,,,
place_1,Vienna,,,,,,,,,,,,,,
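
The extra tokens in this invalid_2.csv row exercise the new warnings: '666;213;41' has too many parts, '12b' is not a numeric id, and '666;IV' is well-formed but presumably points to an id that does not exist in the test database. Run against the classification sketch above with a lookup that finds nothing:

>>> tokens = '666;213;41 12b 666;IV'.split()
>>> [classify_reference_token(t, lambda i: False) for t in tokens]
['invalid references', 'invalid reference id', 'invalid reference id']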
9 changes: 7 additions & 2 deletions tests/test_export_import.py
@@ -8,10 +8,10 @@
from openatlas import app
from openatlas.api.resources.model_mapper import get_by_cidoc_classes
from openatlas.models.export import current_date_for_filename
from tests.base import TestBaseCase
from tests.base import ExportImportTestCase, TestBaseCase


class ExportImportTest(TestBaseCase):
class ExportImportTest(ExportImportTestCase):

def test_export(self) -> None:
with app.app_context():
@@ -31,6 +31,8 @@ def test_export(self) -> None:
carantania = entity
case 'Place':
place_type = entity
case 'https://lotr.fandom.com/':
reference = entity

rv: Any = self.app.get(url_for('export_sql'))
assert b'Export SQL' in rv.data
@@ -140,6 +142,8 @@ def test_export(self) -> None:
assert b'invalid value type values' in rv.data
assert b'invalid coordinates' in rv.data
assert b'invalid reference system' in rv.data
assert b'invalid references' in rv.data
assert b'invalid reference id' in rv.data
assert b'empty names' in rv.data
assert b'double IDs in import' in rv.data

@@ -192,6 +196,7 @@ def test_export(self) -> None:
boundary_mark.id, infrastructure.id, austria.id, place_type.id]
data_frame.at[0, 'type_ids'] = ' '.join(map(str, type_ids_list))
data_frame.at[0, 'value_types'] = f'{height.id};42'
data_frame.at[0, 'references'] = f'{reference.id};IV'
data_frame.at[0, 'wkt'] = "POLYGON((16.1203 BLA, 16.606275))"
data_frame.to_csv(test_path / 'example.csv', index=False)
with open(test_path / 'example.csv', 'rb') as file:
