Merge develop
AlexanderWatzinger committed Dec 5, 2024
2 parents ec7b7fb + ebe5a16 commit 172b137
Showing 127 changed files with 2,485 additions and 2,564 deletions.
2 changes: 1 addition & 1 deletion .pylintrc
@@ -1,7 +1,7 @@
[MASTER]
ignore=openatlas.wsgi
disable=C0111, broad-except, duplicate-code
good-names=bc, e, ex, f, i, id, ip, j, js, k, l, Run, rv, to, x, y, _
good-names=bc, c, e, ex, f, i, id, ip, j, js, k, l, op, Run, rv, to, x, y, _
min-public-methods=1 # default=2 but isn't useful when working with inheritance

[FORMAT]
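The new good-names entries whitelist the short identifiers c and op that appear in the reworked code further down in this merge. A minimal sketch of the kind of code this now permits without a pylint invalid-name (C0103) warning; the function and values are made up for illustration:

```python
# Illustrative only: `c` and `op` are now whitelisted in .pylintrc,
# so pylint no longer flags them as too-short names.
def count_matches(classes: list[str], op: str) -> int:
    total = 0
    for c in classes:
        if (op == 'equal' and c == 'place') or (op == 'like' and 'place' in c):
            total += 1
    return total


print(count_matches(['place', 'marketplace', 'person'], 'like'))  # 2
```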
Empty file modified: files/export/.gitignore (100644 → 100755)
Empty file modified: files/processed_images/resized/.gitignore (100644 → 100755)
Empty file modified: files/uploads/.gitignore (100644 → 100755)
4 changes: 2 additions & 2 deletions install/data_test.sql
@@ -1,9 +1,9 @@
-- Create test user
INSERT INTO web.user (group_id, username, password, active, email, password_reset_code, password_reset_date, unsubscribe_code)
VALUES
((SELECT id FROM web.group WHERE name = 'admin'), 'Alice', '$2b$12$yPQCBsSQdZxESEz79SFiOOZBLG2GZ9Cc2rzVMgZxXyW2y3T499LYK', True, '[email protected]', '1234', current_timestamp, NULL),
((SELECT id FROM web.group WHERE name = 'admin'), 'Alice', '$2b$12$yPQCBsSQdZxESEz79SFiOOZBLG2GZ9Cc2rzVMgZxXyW2y3T499LYK', True, '[email protected]', '123', current_timestamp, NULL),
((SELECT id FROM web.group WHERE name = 'admin'), 'Inactive', '$2b$12$yPQCBsSQdZxESEz79SFiOOZBLG2GZ9Cc2rzVMgZxXyW2y3T499LYK', False, '[email protected]', NULL, NULL, NULL),
((SELECT id FROM web.group WHERE name = 'manager'), 'Manager', '$2b$12$yPQCBsSQdZxESEz79SFiOOZBLG2GZ9Cc2rzVMgZxXyW2y3T499LYK', True, '[email protected]', '5678', '2020-02-02', '1234'),
((SELECT id FROM web.group WHERE name = 'manager'), 'Manager', '$2b$12$yPQCBsSQdZxESEz79SFiOOZBLG2GZ9Cc2rzVMgZxXyW2y3T499LYK', True, '[email protected]', '5678', '2020-02-02', '123'),
((SELECT id FROM web.group WHERE name = 'contributor'), 'Contributor', '$2b$12$yPQCBsSQdZxESEz79SFiOOZBLG2GZ9Cc2rzVMgZxXyW2y3T499LYK', True, '[email protected]', NULL, NULL, NULL),
((SELECT id FROM web.group WHERE name = 'editor'), 'Editor', '$2b$12$yPQCBsSQdZxESEz79SFiOOZBLG2GZ9Cc2rzVMgZxXyW2y3T499LYK', True, '[email protected]', NULL, NULL, NULL),
((SELECT id FROM web.group WHERE name = 'readonly'), 'Readonly', '$2b$12$yPQCBsSQdZxESEz79SFiOOZBLG2GZ9Cc2rzVMgZxXyW2y3T499LYK', True, '[email protected]', NULL, NULL, NULL);
3 changes: 2 additions & 1 deletion install/data_test_api.sql
@@ -112,7 +112,8 @@ VALUES
('P67', (SELECT id FROM model.entity WHERE name='Shire'), (SELECT id FROM model.entity WHERE name='GeoNames'), '2761369', (SELECT id FROM model.entity WHERE name='close match') ),
('OA7', (SELECT id FROM model.entity WHERE name='Frodo'), (SELECT id FROM model.entity WHERE name='Sam'), NULL, (SELECT id FROM model.entity WHERE name='Economical') ),
('P67', (SELECT id FROM model.entity WHERE name='Shire'), (SELECT id FROM model.entity WHERE name='https://lotr.fandom.com/'), 'Fandom Wiki of lord of the rings', NULL),
('P2', (SELECT id FROM model.entity WHERE name='Height'), (SELECT id FROM model.entity WHERE name='Shire'), '23.0', NULL );
('P2', (SELECT id FROM model.entity WHERE name='Height'), (SELECT id FROM model.entity WHERE name='Shire'), '23.0', NULL ),
('P2', (SELECT id FROM model.entity WHERE name='Weight'), (SELECT id FROM model.entity WHERE name='Shire'), '999.0', NULL );

INSERT INTO web.entity_profile_image (entity_id, image_id)
VALUES ( (SELECT id FROM model.entity WHERE name='Shire'), (SELECT id FROM model.entity WHERE name='Picture with a License') )
45 changes: 18 additions & 27 deletions openatlas/api/endpoints/endpoint.py
@@ -11,7 +11,7 @@
from openatlas import app
from openatlas.api.endpoints.parser import Parser
from openatlas.api.formats.csv import (
build_entity_dataframe, build_link_dataframe)
build_dataframe, build_link_dataframe)
from openatlas.api.formats.loud import get_loud_entities
from openatlas.api.resources.api_entity import ApiEntity
from openatlas.api.resources.resolve_endpoints import (
@@ -44,8 +44,7 @@ def resolve_entities(self) -> Response | dict[str, Any]:
self.remove_duplicate_entities()
self.sorting()
result = self.get_json_output()
if (self.parser.format
in app.config['RDF_FORMATS']): # pragma: no cover
if self.parser.format in app.config['RDF_FORMATS']: # pragma: no cover
return Response(
self.parser.rdf_output(result['results']),
mimetype=app.config['RDF_FORMATS'][self.parser.format])
@@ -100,8 +99,7 @@ def filter_by_type(self) -> list[Entity]:
return result

def export_entities_csv(self) -> Response:
frames = \
[build_entity_dataframe(e, relations=True) for e in self.entities]
frames = [build_dataframe(e, relations=True) for e in self.entities]
return Response(
pd.DataFrame(data=frames).to_csv(),
mimetype='text/csv',
@@ -118,7 +116,7 @@ def export_csv_for_network_analysis(self) -> Response:
link_frame = [
build_link_dataframe(link_) for link_ in
(self.link_parser_check()
+ self.link_parser_check_inverse())]
+ self.link_parser_check(inverse=True))]
file.write(bytes(
pd.DataFrame(data=link_frame).to_csv(), encoding='utf8'))
return Response(
@@ -134,25 +132,18 @@ def get_entities_grouped_by_class(self) -> dict[str, Any]:
sorted(entities, key=lambda entity: entity.class_.name),
key=lambda entity: entity.class_.name):
grouped_entities[class_] = \
[build_entity_dataframe(entity) for entity in entities_]
[build_dataframe(entity) for entity in entities_]
return grouped_entities

def link_parser_check(self) -> list[Link]:
if any(i in ['relations', 'types', 'depictions', 'links', 'geometry']
for i in self.parser.show):
return Entity.get_links_of_entities(
[entity.id for entity in self.entities],
self.parser.get_properties_for_links())
return []

def link_parser_check_inverse(self) -> list[Link]:
if any(i in ['relations', 'types', 'depictions', 'links', 'geometry']
for i in self.parser.show):
return Entity.get_links_of_entities(
def link_parser_check(self, inverse: bool = False) -> list[Link]:
links = []
show_ = {'relations', 'types', 'depictions', 'links', 'geometry'}
if set(self.parser.show) & show_:
links = Entity.get_links_of_entities(
[entity.id for entity in self.entities],
self.parser.get_properties_for_links(),
inverse=True)
return []
inverse=inverse)
return links

def sorting(self) -> None:
if 'latest' in request.path:
@@ -172,8 +163,8 @@ def remove_duplicate_entities(self) -> None:
def get_json_output(self) -> dict[str, Any]:
total = [e.id for e in self.entities]
count = len(total)
self.parser.limit = count \
if self.parser.limit == 0 else self.parser.limit
if self.parser.limit == 0:
self.parser.limit = count
e_list = []
if total:
e_list = list(itertools.islice(total, 0, None, self.parser.limit))
@@ -211,7 +202,7 @@ def get_entities_formatted(self) -> list[dict[str, Any]]:
'links_inverse': []}
for link_ in self.link_parser_check():
entities_dict[link_.domain.id]['links'].append(link_)
for link_ in self.link_parser_check_inverse():
for link_ in self.link_parser_check(inverse=True):
entities_dict[link_.range.id]['links_inverse'].append(link_)
if self.parser.format == 'loud' \
or self.parser.format in app.config['RDF_FORMATS']:
@@ -242,9 +233,9 @@ def get_geojson(self) -> dict[str, Any]:

def get_geojson_v2(self) -> dict[str, Any]:
out = []
links = [
link_ for link_ in self.link_parser_check() if link_.property.code
in ['P53', 'P74', 'OA8', 'OA9', 'P7', 'P26', 'P27']]
property_codes = ['P53', 'P74', 'OA8', 'OA9', 'P7', 'P26', 'P27']
link_parser = self.link_parser_check()
links = [l for l in link_parser if l.property.code in property_codes]
for entity in self.entities:
entity_links = [
link_ for link_ in links if link_.domain.id == entity.id]
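The two near-duplicate methods link_parser_check and link_parser_check_inverse are folded into one method with an inverse flag, and the duplicated membership test becomes a set intersection. A standalone re-creation of the new guard, with made-up show values; the commented call pattern mirrors the call sites changed elsewhere in this diff:

```python
# Made-up `show` values; only the guard logic mirrors link_parser_check().
show = ['types', 'geometry']
required = {'relations', 'types', 'depictions', 'links', 'geometry'}

needs_links = bool(set(show) & required)  # replaces the duplicated any(...) checks
print(needs_links)  # True

# Call sites now pass a flag instead of choosing between two methods:
#   links = self.link_parser_check()
#   links_inverse = self.link_parser_check(inverse=True)
```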
15 changes: 6 additions & 9 deletions openatlas/api/endpoints/entities.py
@@ -15,28 +15,25 @@

class GetByCidocClass(Resource):
@staticmethod
def get(cidoc_class: str) \
-> tuple[Resource, int] | Response | dict[str, Any]:
def get(class_: str) -> tuple[Resource, int] | Response | dict[str, Any]:
return Endpoint(
ApiEntity.get_by_cidoc_classes([cidoc_class]),
ApiEntity.get_by_cidoc_classes([class_]),
entity_.parse_args()).resolve_entities()


class GetBySystemClass(Resource):
@staticmethod
def get(system_class: str) \
-> tuple[Resource, int] | Response | dict[str, Any]:
def get(class_: str) -> tuple[Resource, int] | Response | dict[str, Any]:
return Endpoint(
ApiEntity.get_by_system_classes([system_class]),
ApiEntity.get_by_system_classes([class_]),
entity_.parse_args()).resolve_entities()


class GetByViewClass(Resource):
@staticmethod
def get(view_class: str) \
-> tuple[Resource, int] | Response | dict[str, Any]:
def get(class_: str) -> tuple[Resource, int] | Response | dict[str, Any]:
return Endpoint(
ApiEntity.get_by_view_classes([view_class]),
ApiEntity.get_by_view_classes([class_]),
entity_.parse_args()).resolve_entities()


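All three resources now take an argument uniformly named class_. In Flask-RESTful the keyword passed to get() comes from the URL variable, so the rename implies matching <string:class_> placeholders in the route registrations. The route path below is a placeholder for illustration, not the project's actual URL scheme:

```python
from flask import Flask
from flask_restful import Api, Resource

app = Flask(__name__)
api = Api(app)


class GetByViewClass(Resource):
    @staticmethod
    def get(class_: str) -> dict:
        # Flask passes the URL variable as a keyword argument, so the
        # parameter name must match the placeholder in the route.
        return {'view_class': class_}


# Placeholder path for illustration; OpenAtlas registers its own URLs.
api.add_resource(GetByViewClass, '/api/view_class/<string:class_>')
```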
72 changes: 40 additions & 32 deletions openatlas/api/endpoints/parser.py
@@ -18,10 +18,9 @@
from openatlas.api.resources.error import (
EntityDoesNotExistError, InvalidSearchSyntax, InvalidSearchValueError,
LastEntityError, UrlNotValid)
from openatlas.api.resources.search import (
get_search_values, search_entity, value_to_be_searched)
from openatlas.api.resources.search import get_search_values, search_entity
from openatlas.api.resources.search_validation import (
check_if_date_search, check_search_parameters)
check_if_date_search, validate_search_parameters)
from openatlas.api.resources.templates import (
geojson_pagination, linked_place_pagination, loud_pagination)
from openatlas.api.resources.util import (
@@ -39,7 +38,7 @@ class Parser:
sort = None
column: str = ''
search: str = ''
search_param: list[dict[str, Any]]
search_param: list[list[dict[str, Any]]]
limit: int = 0
first = None
last = None
@@ -80,12 +79,12 @@ def set_search_param(self) -> None:
url_parameters = [ast.literal_eval(i) for i in self.search]
except Exception as e:
raise InvalidSearchSyntax from e
for item in url_parameters:
for category, value_list in item.items():
for search in url_parameters:
for category, value_list in search.items():
for values in value_list:
values['logicalOperator'] = (
values.get('logicalOperator') or 'or')
check_search_parameters(category, values)
values['logicalOperator'] = \
values.get('logicalOperator') or 'or'
validate_search_parameters(category, values)
if category in app.config['INT_VALUES']:
values['values'] = list(map(int, values['values']))
if check_if_date_search(category):
@@ -97,31 +96,41 @@ def set_search_param(self) -> None:
category,
values["values"]) from e

for item in url_parameters:
for category, values in item.items():
for parameter in values:
self.search_param.append({
"search_values": get_search_values(
category,
parameter),
"logical_operator": parameter['logicalOperator'],
"operator": 'equal' if category == "valueTypeID"
else parameter['operator'],
for search in url_parameters:
search_parameter = []
for category, value_list in search.items():
for values in value_list:
links = []
is_comparable = check_if_date_search(category)
if category == 'valueTypeID':
is_comparable = True
for value in values["values"]:
links.append(
Entity.get_links_of_entities(
value[0],
inverse=True))
search_parameter.append({
"search_values": get_search_values(category, values),
"logical_operator": values['logicalOperator'],
"operator": values['operator'],
"category": category,
"is_date": check_if_date_search(category)})
"is_comparable": is_comparable,
"value_type_links":
flatten_list_and_remove_duplicates(links)})
self.search_param.append(search_parameter)

def search_filter(self, entity: Entity) -> bool:
for i in self.search_param:
if not search_entity(
entity_values=value_to_be_searched(entity, i['category']),
operator_=i['operator'],
search_values=i['search_values'],
logical_operator=i['logical_operator'],
is_comparable=i['is_date']):
return False
return True
found = False
for set_of_param in self.search_param:
for param in set_of_param:
if not search_entity(entity, param):
found = False
break
found = True
return found

def get_properties_for_links(self) -> Optional[list[str]]:
def get_properties_for_links(self) -> list[str]:
codes: list[str] = []
if self.relation_type:
codes = self.relation_type
if 'geometry' in self.show:
@@ -130,8 +139,7 @@ def get_properties_for_links(self) -> Optional[list[str]]:
codes.append('P2')
if any(i in ['depictions', 'links'] for i in self.show):
codes.append('P67')
return codes
return None
return codes

def get_key(self, entity: Entity) -> datetime64 | str:
if self.column == 'cidoc_class':
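search_param is now a list of parameter groups, one inner list per search URL parameter, and each entry carries an is_comparable flag plus, for valueTypeID, the pre-fetched inverse links. A small sketch of the parsing step only; the type id 123 and value 500.0 are placeholders, and only the data shape mirrors set_search_param:

```python
import ast

# Hypothetical `search` URL parameter: `values` pairs are [type_id, value],
# which is why the parser reads value[0] when fetching value-type links.
raw = "{'valueTypeID': [{'operator': 'equal', 'values': [[123, 500.0]]}]}"

parsed = ast.literal_eval(raw)  # same parsing step as set_search_param()
for category, value_list in parsed.items():
    for values in value_list:
        # default applied in the diff when logicalOperator is missing
        values['logicalOperator'] = values.get('logicalOperator') or 'or'

assert parsed['valueTypeID'][0]['logicalOperator'] == 'or'
assert parsed['valueTypeID'][0]['values'][0][0] == 123  # the type id half
```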
2 changes: 1 addition & 1 deletion openatlas/api/formats/csv.py
@@ -11,7 +11,7 @@
from openatlas.models.gis import Gis


def build_entity_dataframe(
def build_dataframe(
entity: Entity,
relations: bool = False) -> dict[str, Any]:
geom = get_csv_geom_entry(entity)
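The renamed build_dataframe still returns one flat dict per entity; the CSV endpoints collect those dicts and hand them to pandas, as in export_entities_csv above. A minimal sketch with made-up rows standing in for real entities:

```python
import pandas as pd

# Made-up rows standing in for build_dataframe() results.
frames = [
    {'id': 1, 'name': 'Shire', 'system_class': 'place'},
    {'id': 2, 'name': 'Frodo', 'system_class': 'person'}]

csv_text = pd.DataFrame(data=frames).to_csv()  # same call as the endpoint
print(csv_text.splitlines()[0])  # ',id,name,system_class'
```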
3 changes: 2 additions & 1 deletion openatlas/api/resources/api_entity.py
@@ -47,7 +47,8 @@ def get_by_system_classes(classes: list[str]) -> list[Entity]:
def get_linked_entities_with_properties(
id_: int,
properties: list[str]) -> list[Entity]:
properties = list(g.properties) if 'all' in properties else properties
if 'all' in properties:
properties = list(g.properties)
entity = ApiEntity.get_by_id(id_, types=True)
return ([entity]
+ entity.get_linked_entities_recursive(properties, types=True)
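The ternary becomes an explicit if-block: passing 'all' expands to every known property code before the recursive link walk. A tiny re-creation with placeholder codes; in OpenAtlas, g.properties plays the role of this registry of CIDOC CRM properties:

```python
# Placeholder property codes standing in for g.properties.
available_properties = {
    'P2': 'has type',
    'P53': 'has former or current location',
    'P67': 'refers to'}


def expand(properties: list[str]) -> list[str]:
    if 'all' in properties:
        properties = list(available_properties)
    return properties


assert expand(['all']) == ['P2', 'P53', 'P67']
assert expand(['P67']) == ['P67']
```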