
Commit d57f4d5

refactored csv
BernhardKoschicek committed Dec 10, 2024
1 parent ea82fc9 commit d57f4d5
Showing 3 changed files with 24 additions and 22 deletions.
26 changes: 14 additions & 12 deletions openatlas/api/endpoints/endpoint.py
@@ -28,11 +28,11 @@ def __init__(
             self,
             entities: Entity | list[Entity],
             parser: dict[str, Any],
-            single: bool = False) -> None:
+            single_entity: bool = False) -> None:
         self.entities = entities if isinstance(entities, list) else [entities]
         self.parser = Parser(parser)
         self.pagination = None
-        self.single = single
+        self.single_entity = single_entity
         self.entities_with_links: dict[int, dict[str, Any]] = {}
         self.formated_entities = []

@@ -79,7 +79,7 @@ def reduce_entities_to_limit(self) -> None:
         self.entities = self.entities[start_index:end_index]
 
     def resolve_entities(self) -> Response | dict[str, Any]:
-        if not self.single:
+        if not self.single_entity:
             if self.parser.type_id:
                 self.entities = self.filter_by_type()
             if self.parser.search:
@@ -111,7 +111,7 @@ def resolve_entities(self) -> Response | dict[str, Any]:
             return marshal(result, self.get_entities_template(result))
 
     def get_json_output(self) -> dict[str, Any]:
-        if not self.single:
+        if not self.single_entity:
             result = {
                 "results": self.formated_entities,
                 "pagination": {
@@ -145,8 +145,10 @@ def export_csv_for_network_analysis(self) -> Response:
         with zipfile.ZipFile(archive, 'w') as zipped_file:
             for key, frame in self.get_entities_grouped_by_class().items():
                 with zipped_file.open(f'{key}.csv', 'w') as file:
-                    file.write(bytes(
-                        pd.DataFrame(data=frame).to_csv(), encoding='utf8'))
+                    file.write(
+                        bytes(
+                            pd.DataFrame(data=frame).to_csv(),
+                            encoding='utf8'))
             with zipped_file.open('links.csv', 'w') as file:
                 link_frame = [
                     build_link_dataframe(link_) for link_ in
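
The export_csv_for_network_analysis() hunk above only reflows the write call for readability. For reference, a minimal standalone sketch of the underlying pattern (writing pandas DataFrames as CSV members of an in-memory zip); the frames data and buffer handling here are illustrative, not taken from the commit:

import zipfile
from io import BytesIO

import pandas as pd

# Illustrative stand-in for get_entities_grouped_by_class()
frames = {
    'person': [{'id': 1, 'name': 'Ada'}, {'id': 2, 'name': 'Grace'}],
    'place': [{'id': 3, 'name': 'Vienna'}]}

archive = BytesIO()
with zipfile.ZipFile(archive, 'w') as zipped_file:
    for key, frame in frames.items():
        # ZipFile.open(..., 'w') yields a binary stream, so the CSV string
        # produced by pandas has to be encoded before writing.
        with zipped_file.open(f'{key}.csv', 'w') as file:
            file.write(bytes(pd.DataFrame(data=frame).to_csv(), encoding='utf8'))
archive.seek(0)  # rewind before handing the buffer to e.g. send_file()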
@@ -196,6 +198,7 @@ def remove_duplicate_entities(self) -> None:
             [e for e in self.entities if not (e.id in seen or seen_add(e.id))]
 
     def get_entities_formatted(self) -> None:
+        entities = []
         match self.parser.format:
             case 'geojson':
                 entities = [self.get_geojson()]
@@ -210,13 +213,12 @@
                 entities = [
                     self.parser.get_linked_places_entity(item)
                     for item in self.entities_with_links.values()]
-            case _ if self.parser.format in app.config['RDF_FORMATS']:
+            case _ if self.parser.format \
+                    in app.config['RDF_FORMATS']: # pragma: no cover
                 parsed_context = parse_loud_context()
                 entities = [
                     get_loud_entities(item, parsed_context)
                     for item in self.entities_with_links.values()]
-            case _:
-                entities = []
         self.formated_entities = entities
 
     def get_geojson(self) -> dict[str, Any]:
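
A note on the hunk above: hoisting entities = [] to the top of get_entities_formatted() and dropping case _: is not purely cosmetic. The RDF case now carries a guard, and a match whose cases are all specific or guarded can fall through without binding anything, which would leave entities undefined at self.formated_entities = entities. A minimal sketch of the hazard (fmt and the case values are made up):

entities: list = []  # fallback binding in case no case below matches
fmt = 'unknown'
match fmt:
    case 'geojson':
        entities = ['geojson result']
    case _ if fmt in ('n3', 'ttl'):
        entities = ['rdf result']
print(entities)  # prints [] instead of raising NameError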
@@ -258,14 +260,14 @@ def get_entities_template(self, result: dict[str, Any]) -> dict[str, Any]:
         match self.parser.format:
             case 'geojson' | 'geojson-v2':
                 template = geojson_collection_template()
-                if not self.single:
+                if not self.single_entity:
                     template = geojson_pagination()
             case 'loud':
                 template = loud_template(result)
-                if not self.single:
+                if not self.single_entity:
                     template = loud_pagination()
             case 'lp' | 'lpx' | _:
                 template = linked_places_template(self.parser)
-                if not self.single:
+                if not self.single_entity:
                     template = linked_place_pagination(self.parser)
         return template
2 changes: 1 addition & 1 deletion openatlas/api/endpoints/entities.py
@@ -62,7 +62,7 @@ def get(id_: int) -> tuple[Resource, int] | Response | dict[str, Any]:
         return Endpoint(
             ApiEntity.get_by_id(id_, types=True, aliases=True),
             entity_.parse_args(),
-            True).resolve_entities()
+            single_entity=True).resolve_entities()
 
 
 class GetLatest(Resource):
18 changes: 9 additions & 9 deletions openatlas/api/formats/csv.py
@@ -31,9 +31,9 @@ def build_dataframe(
             'geom_type': geom['type'],
             'coordinates': geom['coordinates']}
     if relations:
-        for key, value in get_csv_links(entity).items():
+        for key, value in get_csv_links(entity_dict).items():
             data[key] = ' | '.join(list(map(str, value)))
-        for key, value in get_csv_types(entity).items():
+        for key, value in get_csv_types(entity_dict).items():
             data[key] = ' | '.join(list(map(str, value)))
     return data

@@ -54,28 +54,28 @@ def build_link_dataframe(link: Link) -> dict[str, Any]:
         'end_comment': link.end_comment}
 
 
-def get_csv_types(entity: Entity) -> dict[Any, list[Any]]:
+def get_csv_types(entity_dict: dict[str, Any]) -> dict[Any, list[Any]]:
     types: dict[str, Any] = defaultdict(list)
-    for type_ in entity.types:
+    for type_ in entity_dict['entity'].types:
         hierarchy = [g.types[root].name for root in type_.root]
         value = ''
-        for link in Entity.get_links_of_entities(entity.id):
+        for link in entity_dict['links']:
             if link.range.id == type_.id and link.description:
                 value += link.description
             if link.range.id == type_.id and type_.description:
                 value += f' {type_.description}'
         key = ' > '.join(map(str, hierarchy))
-        types[key].append(f"{type_.name}: {value or ''}")
+        types[key].append(type_.name + (f": {value}" if value else ''))
     return types
 
 
-def get_csv_links(entity: Entity) -> dict[str, Any]:
+def get_csv_links(entity_dict: dict[str, Any]) -> dict[str, Any]:
     links: dict[str, Any] = defaultdict(list)
-    for link in Entity.get_links_of_entities(entity.id):
+    for link in entity_dict['links']:
         key = f"{link.property.i18n['en'].replace(' ', '_')}_" \
             f"{link.range.class_.name}"
         links[key].append(link.range.name)
-    for link in Entity.get_links_of_entities(entity.id, inverse=True):
+    for link in entity_dict['links_inverse']:
         key = f"{link.property.i18n['en'].replace(' ', '_')}_" \
             f"{link.range.class_.name}"
         if link.property.i18n_inverse['en']:
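
The csv.py changes swap the per-helper database lookups via Entity.get_links_of_entities() for a dict that already carries the entity and its links, so links are fetched once instead of on every call. Judging by the keys read off the diff ('entity', 'links', 'links_inverse'), that dict looks roughly like the sketch below; build_entity_dict() is hypothetical and only illustrates the assumed shape:

from typing import Any

def build_entity_dict(
        entity: Any,
        links: list[Any],
        links_inverse: list[Any]) -> dict[str, Any]:
    # Hypothetical helper showing the shape consumed by the refactored
    # get_csv_types()/get_csv_links(); the key names come from the diff.
    return {
        'entity': entity,                    # the Entity itself
        'links': links,                      # outgoing Link objects
        'links_inverse': links_inverse}      # incoming Link objects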
