From 9c87d1b3b3fc9a40483265008147feae3c919d59 Mon Sep 17 00:00:00 2001 From: Eric Nguyen Date: Thu, 21 Mar 2024 11:16:21 +0100 Subject: [PATCH] feat: Add advanced search API (#438) * feat: add advanced search on entry nodes API * feat: add pagination on search API * fix: add text search order in search API * fix: fix multi-word search * refactor: enable multi-word whitespaced search * chore: update Client SDK * chore: update frontend to support new API * style: lint backend * feat: return filters AST in search API call * refactor: simplify filter search term parsing * refactor: respond to comments * chore: regenerate SDK * style: lint * docs: add function doc * fix: reorder search response * refactor: add parsed query string to search response * docs: add code docs * docs: add filter docs --- backend/editor/api.py | 33 +- .../editor/controllers/search_controller.py | 310 ++++++++++++++++++ backend/editor/entries.py | 61 ---- backend/editor/models/node_models.py | 52 ++- backend/editor/models/search_models.py | 140 ++++++++ backend/openapi/openapi.json | 310 +++++++++++++----- backend/tests/test_api.py | 2 +- docker-compose.yml | 11 +- .../parser/parser.py | 4 +- .../test_parse_unparse_integration.py | 6 +- .../integration/test_parser_integration.py | 2 +- .../src/backend-types/types.ts | 2 - taxonomy-editor-frontend/src/client/index.ts | 8 + .../client/models/AncestorFilterSearchTerm.ts | 8 + .../client/models/ChildFilterSearchTerm.ts | 8 + .../models/DescendantFilterSearchTerm.ts | 8 + .../src/client/models/EntryNode.ts | 14 + .../client/models/EntryNodeSearchResult.ts | 25 ++ .../src/client/models/IsFilterSearchTerm.ts | 8 + .../client/models/LanguageFilterSearchTerm.ts | 10 + .../client/models/ParentFilterSearchTerm.ts | 8 + .../src/client/services/DefaultService.ts | 100 +++--- .../pages/project/search/SearchResults.tsx | 20 +- 23 files changed, 909 insertions(+), 241 deletions(-) create mode 100644 backend/editor/controllers/search_controller.py create mode 
100644 backend/editor/models/search_models.py create mode 100644 taxonomy-editor-frontend/src/client/models/AncestorFilterSearchTerm.ts create mode 100644 taxonomy-editor-frontend/src/client/models/ChildFilterSearchTerm.ts create mode 100644 taxonomy-editor-frontend/src/client/models/DescendantFilterSearchTerm.ts create mode 100644 taxonomy-editor-frontend/src/client/models/EntryNode.ts create mode 100644 taxonomy-editor-frontend/src/client/models/EntryNodeSearchResult.ts create mode 100644 taxonomy-editor-frontend/src/client/models/IsFilterSearchTerm.ts create mode 100644 taxonomy-editor-frontend/src/client/models/LanguageFilterSearchTerm.ts create mode 100644 taxonomy-editor-frontend/src/client/models/ParentFilterSearchTerm.ts diff --git a/backend/editor/api.py b/backend/editor/api.py index dd2e9371..509e4414 100644 --- a/backend/editor/api.py +++ b/backend/editor/api.py @@ -8,7 +8,7 @@ # Required imports # ------------------------------------------------------------------------------------# from datetime import datetime -from typing import Optional +from typing import Annotated, Optional # FastAPI from fastapi import ( @@ -16,6 +16,7 @@ FastAPI, Form, HTTPException, + Query, Request, Response, UploadFile, @@ -31,7 +32,7 @@ from . 
import graph_db # Controller imports -from .controllers import project_controller +from .controllers import project_controller, search_controller from .entries import TaxonomyGraph # Custom exceptions @@ -40,6 +41,7 @@ # Data model imports from .models.node_models import EntryNodeCreate, ErrorNode, Footer, Header, NodeType from .models.project_models import Project, ProjectEdit, ProjectStatus +from .models.search_models import EntryNodeSearchResult from .scheduler import scheduler_lifespan # -----------------------------------------------------------------------------------# @@ -231,16 +233,6 @@ async def find_one_entry_children(response: Response, branch: str, taxonomy_name return one_entry_children -@app.get("/{taxonomy_name}/{branch}/entry") -async def find_all_entries(response: Response, branch: str, taxonomy_name: str): - """ - Get all entries within taxonomy - """ - taxonomy = TaxonomyGraph(branch, taxonomy_name) - all_entries = await taxonomy.get_all_nodes("ENTRY") - return all_entries - - @app.get("/{taxonomy_name}/{branch}/synonym/{synonym}") async def find_one_synonym(response: Response, branch: str, taxonomy_name: str, synonym: str): """ @@ -317,10 +309,21 @@ async def find_all_errors(branch: str, taxonomy_name: str) -> ErrorNode: return result -@app.get("/{taxonomy_name}/{branch}/search") -async def search_node(response: Response, branch: str, taxonomy_name: str, query: str): +@app.get("/{taxonomy_name}/{branch}/nodes/entry") +async def search_entry_nodes( + branch: str, + taxonomy_name: str, + q: Annotated[ + str, + Query( + description="The search query string to filter down the returned entry nodes.\ + Example: is:root language:en not(language):fr" + ), + ] = "", + page: int = 1, +) -> EntryNodeSearchResult: taxonomy = TaxonomyGraph(branch, taxonomy_name) - result = await taxonomy.full_text_search(query) + result = await search_controller.search_entry_nodes(taxonomy.project_name, q, page) return result diff --git 
a/backend/editor/controllers/search_controller.py b/backend/editor/controllers/search_controller.py new file mode 100644 index 00000000..0efa2f95 --- /dev/null +++ b/backend/editor/controllers/search_controller.py @@ -0,0 +1,310 @@ +import math +from dataclasses import dataclass + +from openfoodfacts_taxonomy_parser import utils as parser_utils +from pydantic import ValidationError + +from ..graph_db import get_current_transaction +from ..models.node_models import EntryNode +from ..models.search_models import ( + CypherQuery, + EntryNodeSearchResult, + FilterSearchTerm, + FilterSearchTermValidator, +) + + +def get_query_param_name_prefix(index: int) -> str: + return f"value_{index}" + + +@dataclass(frozen=True) +class Query: + project_id: str + search_terms: list[str] + name_search_terms: list[str] + filter_search_terms: list[FilterSearchTerm] + + +def split_query_into_search_terms(query: str) -> list[str]: + """ + Queries should be split by whitespaces that are not inside quotes + """ + query = query.strip() + search_terms = [] + + inside_quotes = False + term_start = 0 + + for term_end in range(len(query)): + if query[term_end] == '"': + inside_quotes = not inside_quotes + # If we are not inside quotes and we encounter a whitespace + # we are at the end of the current search term + elif query[term_end] == " " and not inside_quotes: + # If the term is not empty, we add it to the list of search terms + if term_start != term_end: + search_term = query[term_start:term_end] + search_terms.append(search_term) + term_start = term_end + 1 + + search_terms.append(query[term_start:]) + + return search_terms + + +def parse_filter_search_term(search_term: str) -> FilterSearchTerm | None: + """ + Parses a filter search term of the format `filter:value` if possible + OR + Returns None + """ + + if ":" not in search_term: + return None + + filter_name, filter_value = search_term.split(":", maxsplit=1) + + if filter_value.startswith('"') and filter_value.endswith('"'): + 
filter_value = filter_value[1:-1] + + # If the filter value contains quotes, it is invalid + if '"' in filter_value: + return None + + try: + # dispatch according to filter_name + return FilterSearchTermValidator.validate_python( + {"filter_type": filter_name, "filter_value": filter_value} + ) + except ValidationError: + return None + + +def validate_query(project_id: str, query: str) -> Query: + """ + A query is composed of search terms separated by whitespaces. + A search term is either a name search term or a filter search term. + + A filter search term is of the format `filter:value` where `filter` is a valid filter value + and `value` is a valid search value for the particular filter. + The `value` is surrounded by quotes if it contains whitespaces. + The value cannot contain quotes. + + All other terms are considered name search terms. + The name search term allows for a text search on a node's tags. + + The possible filters are: + - `is`: `root`, `external` and `not:external` are the only possible values. + It allows to filter on the root and external nodes. + - `language`: the value is a language code. It allows to filter on + if the language exists or not on the node. + You can negate the filter with the not:lc syntax. + - `parent`: the value is a node's id. It allows to filter on if the node is a + parent of the node with the given id. + - `child`: the value is a node's id. It allows to filter on if the node is a child of + the node with the given id. + - `ancestor`: the value is a node's id. It allows to filter on if the node is an ancestor + of the node with the given id. + - `descendant`: the value is a node's id. It allows to filter on if the node is a descendant + of the node with the given id. + - `property`: the value is a property name and an optional value (property_name:value). + It allows to filter on if the node has the given property and if the property has the + given value if it is provided. 
You can add the `not:inherited:` prefix to the filter to + negate it or to also search on parent nodes for inherited properties. + + Examples: + - "is:root language:en not(language):fr property:inherited:vegan:en:yes" + - "is:not:external parent:"en:apple juice" descendant:en:juices "fruit concentrate"" + """ + + search_terms = split_query_into_search_terms(query) + + parsed_search_terms = [] + name_search_terms = [] + filter_search_terms = [] + + for search_term in search_terms: + if (filter_search_term := parse_filter_search_term(search_term)) is not None: + filter_search_terms.append(filter_search_term) + parsed_search_terms.append(filter_search_term.to_query_string()) + else: + name_search_terms.append(search_term) + parsed_search_terms.append(search_term) + + return Query(project_id, parsed_search_terms, name_search_terms, filter_search_terms) + + +def _get_token_query(token: str) -> str: + """ + Returns the lucene query for a token. + The tokens are additive and the fuzziness of the search depends on the length of the token. + """ + + token = "+" + token + if len(token) > 10: + return token + "~2" + elif len(token) > 4: + return token + "~1" + else: + return token + + +def build_lucene_name_search_query(search_value: str) -> str | None: + """ + The name search term can trigger two types of searches: + - if the search value is in the format `language_code:raw_search_value`, + it triggers a search on the tags_ids_{language_code} index + - else it triggers a search on the tags_ids index + + If the `raw_search_value` is surrounded by quotes, the search will be exact. 
+ Otherwise, the search is fuzzy when the search value is longer than 4 characters + (the edit distance depends of the length of the search value) + """ + language_code = None + + # get an eventual language prefix + if len(search_value) > 2 and search_value[2] == ":" and search_value[0:2].isalpha(): + language_code, search_value = search_value.split(":", maxsplit=1) + language_code = language_code.lower() + + def get_search_query() -> str | None: + if search_value.startswith('"') and search_value.endswith('"'): + return search_value if len(search_value) > 2 else None + + if language_code is not None: + normalized_text = parser_utils.normalize_text(search_value, language_code) + else: + normalized_text = parser_utils.normalize_text(search_value) + + # If normalized text is empty, no searches are found + if normalized_text.strip() == "": + return None + + tokens = normalized_text.split("-") + + return "(" + " ".join(map(_get_token_query, tokens)) + ")" + + search_query = get_search_query() + + if search_query is None: + return None + + if language_code is not None: + search_query = f"tags_ids_{language_code}:{search_query}" + + return search_query + + +def build_cypher_query(query: Query, skip: int, limit: int) -> tuple[str, str, dict[str, str]]: + # build part of the query doing full text search + lucene_name_search_queries = list( + filter( + lambda q: q is not None, map(build_lucene_name_search_query, query.name_search_terms) + ) + ) + + # build part of the query for filter:value members + cypher_filter_search_terms = [ + term.build_cypher_query(get_query_param_name_prefix(index)) + for index, term in enumerate(query.filter_search_terms) + ] + + full_text_search_query, order_clause = "", "WITH n ORDER BY n.is_external, n.id" + query_params = {} + + if lucene_name_search_queries: + SEARCH_QUERY_PARAM_NAME = "search_query" + MIN_SEARCH_SCORE = 0.1 + + full_text_search_query = f""" + CALL db.index.fulltext.queryNodes("{query.project_id}_SearchTagsIds", + 
${SEARCH_QUERY_PARAM_NAME}) + YIELD node, score + WHERE score > {MIN_SEARCH_SCORE} + WITH node.id AS nodeId + WITH COLLECT(nodeId) AS nodeIds + """ + query_params[SEARCH_QUERY_PARAM_NAME] = " AND ".join(lucene_name_search_queries) + + order_clause = ( + "WITH n, apoc.coll.indexOf(nodeIds, n.id) AS index ORDER BY index, n.is_external" + ) + + name_filter_search_term = "n.id IN nodeIds" + cypher_filter_search_terms.append(CypherQuery(name_filter_search_term)) + + for cypher_filter_search_term in cypher_filter_search_terms: + query_params |= cypher_filter_search_term.params + + combined_filter_query = ( + f"WHERE {' AND '.join([cypher_query.query for cypher_query in cypher_filter_search_terms])}" + if cypher_filter_search_terms + else "" + ) + + base_query = f""" + {full_text_search_query} + MATCH (n:{query.project_id}:ENTRY) + {combined_filter_query} + """ + + page_subquery = f""" + {order_clause} + WITH collect(n) AS nodeList, count(n) AS nodeCount + UNWIND nodeList AS node + WITH node, nodeCount + SKIP {skip} LIMIT {limit} + WITH collect(node) AS nodeList, nodeCount + RETURN nodeList, nodeCount; + """ + + count_subquery = """ + RETURN count(n) AS nodeCount; + """ + + page_query = base_query + page_subquery + count_query = base_query + count_subquery + + return page_query, count_query, query_params + + +async def search_entry_nodes(project_id: str, raw_query: str, page: int) -> EntryNodeSearchResult: + """ + Search for entry nodes in the database + """ + query = validate_query(project_id, raw_query) + + parsed_query_string = " ".join(query.search_terms) + # For better UX on the search bar + if parsed_query_string != "": + parsed_query_string += " " + + PAGE_LENGTH = 50 + skip = max(0, (page - 1) * PAGE_LENGTH) + + cypher_query = build_cypher_query(query, skip, PAGE_LENGTH) + + page_query, count_query, query_params = cypher_query + + result = await get_current_transaction().run(page_query, query_params) + search_result = await result.single() + + if search_result is 
None: + count_result = await get_current_transaction().run(count_query, query_params) + node_count = (await count_result.single())["nodeCount"] + return EntryNodeSearchResult( + node_count=node_count, + page_count=math.ceil(node_count / PAGE_LENGTH), + q=parsed_query_string, + filters=query.filter_search_terms, + ) + + node_count, nodes = search_result["nodeCount"], search_result["nodeList"] + return EntryNodeSearchResult( + node_count=node_count, + page_count=math.ceil(node_count / PAGE_LENGTH), + q=parsed_query_string, + filters=query.filter_search_terms, + nodes=[EntryNode(**node) for node in nodes], + ) diff --git a/backend/editor/entries.py b/backend/editor/entries.py index 2511ae7e..04bf3123 100644 --- a/backend/editor/entries.py +++ b/backend/editor/entries.py @@ -663,64 +663,3 @@ async def update_node_children(self, entry, new_children_ids): result = list(await _result.value()) return result - - async def full_text_search(self, text): - """ - Helper function used for searching a taxonomy - """ - # Escape special characters - normalized_text = re.sub(r"[^A-Za-z0-9_]", r" ", text) - normalized_id_text = parser_utils.normalize_text(text) - - # If normalized text is empty, no searches are found - if normalized_text.strip() == "": - return [] - - id_index = self.project_name + "_SearchIds" - tags_index = self.project_name + "_SearchTags" - - text_query_exact = "*" + normalized_text + "*" - text_query_fuzzy = normalized_text + "~" - text_id_query_fuzzy = normalized_id_text + "~" - text_id_query_exact = "*" + normalized_id_text + "*" - params = { - "id_index": id_index, - "tags_index": tags_index, - "text_query_fuzzy": text_query_fuzzy, - "text_query_exact": text_query_exact, - "text_id_query_fuzzy": text_id_query_fuzzy, - "text_id_query_exact": text_id_query_exact, - } - - # Fuzzy search and wildcard (*) search on two indexes - # Fuzzy search has more priority, since it matches more close strings - # IDs are given slightly lower priority than tags in fuzzy search 
- query = """ - CALL { - CALL db.index.fulltext.queryNodes($id_index, $text_id_query_fuzzy) - yield node, score as score_ - where score_ > 0 - return node, score_ * 3 as score - UNION - CALL db.index.fulltext.queryNodes($tags_index, $text_query_fuzzy) - yield node, score as score_ - where score_ > 0 - return node, score_ * 5 as score - UNION - CALL db.index.fulltext.queryNodes($id_index, $text_id_query_exact) - yield node, score as score_ - where score_ > 0 - return node, score_ as score - UNION - CALL db.index.fulltext.queryNodes($tags_index, $text_query_exact) - yield node, score as score_ - where score_ > 0 - return node, score_ as score - } - WITH node.id AS node_id, node.is_external AS is_external, score - RETURN {id: node_id, is_external: is_external} AS node, sum(score) AS score - ORDER BY score DESC - """ - _result = await get_current_transaction().run(query, params) - result = [record["node"] for record in await _result.data()] - return result diff --git a/backend/editor/models/node_models.py b/backend/editor/models/node_models.py index 6dde3bb9..28542888 100644 --- a/backend/editor/models/node_models.py +++ b/backend/editor/models/node_models.py @@ -1,8 +1,7 @@ -""" -Required pydantic models for API -""" - from enum import StrEnum +from typing import Any + +from pydantic import model_validator from .base_models import BaseModel from .types.datetime import DateTime @@ -28,6 +27,51 @@ class EntryNodeCreate(BaseModel): main_language_code: str +class EntryNode(BaseModel): + id: str + preceding_lines: list[str] + src_position: int + main_language: str + tags: dict[str, list[str]] + properties: dict[str, str] + comments: dict[str, list[str]] + is_external: bool = False + + @model_validator(mode="before") + @classmethod + def construct_tags_and_properties_and_comments(cls, data: Any) -> Any: + """ + Before model validation, construct tags, properties, and comments from the data dict. 
+ Usage docs: https://docs.pydantic.dev/latest/concepts/validators/#model-validators + """ + if not isinstance(data, dict): + return data + + is_tag_property_or_comment = ( + lambda key: key.startswith("tags_") + or key.startswith("prop_") + or key.endswith("_comments") + ) + + parsed_data = { + key: value for key, value in data.items() if not is_tag_property_or_comment(key) + } + + parsed_data["tags"] = parsed_data.get("tags", {}) + parsed_data["properties"] = parsed_data.get("properties", {}) + parsed_data["comments"] = parsed_data.get("comments", {}) + + for key, value in data.items(): + if key.endswith("_comments"): + parsed_data["comments"][key] = value + elif key.startswith("tags_"): + parsed_data["tags"][key] = value + elif key.startswith("prop_"): + parsed_data["properties"][key] = value + + return parsed_data + + class ErrorNode(BaseModel): id: str taxonomy_name: str diff --git a/backend/editor/models/search_models.py b/backend/editor/models/search_models.py new file mode 100644 index 00000000..6e016979 --- /dev/null +++ b/backend/editor/models/search_models.py @@ -0,0 +1,140 @@ +from abc import ABC, abstractmethod +from dataclasses import dataclass, field +from typing import Annotated, Literal + +from pydantic import Field, StringConstraints, TypeAdapter, computed_field + +from .base_models import BaseModel +from .node_models import EntryNode + + +@dataclass(frozen=True) +class CypherQuery: + """ + Each search filter will return a CypherQuery with a condition (query) + and corresponding parameters (params) + """ + + query: str + params: dict[str, str] = field(default_factory=dict) + + +class AbstractFilterSearchTerm(BaseModel, ABC): + filter_type: str + filter_value: str + + def to_query_string(self) -> str: + filter_value = self.filter_value + if " " in self.filter_value: + filter_value = f'"{self.filter_value}"' + return f"{self.filter_type}:{filter_value}" + + @abstractmethod + def build_cypher_query(self, param_name: str) -> CypherQuery: + """Builds a 
Cypher query for the filter search term. + + Args: + param_name (str): The param_name is used to avoid name conflicts in the Cypher query. + """ + pass + + +class IsFilterSearchTerm(AbstractFilterSearchTerm): + filter_type: Literal["is"] + filter_value: Literal["root"] | Literal["external"] | Literal["not:external"] + + def build_cypher_query(self, _param_name: str) -> CypherQuery: + match self.filter_value: + case "root": + return CypherQuery("NOT (n)-[:is_child_of]->()") + case "external": + return CypherQuery("n.is_external = true") + case "not:external": + return CypherQuery("n.is_external = false") + case _: + raise ValueError("Invalid filter value") + + +class LanguageFilterSearchTerm(AbstractFilterSearchTerm): + filter_type: Literal["language"] + # Only allow 2-letter language codes + filter_value: Annotated[str, StringConstraints(pattern="^(not:)?[a-z]{2}$")] + + @computed_field + def negated(self) -> bool: + return self.filter_value.startswith("not:") + + @computed_field + def language(self) -> str: + return self.filter_value[4:] if self.negated else self.filter_value + + def build_cypher_query(self, _param_name: str) -> CypherQuery: + if self.negated: + return CypherQuery(f"n.tags_ids_{self.language} IS NULL") + else: + return CypherQuery(f"n.tags_ids_{self.language} IS NOT NULL") + + +class ParentFilterSearchTerm(AbstractFilterSearchTerm): + filter_type: Literal["parent"] + + def build_cypher_query(self, param_name: str) -> CypherQuery: + return CypherQuery( + "(n)<-[:is_child_of]-(:ENTRY {id: $" + param_name + "})", + {param_name: self.filter_value}, + ) + + +class ChildFilterSearchTerm(AbstractFilterSearchTerm): + filter_type: Literal["child"] + + def build_cypher_query(self, param_name: str) -> CypherQuery: + return CypherQuery( + "(n)-[:is_child_of]->(:ENTRY {id: $" + param_name + "})", + {param_name: self.filter_value}, + ) + + +class AncestorFilterSearchTerm(AbstractFilterSearchTerm): + filter_type: Literal["ancestor"] + + def 
build_cypher_query(self, param_name: str) -> CypherQuery: + return CypherQuery( + "(n)<-[:is_child_of*]-(:ENTRY {id: $" + param_name + "})", + {param_name: self.filter_value}, + ) + + +class DescendantFilterSearchTerm(AbstractFilterSearchTerm): + filter_type: Literal["descendant"] + + def build_cypher_query(self, param_name: str) -> CypherQuery: + return CypherQuery( + "(n)-[:is_child_of*]->(:ENTRY {id: $" + param_name + "})", + {param_name: self.filter_value}, + ) + + +FilterSearchTerm = Annotated[ + ( + IsFilterSearchTerm + | LanguageFilterSearchTerm + | ParentFilterSearchTerm + | ChildFilterSearchTerm + | AncestorFilterSearchTerm + | DescendantFilterSearchTerm + ), + Field(discriminator="filter_type"), +] + +# This will create the right FilterSearchTerm based upon filter_type +# https://docs.pydantic.dev/dev/concepts/type_adapter/ +FilterSearchTermValidator = TypeAdapter(FilterSearchTerm) + + +class EntryNodeSearchResult(BaseModel): + q: str = "" + node_count: int = 0 + page_count: int = 0 + filters: list[FilterSearchTerm] = Field(default_factory=list) + nodes: list[EntryNode] = Field(default_factory=list) diff --git a/backend/openapi/openapi.json b/backend/openapi/openapi.json index 563b6438..50479060 100644 --- a/backend/openapi/openapi.json +++ b/backend/openapi/openapi.json @@ -441,82 +441,6 @@ } } }, - "/{taxonomy_name}/{branch}/entry": { - "get": { - "summary": "Find All Entries", - "description": "Get all entries within taxonomy", - "operationId": "find_all_entries__taxonomy_name___branch__entry_get", - "parameters": [ - { - "name": "branch", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Branch" } - }, - { - "name": "taxonomy_name", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Taxonomy Name" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - 
"application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - } - } - }, - "post": { - "summary": "Create Entry Node", - "description": "Creating a new entry node in a taxonomy", - "operationId": "create_entry_node__taxonomy_name___branch__entry_post", - "parameters": [ - { - "name": "branch", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Branch" } - }, - { - "name": "taxonomy_name", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Taxonomy Name" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/EntryNodeCreate" } - } - } - }, - "responses": { - "201": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - } - } - } - }, "/{taxonomy_name}/{branch}/synonym/{synonym}": { "get": { "summary": "Find One Synonym", @@ -938,10 +862,10 @@ } } }, - "/{taxonomy_name}/{branch}/search": { + "/{taxonomy_name}/{branch}/nodes/entry": { "get": { - "summary": "Search Node", - "operationId": "search_node__taxonomy_name___branch__search_get", + "summary": "Search Entry Nodes", + "operationId": "search_entry_nodes__taxonomy_name___branch__nodes_entry_get", "parameters": [ { "name": "branch", @@ -956,16 +880,34 @@ "schema": { "type": "string", "title": "Taxonomy Name" } }, { - "name": "query", + "name": "q", "in": "query", - "required": true, - "schema": { "type": "string", "title": "Query" } + "required": false, + "schema": { + "type": "string", + "description": "The search query string to filter down the returned entry nodes. Example: is:root language:en not(language):fr", + "default": "", + "title": "Q" + }, + "description": "The search query string to filter down the returned entry nodes. 
Example: is:root language:en not(language):fr" + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 1, "title": "Page" } } ], "responses": { "200": { "description": "Successful Response", - "content": { "application/json": { "schema": {} } } + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/EntryNodeSearchResult" + } + } + } }, "422": { "description": "Validation Error", @@ -1126,6 +1068,49 @@ } } }, + "/{taxonomy_name}/{branch}/entry": { + "post": { + "summary": "Create Entry Node", + "description": "Creating a new entry node in a taxonomy", + "operationId": "create_entry_node__taxonomy_name___branch__entry_post", + "parameters": [ + { + "name": "branch", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Branch" } + }, + { + "name": "taxonomy_name", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Taxonomy Name" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/EntryNodeCreate" } + } + } + }, + "responses": { + "201": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, "/{taxonomy_name}/{branch}": { "delete": { "summary": "Delete Project", @@ -1161,6 +1146,15 @@ }, "components": { "schemas": { + "AncestorFilterSearchTerm": { + "properties": { + "filterType": { "const": "ancestor", "title": "Filtertype" }, + "filterValue": { "type": "string", "title": "Filtervalue" } + }, + "type": "object", + "required": ["filterType", "filterValue"], + "title": "AncestorFilterSearchTerm" + }, "Body_upload_taxonomy__taxonomy_name___branch__upload_post": { "properties": { "file": { "type": "string", "format": "binary", "title": 
"File" }, @@ -1170,6 +1164,68 @@ "required": ["file", "description"], "title": "Body_upload_taxonomy__taxonomy_name___branch__upload_post" }, + "ChildFilterSearchTerm": { + "properties": { + "filterType": { "const": "child", "title": "Filtertype" }, + "filterValue": { "type": "string", "title": "Filtervalue" } + }, + "type": "object", + "required": ["filterType", "filterValue"], + "title": "ChildFilterSearchTerm" + }, + "DescendantFilterSearchTerm": { + "properties": { + "filterType": { "const": "descendant", "title": "Filtertype" }, + "filterValue": { "type": "string", "title": "Filtervalue" } + }, + "type": "object", + "required": ["filterType", "filterValue"], + "title": "DescendantFilterSearchTerm" + }, + "EntryNode": { + "properties": { + "id": { "type": "string", "title": "Id" }, + "precedingLines": { + "items": { "type": "string" }, + "type": "array", + "title": "Precedinglines" + }, + "srcPosition": { "type": "integer", "title": "Srcposition" }, + "mainLanguage": { "type": "string", "title": "Mainlanguage" }, + "tags": { + "additionalProperties": { + "items": { "type": "string" }, + "type": "array" + }, + "type": "object", + "title": "Tags" + }, + "properties": { + "additionalProperties": { "type": "string" }, + "type": "object", + "title": "Properties" + }, + "comments": { + "additionalProperties": { + "items": { "type": "string" }, + "type": "array" + }, + "type": "object", + "title": "Comments" + } + }, + "type": "object", + "required": [ + "id", + "precedingLines", + "srcPosition", + "mainLanguage", + "tags", + "properties", + "comments" + ], + "title": "EntryNode" + }, "EntryNodeCreate": { "properties": { "name": { "type": "string", "title": "Name" }, @@ -1179,6 +1235,53 @@ "required": ["name", "mainLanguageCode"], "title": "EntryNodeCreate" }, + "EntryNodeSearchResult": { + "properties": { + "q": { "type": "string", "title": "Q", "default": "" }, + "nodeCount": { + "type": "integer", + "title": "Nodecount", + "default": 0 + }, + "pageCount": { + 
"type": "integer", + "title": "Pagecount", + "default": 0 + }, + "filters": { + "items": { + "oneOf": [ + { "$ref": "#/components/schemas/IsFilterSearchTerm" }, + { "$ref": "#/components/schemas/LanguageFilterSearchTerm" }, + { "$ref": "#/components/schemas/ParentFilterSearchTerm" }, + { "$ref": "#/components/schemas/ChildFilterSearchTerm" }, + { "$ref": "#/components/schemas/AncestorFilterSearchTerm" }, + { "$ref": "#/components/schemas/DescendantFilterSearchTerm" } + ], + "discriminator": { + "propertyName": "filterType", + "mapping": { + "ancestor": "#/components/schemas/AncestorFilterSearchTerm", + "child": "#/components/schemas/ChildFilterSearchTerm", + "descendant": "#/components/schemas/DescendantFilterSearchTerm", + "is": "#/components/schemas/IsFilterSearchTerm", + "language": "#/components/schemas/LanguageFilterSearchTerm", + "parent": "#/components/schemas/ParentFilterSearchTerm" + } + } + }, + "type": "array", + "title": "Filters" + }, + "nodes": { + "items": { "$ref": "#/components/schemas/EntryNode" }, + "type": "array", + "title": "Nodes" + } + }, + "type": "object", + "title": "EntryNodeSearchResult" + }, "ErrorNode": { "properties": { "id": { "type": "string", "title": "Id" }, @@ -1224,6 +1327,47 @@ "title": "HTTPValidationError" }, "Header": { "properties": {}, "type": "object", "title": "Header" }, + "IsFilterSearchTerm": { + "properties": { + "filterType": { "const": "is", "title": "Filtertype" }, + "filterValue": { "const": "root", "title": "Filtervalue" } + }, + "type": "object", + "required": ["filterType", "filterValue"], + "title": "IsFilterSearchTerm" + }, + "LanguageFilterSearchTerm": { + "properties": { + "filterType": { "const": "language", "title": "Filtertype" }, + "filterValue": { + "type": "string", + "pattern": "^(not:)?[a-z]{2}$", + "title": "Filtervalue" + }, + "negated": { + "type": "boolean", + "title": "Negated", + "readOnly": true + }, + "language": { + "type": "string", + "title": "Language", + "readOnly": true + } + }, + 
"type": "object", + "required": ["filterType", "filterValue", "negated", "language"], + "title": "LanguageFilterSearchTerm" + }, + "ParentFilterSearchTerm": { + "properties": { + "filterType": { "const": "parent", "title": "Filtertype" }, + "filterValue": { "type": "string", "title": "Filtervalue" } + }, + "type": "object", + "required": ["filterType", "filterValue"], + "title": "ParentFilterSearchTerm" + }, "Project": { "properties": { "id": { "type": "string", "title": "Id" }, diff --git a/backend/tests/test_api.py b/backend/tests/test_api.py index 1eaf58ed..c72b4ddc 100644 --- a/backend/tests/test_api.py +++ b/backend/tests/test_api.py @@ -15,7 +15,7 @@ def test_setup(neo4j): session.run(query) query = "DROP INDEX p_test_branch_SearchIds IF EXISTS" session.run(query) - query = "DROP INDEX p_test_branch_SearchTags IF EXISTS" + query = "DROP INDEX p_test_branch_SearchTagsIds IF EXISTS" session.run(query) diff --git a/docker-compose.yml b/docker-compose.yml index 93808b77..2cbbe207 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -11,12 +11,13 @@ services: - "${NEO4J_BOLT_EXPOSE:-127.0.0.1:7687}:7687" environment: # we should not expose it publicly, so no auth is ok - NEO4J_AUTH: none + - NEO4J_AUTH=none + - NEO4J_PLUGINS=["apoc"] # memory configuration from .env - NEO4J_server_memory_heap_initial__size: - NEO4J_server_memory_heap_max__size: - NEO4J_server_memory_pagecache_size: - NEO4J_db_memory_transaction_total_max: + - NEO4J_server_memory_heap_initial__size=${NEO4J_server_memory_heap_initial__size} + - NEO4J_server_memory_heap_max__size=${NEO4J_server_memory_heap_max__size} + - NEO4J_server_memory_pagecache_size=${NEO4J_server_memory_pagecache_size} + - NEO4J_db_memory_transaction_total_max=${NEO4J_db_memory_transaction_total_max} volumes: # put data in a volume - neo4j-data:/data diff --git a/parser/openfoodfacts_taxonomy_parser/parser/parser.py b/parser/openfoodfacts_taxonomy_parser/parser/parser.py index 12a7dcb9..e2f051a3 100644 --- 
a/parser/openfoodfacts_taxonomy_parser/parser/parser.py +++ b/parser/openfoodfacts_taxonomy_parser/parser/parser.py @@ -211,9 +211,9 @@ def _create_node_fulltext_index(self, project_label: str): self.session.run(query) language_codes = [lang.alpha2 for lang in list(iso639.languages) if lang.alpha2 != ""] - tags_prefixed_lc = ["n.tags_" + lc for lc in language_codes] + tags_prefixed_lc = ["n.tags_ids_" + lc for lc in language_codes] tags_prefixed_lc = ", ".join(tags_prefixed_lc) - query = f"""CREATE FULLTEXT INDEX {project_label+'_SearchTags'} IF NOT EXISTS + query = f"""CREATE FULLTEXT INDEX {project_label+'_SearchTagsIds'} IF NOT EXISTS FOR (n:{project_label}) ON EACH [{tags_prefixed_lc}]""" self.session.run(query) diff --git a/parser/tests/integration/test_parse_unparse_integration.py b/parser/tests/integration/test_parse_unparse_integration.py index 16bca285..1afb9410 100644 --- a/parser/tests/integration/test_parse_unparse_integration.py +++ b/parser/tests/integration/test_parse_unparse_integration.py @@ -17,21 +17,21 @@ def test_setup(neo4j): neo4j.session().run(query) query = "DROP INDEX p_test_branch_SearchIds IF EXISTS" neo4j.session().run(query) - query = "DROP INDEX p_test_branch_SearchTags IF EXISTS" + query = "DROP INDEX p_test_branch_SearchTagsIds IF EXISTS" neo4j.session().run(query) query1 = "MATCH (n:p_test_branch1) DETACH DELETE n" neo4j.session().run(query1) query1 = "DROP INDEX p_test_branch1_SearchIds IF EXISTS" neo4j.session().run(query1) - query1 = "DROP INDEX p_test_branch1_SearchTags IF EXISTS" + query1 = "DROP INDEX p_test_branch1_SearchTagsIds IF EXISTS" neo4j.session().run(query1) query2 = "MATCH (n:p_test_branch2) DETACH DELETE n" neo4j.session().run(query2) query2 = "DROP INDEX p_test_branch2_SearchIds IF EXISTS" neo4j.session().run(query2) - query2 = "DROP INDEX p_test_branch2_SearchTags IF EXISTS" + query2 = "DROP INDEX p_test_branch2_SearchTagsIds IF EXISTS" neo4j.session().run(query2) diff --git 
a/parser/tests/integration/test_parser_integration.py +++ b/parser/tests/integration/test_parser_integration.py index c67ba3eb..d9ce53e7 100644 --- a/parser/tests/integration/test_parser_integration.py +++ b/parser/tests/integration/test_parser_integration.py @@ -21,7 +21,7 @@ def test_setup(neo4j): neo4j.session().run(query) query = "DROP INDEX p_test_branch_SearchIds IF EXISTS" neo4j.session().run(query) - query = "DROP INDEX p_test_branch_SearchTags IF EXISTS" + query = "DROP INDEX p_test_branch_SearchTagsIds IF EXISTS" neo4j.session().run(query) diff --git a/taxonomy-editor-frontend/src/backend-types/types.ts b/taxonomy-editor-frontend/src/backend-types/types.ts index 99536e3e..8822f20e 100644 --- a/taxonomy-editor-frontend/src/backend-types/types.ts +++ b/taxonomy-editor-frontend/src/backend-types/types.ts @@ -5,6 +5,4 @@ export type NodeInfo = { export type RootEntriesAPIResponse = Array<NodeInfo>; -export type SearchAPIResponse = NodeInfo[]; - export type ParentsAPIResponse = string[]; diff --git a/taxonomy-editor-frontend/src/client/index.ts b/taxonomy-editor-frontend/src/client/index.ts index 460760fa..120fac04 100644 --- a/taxonomy-editor-frontend/src/client/index.ts +++ b/taxonomy-editor-frontend/src/client/index.ts @@ -7,12 +7,20 @@ export { CancelablePromise, CancelError } from "./core/CancelablePromise"; export { OpenAPI } from "./core/OpenAPI"; export type { OpenAPIConfig } from "./core/OpenAPI"; +export type { AncestorFilterSearchTerm } from "./models/AncestorFilterSearchTerm"; export type { Body_upload_taxonomy__taxonomy_name___branch__upload_post } from "./models/Body_upload_taxonomy__taxonomy_name___branch__upload_post"; +export type { ChildFilterSearchTerm } from "./models/ChildFilterSearchTerm"; +export type { DescendantFilterSearchTerm } from "./models/DescendantFilterSearchTerm"; +export type { EntryNode } from "./models/EntryNode"; export type { EntryNodeCreate } from "./models/EntryNodeCreate"; +export type { EntryNodeSearchResult } from
"./models/EntryNodeSearchResult"; export type { ErrorNode } from "./models/ErrorNode"; export type { Footer } from "./models/Footer"; export type { Header } from "./models/Header"; export type { HTTPValidationError } from "./models/HTTPValidationError"; +export type { IsFilterSearchTerm } from "./models/IsFilterSearchTerm"; +export type { LanguageFilterSearchTerm } from "./models/LanguageFilterSearchTerm"; +export type { ParentFilterSearchTerm } from "./models/ParentFilterSearchTerm"; export type { Project } from "./models/Project"; export { ProjectStatus } from "./models/ProjectStatus"; export type { ValidationError } from "./models/ValidationError"; diff --git a/taxonomy-editor-frontend/src/client/models/AncestorFilterSearchTerm.ts b/taxonomy-editor-frontend/src/client/models/AncestorFilterSearchTerm.ts new file mode 100644 index 00000000..eaa964f9 --- /dev/null +++ b/taxonomy-editor-frontend/src/client/models/AncestorFilterSearchTerm.ts @@ -0,0 +1,8 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export type AncestorFilterSearchTerm = { + filterType: "ancestor"; + filterValue: string; +}; diff --git a/taxonomy-editor-frontend/src/client/models/ChildFilterSearchTerm.ts b/taxonomy-editor-frontend/src/client/models/ChildFilterSearchTerm.ts new file mode 100644 index 00000000..154b4e3b --- /dev/null +++ b/taxonomy-editor-frontend/src/client/models/ChildFilterSearchTerm.ts @@ -0,0 +1,8 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export type ChildFilterSearchTerm = { + filterType: "child"; + filterValue: string; +}; diff --git a/taxonomy-editor-frontend/src/client/models/DescendantFilterSearchTerm.ts b/taxonomy-editor-frontend/src/client/models/DescendantFilterSearchTerm.ts new file mode 100644 index 00000000..4b82f4f9 --- /dev/null +++ 
b/taxonomy-editor-frontend/src/client/models/DescendantFilterSearchTerm.ts @@ -0,0 +1,8 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export type DescendantFilterSearchTerm = { + filterType: "descendant"; + filterValue: string; +}; diff --git a/taxonomy-editor-frontend/src/client/models/EntryNode.ts b/taxonomy-editor-frontend/src/client/models/EntryNode.ts new file mode 100644 index 00000000..f8157f15 --- /dev/null +++ b/taxonomy-editor-frontend/src/client/models/EntryNode.ts @@ -0,0 +1,14 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export type EntryNode = { + id: string; + precedingLines: Array<string>; + srcPosition: number; + mainLanguage: string; + tags: Record<string, Array<string>>; + properties: Record<string, string>; + comments: Record<string, Array<string>>; + isExternal: boolean; +}; diff --git a/taxonomy-editor-frontend/src/client/models/EntryNodeSearchResult.ts b/taxonomy-editor-frontend/src/client/models/EntryNodeSearchResult.ts new file mode 100644 index 00000000..5be264be --- /dev/null +++ b/taxonomy-editor-frontend/src/client/models/EntryNodeSearchResult.ts @@ -0,0 +1,25 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { AncestorFilterSearchTerm } from "./AncestorFilterSearchTerm"; +import type { ChildFilterSearchTerm } from "./ChildFilterSearchTerm"; +import type { DescendantFilterSearchTerm } from "./DescendantFilterSearchTerm"; +import type { EntryNode } from "./EntryNode"; +import type { IsFilterSearchTerm } from "./IsFilterSearchTerm"; +import type { LanguageFilterSearchTerm } from "./LanguageFilterSearchTerm"; +import type { ParentFilterSearchTerm } from "./ParentFilterSearchTerm"; +export type EntryNodeSearchResult = { + q: string; + nodeCount: number; + pageCount: number; + filters: Array< + | IsFilterSearchTerm + |
LanguageFilterSearchTerm + | ParentFilterSearchTerm + | ChildFilterSearchTerm + | AncestorFilterSearchTerm + | DescendantFilterSearchTerm + >; + nodes: Array<EntryNode>; +}; diff --git a/taxonomy-editor-frontend/src/client/models/IsFilterSearchTerm.ts b/taxonomy-editor-frontend/src/client/models/IsFilterSearchTerm.ts new file mode 100644 index 00000000..1f4e6e38 --- /dev/null +++ b/taxonomy-editor-frontend/src/client/models/IsFilterSearchTerm.ts @@ -0,0 +1,8 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export type IsFilterSearchTerm = { + filterType: "is"; + filterValue: "root" | "external" | "not:external"; +}; diff --git a/taxonomy-editor-frontend/src/client/models/LanguageFilterSearchTerm.ts b/taxonomy-editor-frontend/src/client/models/LanguageFilterSearchTerm.ts new file mode 100644 index 00000000..f2df214a --- /dev/null +++ b/taxonomy-editor-frontend/src/client/models/LanguageFilterSearchTerm.ts @@ -0,0 +1,10 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export type LanguageFilterSearchTerm = { + filterType: "language"; + filterValue: string; + readonly negated: boolean; + readonly language: string; +}; diff --git a/taxonomy-editor-frontend/src/client/models/ParentFilterSearchTerm.ts b/taxonomy-editor-frontend/src/client/models/ParentFilterSearchTerm.ts new file mode 100644 index 00000000..04ed49c2 --- /dev/null +++ b/taxonomy-editor-frontend/src/client/models/ParentFilterSearchTerm.ts @@ -0,0 +1,8 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export type ParentFilterSearchTerm = { + filterType: "parent"; + filterValue: string; +}; diff --git a/taxonomy-editor-frontend/src/client/services/DefaultService.ts b/taxonomy-editor-frontend/src/client/services/DefaultService.ts index
019b74d3..5bc97c10 100644 --- a/taxonomy-editor-frontend/src/client/services/DefaultService.ts +++ b/taxonomy-editor-frontend/src/client/services/DefaultService.ts @@ -4,6 +4,7 @@ /* eslint-disable */ import type { Body_upload_taxonomy__taxonomy_name___branch__upload_post } from "../models/Body_upload_taxonomy__taxonomy_name___branch__upload_post"; import type { EntryNodeCreate } from "../models/EntryNodeCreate"; +import type { EntryNodeSearchResult } from "../models/EntryNodeSearchResult"; import type { ErrorNode } from "../models/ErrorNode"; import type { Footer } from "../models/Footer"; import type { Header } from "../models/Header"; @@ -312,58 +313,6 @@ export class DefaultService { }, }); } - /** - * Find All Entries - * Get all entries within taxonomy - * @param branch - * @param taxonomyName - * @returns any Successful Response - * @throws ApiError - */ - public static findAllEntriesTaxonomyNameBranchEntryGet( - branch: string, - taxonomyName: string - ): CancelablePromise<any> { - return __request(OpenAPI, { - method: "GET", - url: "/{taxonomy_name}/{branch}/entry", - path: { - branch: branch, - taxonomy_name: taxonomyName, - }, - errors: { - 422: `Validation Error`, - }, - }); - } - /** - * Create Entry Node - * Creating a new entry node in a taxonomy - * @param branch - * @param taxonomyName - * @param requestBody - * @returns any Successful Response - * @throws ApiError - */ - public static createEntryNodeTaxonomyNameBranchEntryPost( - branch: string, - taxonomyName: string, - requestBody: EntryNodeCreate - ): CancelablePromise<any> { - return __request(OpenAPI, { - method: "POST", - url: "/{taxonomy_name}/{branch}/entry", - path: { - branch: branch, - taxonomy_name: taxonomyName, - }, - body: requestBody, - mediaType: "application/json", - errors: { - 422: `Validation Error`, - }, - }); - } /** * Find One Synonym * Get synonym corresponding to id within taxonomy @@ -653,27 +602,30 @@ export class DefaultService { }); } /** - * Search Node + * Search Entry Nodes
* @param branch * @param taxonomyName - * @param query - * @returns any Successful Response + * @param q The search query string to filter down the returned entry nodes. Example: is:root language:en not(language):fr + * @param page + * @returns EntryNodeSearchResult Successful Response * @throws ApiError */ - public static searchNodeTaxonomyNameBranchSearchGet( + public static searchEntryNodesTaxonomyNameBranchNodesEntryGet( branch: string, taxonomyName: string, - query: string - ): CancelablePromise<any> { + q: string = "", + page: number = 1 + ): CancelablePromise<EntryNodeSearchResult> { return __request(OpenAPI, { method: "GET", - url: "/{taxonomy_name}/{branch}/search", + url: "/{taxonomy_name}/{branch}/nodes/entry", path: { branch: branch, taxonomy_name: taxonomyName, }, query: { - query: query, + q: q, + page: page, }, errors: { 422: `Validation Error`, @@ -778,6 +730,34 @@ export class DefaultService { }, }); } + /** + * Create Entry Node + * Creating a new entry node in a taxonomy + * @param branch + * @param taxonomyName + * @param requestBody + * @returns any Successful Response + * @throws ApiError + */ + public static createEntryNodeTaxonomyNameBranchEntryPost( + branch: string, + taxonomyName: string, + requestBody: EntryNodeCreate + ): CancelablePromise<any> { + return __request(OpenAPI, { + method: "POST", + url: "/{taxonomy_name}/{branch}/entry", + path: { + branch: branch, + taxonomy_name: taxonomyName, + }, + body: requestBody, + mediaType: "application/json", + errors: { + 422: `Validation Error`, + }, + }); + } /** * Delete Project * Delete a project diff --git a/taxonomy-editor-frontend/src/pages/project/search/SearchResults.tsx b/taxonomy-editor-frontend/src/pages/project/search/SearchResults.tsx index 11be42c9..e5edaacc 100644 --- a/taxonomy-editor-frontend/src/pages/project/search/SearchResults.tsx +++ b/taxonomy-editor-frontend/src/pages/project/search/SearchResults.tsx @@ -23,9 +23,10 @@ import Dialog from "@mui/material/Dialog"; import useFetch from
"@/components/useFetch"; import { createBaseURL } from "@/utils"; import { greyHexCode } from "@/constants"; -import type { SearchAPIResponse } from "@/backend-types/types"; + import CreateNodeDialogContent from "@/components/CreateNodeDialogContent"; import NodesTableBody from "@/components/NodesTableBody"; +import { EntryNodeSearchResult } from "@/client"; type Props = { query: string; @@ -39,11 +40,21 @@ const SearchResults = ({ query, taxonomyName, branchName }: Props) => { const baseUrl = createBaseURL(taxonomyName, branchName); const { - data: nodeInfos, + data: result, isPending, isError, errorMessage, - } = useFetch<SearchAPIResponse>(`${baseUrl}search?query=${encodeURI(query)}`); + } = useFetch<EntryNodeSearchResult>( + `${baseUrl}nodes/entry?q=${encodeURI(query)}` + ); + + const nodes = result?.nodes; + const nodeInfos = nodes?.map((node) => { + return { + id: node.id, + is_external: node.isExternal, + }; + }); const handleCloseAddDialog = () => { setOpenNewNodeDialog(false); @@ -108,7 +119,8 @@ const SearchResults = ({ query, taxonomyName, branchName }: Props) => { Search Results - Number of nodes found: {(nodeInfos ?? []).length} + Number of nodes found:{" "} + {`${result?.nodeCount} | pages: ${result?.pageCount}`} {/* Table for listing all nodes in taxonomy */}