From b1b66c481022eb111a3e106cd81d6f589b6caf2c Mon Sep 17 00:00:00 2001 From: Johan Bergsma <29785380+JPBergsma@users.noreply.github.com> Date: Fri, 13 Oct 2023 17:02:02 +0200 Subject: [PATCH] Made changes to make mypy happy. --- optimade/server/entry_collections/mongo.py | 17 ++++++++++------- optimade/server/exception_handlers.py | 2 +- optimade/server/main.py | 2 +- optimade/server/routers/utils.py | 18 ++++++++++-------- 4 files changed, 22 insertions(+), 17 deletions(-) diff --git a/optimade/server/entry_collections/mongo.py b/optimade/server/entry_collections/mongo.py index 3f2dca3276..9e40b08531 100644 --- a/optimade/server/entry_collections/mongo.py +++ b/optimade/server/entry_collections/mongo.py @@ -102,21 +102,24 @@ def count(self, **kwargs: Any) -> int: kwargs["filter"] = {} return len(self.collection.find(**kwargs)) - def insert(self, content: bytes, filename: str, metadata: dict = {}) -> None: + def insert(self, data: list) -> None: """Add the given entries to the underlying database. Warning: No validation is performed on the incoming data. Arguments: - content: The file content to add to gridfs. - filename: The filename of the added content. - metadata: extra metadata to add to the gridfs entry. + data: a list of dictionaries. Each dictionary contains the data belonging to one file. + These dictionaries contain the fields: + data: The file content to add to gridfs. + filename: The filename of the added content. + metadata: extra metadata to add to the gridfs entry. """ - self.collection.put(content, filename=filename, metadata=metadata) + for entry in data: # todo check whether I can insert multiple files in one go. 
+ self.collection.put(**entry) def handle_query_params( - self, params: Union[SingleEntryQueryParams, PartialDataQueryParams] + self, params: Union[SingleEntryQueryParams, PartialDataQueryParams] # type: ignore[override] ) -> Dict[str, Any]: """Parse and interpret the backend-agnostic query parameter models into a dictionary that can be used by MongoDB. @@ -373,7 +376,7 @@ def insert(self, data: List[EntryResource]) -> None: self.collection.insert_many(data) def handle_query_params( - self, params: Union[EntryListingQueryParams, SingleEntryQueryParams] + self, params: Union[EntryListingQueryParams, SingleEntryQueryParams] # type: ignore[override] ) -> Dict[str, Any]: """Parse and interpret the backend-agnostic query parameter models into a dictionary that can be used by MongoDB. diff --git a/optimade/server/exception_handlers.py b/optimade/server/exception_handlers.py index 06fc083a85..efbaa0a72c 100644 --- a/optimade/server/exception_handlers.py +++ b/optimade/server/exception_handlers.py @@ -230,7 +230,7 @@ def general_exception_handler(request: Request, exc: Exception) -> JSONAPIRespon (OptimadeHTTPException, http_exception_handler), (RequestValidationError, request_validation_exception_handler), (ValidationError, validation_exception_handler), - (VisitError, grammar_not_implemented_handler), + (VisitError, grammar_not_implemented_handler), # type: ignore[list-item] # not entirely sure why this entry triggers mypy (NotImplementedError, not_implemented_handler), # type: ignore[list-item] # not entirely sure why this entry triggers mypy (Exception, general_exception_handler), ] diff --git a/optimade/server/main.py b/optimade/server/main.py index 56efd4c16a..5f75db972d 100644 --- a/optimade/server/main.py +++ b/optimade/server/main.py @@ -101,7 +101,7 @@ def read_array_header(fobj): "name": numpy_meta[2].name, "itemsize": numpy_meta[2].itemsize, } - partial_data_coll.insert(f, filename=filename, metadata=metadata) + partial_data_coll.insert([{"data": f, "filename": 
filename, "metadata": metadata}]) # type: ignore[list-item] # Todo : Perhaps this can be reduced to a single insert statement. def load_entries(endpoint_name: str, endpoint_collection: EntryCollection): LOGGER.debug("Loading test %s...", endpoint_name) diff --git a/optimade/server/routers/utils.py b/optimade/server/routers/utils.py index 27fd2a6c9e..3ac5819caf 100644 --- a/optimade/server/routers/utils.py +++ b/optimade/server/routers/utils.py @@ -420,11 +420,13 @@ def get_partial_entry( ) array = np.frombuffer( - results["attributes"]["data"], - dtype=getattr(np, results["attributes"]["dtype"]["name"]), - ).reshape(results["attributes"]["shape"]) + results["attributes"]["data"], # type: ignore[call-overload] + dtype=getattr(np, results["attributes"]["dtype"]["name"]), # type: ignore[call-overload] + ).reshape( + results["attributes"]["shape"] + ) # type: ignore[call-overload] # slice array - property_ranges = results["attributes"]["property_ranges"] + property_ranges = results["attributes"]["property_ranges"] # type: ignore[call-overload] slice_ind = [ slice( 0, @@ -455,14 +457,14 @@ def get_partial_entry( "has_references": False, } # Todo: add support for non_dense data if more_data_available: - next_link = ["PARTIAL-DATA-NEXT", [results["attributes"].pop("next")]] + next_link = ["PARTIAL-DATA-NEXT", [results["attributes"].pop("next")]] # type: ignore[call-overload] if params.response_format == "json": for key in header: - results["attributes"][key] = header[key] - results["attributes"]["data"] = array.tolist() + results["attributes"][key] = header[key] # type: ignore[call-overload] + results["attributes"]["data"] = array.tolist() # type: ignore[call-overload] if more_data_available: - results["attributes"]["next"] = next_link + results["attributes"]["next"] = next_link # type: ignore[call-overload] return dict( links=links, data=[results] if results else None,