From 2c2ead664ca0b8d3858437120c038a58a6a13463 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 25 Jul 2022 10:45:07 -0700 Subject: [PATCH 001/239] added skeleton outline of all new files for schematic/schema refactor --- schematic/schemas/data_model_edges.py | 8 ++ schematic/schemas/data_model_graph.py | 133 ++++++++++++++++++++ schematic/schemas/data_model_json_schema.py | 8 ++ schematic/schemas/data_model_jsonld.py | 38 ++++++ schematic/schemas/data_model_nodes.py | 8 ++ schematic/schemas/data_model_parser.py | 25 ++++ schematic/schemas/data_model_validator.py | 51 ++++++++ schematic/utils/schema_util.py | 10 ++ 8 files changed, 281 insertions(+) create mode 100644 schematic/schemas/data_model_edges.py create mode 100644 schematic/schemas/data_model_graph.py create mode 100644 schematic/schemas/data_model_json_schema.py create mode 100644 schematic/schemas/data_model_jsonld.py create mode 100644 schematic/schemas/data_model_nodes.py create mode 100644 schematic/schemas/data_model_parser.py create mode 100644 schematic/schemas/data_model_validator.py create mode 100644 schematic/utils/schema_util.py diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py new file mode 100644 index 000000000..8c5659603 --- /dev/null +++ b/schematic/schemas/data_model_edges.py @@ -0,0 +1,8 @@ +class DataModelEdges(): + def __init__(): + + def generate_edge(): + return + + def edit_edge(): + return \ No newline at end of file diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py new file mode 100644 index 000000000..55b2c4f34 --- /dev/null +++ b/schematic/schemas/data_model_graph.py @@ -0,0 +1,133 @@ +import os +import string +import json +import logging + +from typing import Any, Dict, Optional, Text, List + +import inflection +import networkx as nx + +from rdflib import Graph, Namespace, plugin, query +from networkx.algorithms.cycles import find_cycle +from networkx.readwrite import json_graph + +from 
schematic.utils.curie_utils import ( + expand_curies_in_schema, + uri2label, + extract_name_from_uri_or_curie, +) +from schematic.utils.general import find_duplicates +from schematic.utils.io_utils import load_default, load_json, load_schemaorg +from schematic.utils.schema_utils import ( + load_schema_into_networkx, + node_attrs_cleanup, + class_to_node, + relationship_edges, +) +from schematic.utils.general import dict2list, unlist +from schematic.utils.viz_utils import visualize +from schematic.utils.validate_utils import ( + validate_class_schema, + validate_property_schema, + validate_schema, +) +from schematic.schemas.curie import uri2curie, curie2uri +from schematic.schemas.data_model_parser import parse_model + + +namespaces = dict(rdf=Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#")) + + +logger = logging.getLogger(__name__) + + + +class DataModelGraphMeta(object): + _instances = {} + + def __call__(cls, *args, **kwargs): + """ + Possible changes to the value of the `__init__` argument do not affect + the returned instance. + """ + if cls not in cls._instances: + instance = super().__call__(*args, **kwargs) + cls._instances[cls] = instance + return cls._instances[cls] + + +class DataModelGraph(metaclass=DataModelGraphMeta): + ''' + Generate graph network (networkx) from the attributes and relationships returned + fromt he data model parser. + + Create a singleton. + ''' + def __init__( + path_to_data_model: str = None, + ): + ''' + If no path_to_data_model, load default schema. + ''' + self.data_model = parse_model(path_to_data_model) + + + def generate_data_model_graph(self, data_model): + ''' + + ''' + data_model_graph = None + return data_model_graph + +class DataModelGraphExporer(): + def __init__(): + ''' + Load data model graph as a singleton. 
+ ''' + self.data_model_graph = DataModelGraph.generate_data_model_graph(data_model) + + + def get_adjacent_nodes_by_relationship(): + return + + def get_component_requirements(): + return + + def get_component_requirements_graph(): + return + + def get_descendants_by_edge_type(): + return + + def get_digraph_by_edge_type(): + return + + def get_edges_by_relationship(): + return + + def get_node_definition(): + return + + def get_node_dependencies(): + return + + def get_node_label(): + return + + def find_adjacent_child_classes(): + return + + def find_all_class_properties(): + return + + def find_class_specific_properties(): + return + + def find_class_usages(): + return + + def is_node_required(): + return + + \ No newline at end of file diff --git a/schematic/schemas/data_model_json_schema.py b/schematic/schemas/data_model_json_schema.py new file mode 100644 index 000000000..6afdfe9b4 --- /dev/null +++ b/schematic/schemas/data_model_json_schema.py @@ -0,0 +1,8 @@ +class DataModelJSONSchema: + def __init__(): + + def get_json_validation_schema(): + ''' + A refactor of get_json_schema_requirements() from the + schema generator. 
+ ''' \ No newline at end of file diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py new file mode 100644 index 000000000..7e42177c0 --- /dev/null +++ b/schematic/schemas/data_model_jsonld.py @@ -0,0 +1,38 @@ +class DataModelJsonLD(): + def __init__(): + + + def generate_data_model_jsonld(self): + +class JSONLD_object(): + ''' + Decorator class design + ''' + def __init__(): + self.to_template() + + def to_template(self): + ''' + Returns jsonld_class_template or jsonld_property_template + ''' + return + +class JSONLD_property(): + def __init__(): + self.to_template() + + def explore_property(): + return + + def edit_property(): + return + +class JSONLD_class(): + def __init__(): + self.to_template() + + def explore_class(): + return + + def edit_class(): + return diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py new file mode 100644 index 000000000..c24935933 --- /dev/null +++ b/schematic/schemas/data_model_nodes.py @@ -0,0 +1,8 @@ +class DataModelNodes(): + def __init__(): + + def generate_node(): + return + + def edit_node(): + return \ No newline at end of file diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py new file mode 100644 index 000000000..c903aac1a --- /dev/null +++ b/schematic/schemas/data_model_parser.py @@ -0,0 +1,25 @@ +class DataModelParser(): + def __init__( + path_to_data_model: str, + ): + + model_type = self.get_model_type(path_to_data_model) + parse_model(model_type) + + def get_model_type(self, path_to_data_model): + + return + + def parse_model(self, model_type) + if model_type == 'csv' + DataModelCSVParser.parse_csv_model() + elif model_type == 'jsonld' + DataModelJSONLDParser.parse_jsonld_model() + return + +class DataModelCSVParser(): + def __init__(): + +class DataModelJSONLDParser(): + def __init__(): + diff --git a/schematic/schemas/data_model_validator.py b/schematic/schemas/data_model_validator.py new file 
mode 100644 index 000000000..1277f905f --- /dev/null +++ b/schematic/schemas/data_model_validator.py @@ -0,0 +1,51 @@ +class DataModelValidator(): + ''' + Check for consistency within data model. + ''' + def __init__( + data_model, + run_all_checks: bool = True, + ): + data_model = self.data_model + if run_all_checks: + ''' + If there are errors log them. + ''' + errors = self.run_checks(data_model) + + def run_checks(self): + checks = [ + self.check_has_name(), + self.check_is_dag(), + self.check_json_(), + self.check_name_is_valid(), + self.check_name_overlap() + ] + errors = [error for check in checks] + return errors + + def check_has_name(self): + error = None + return error + + def check_is_dag(self): + error = None + return + + def check_json(self): + ''' + Standard JSON validation. + ''' + error = None + return + + def check_name_is_valid(self): + error = None + return + + def check_name_overlap(self): + ''' + Check if name is repeated in a valid value + ''' + error = None + return \ No newline at end of file diff --git a/schematic/utils/schema_util.py b/schematic/utils/schema_util.py new file mode 100644 index 000000000..f0fff6fa2 --- /dev/null +++ b/schematic/utils/schema_util.py @@ -0,0 +1,10 @@ +''' +General methods. 
+ +''' + +def get_property_label_from_display_name(): + return + +def get_class_label_from_display_name(): + return \ No newline at end of file From f6054e2370718643a3a0482aaa64233d93acf802 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Jan 2023 10:08:06 -0800 Subject: [PATCH 002/239] WIP: Changes to schema commands to work with working refactor changes --- schematic/schemas/commands.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/schematic/schemas/commands.py b/schematic/schemas/commands.py index 3e4ee7b8c..be2fec25c 100644 --- a/schematic/schemas/commands.py +++ b/schematic/schemas/commands.py @@ -6,7 +6,7 @@ import sys import re -from schematic.schemas.df_parser import _convert_csv_to_data_model +from schematic.schemas.data_model_parser import DataModelParser from schematic.utils.cli_utils import query_dict from schematic.help import schema_commands @@ -32,7 +32,7 @@ def schema(): # use as `schematic model ...` ) @click_log.simple_verbosity_option(logger) @click.argument( - "schema_csv", type=click.Path(exists=True), metavar="", nargs=1 + "schema", type=click.Path(exists=True), metavar="", nargs=1 ) @click.option( "--base_schema", @@ -47,11 +47,18 @@ def schema(): # use as `schematic model ...` metavar="", help=query_dict(schema_commands, ("schema", "convert", "output_jsonld")), ) -def convert(schema_csv, base_schema, output_jsonld): +def convert(schema, base_schema, output_jsonld): """ Running CLI to convert data model specification in CSV format to data model in JSON-LD format. 
""" + # Instantiate Parser + data_model_parser = DataModelParser(schema) + + #Parse Model + parse_data_model = data_model_parser.parse_model() + + ''' # convert RFC to Data Model base_se = _convert_csv_to_data_model(schema_csv, base_schema) @@ -72,4 +79,4 @@ def convert(schema_csv, base_schema, output_jsonld): click.echo(f"The Data Model was created and saved to '{output_jsonld}' location.") except: click.echo(f"The Data Model could not be created by using '{output_jsonld}' location. Please check your file path again") - + ''' From 6e5ae6d714b649a2d7467545434832d794e4195f Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Jan 2023 10:10:01 -0800 Subject: [PATCH 003/239] WIP: Add skeleton code for data_model_graph refactor work --- schematic/schemas/data_model_graph.py | 1 - 1 file changed, 1 deletion(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index 55b2c4f34..308537387 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -87,7 +87,6 @@ def __init__(): ''' self.data_model_graph = DataModelGraph.generate_data_model_graph(data_model) - def get_adjacent_nodes_by_relationship(): return From 9f689053494aa7399a7384defb18a792b6ce8928 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Jan 2023 10:10:25 -0800 Subject: [PATCH 004/239] WIP: Add skeleton code for data_model_jsonld refactor work --- schematic/schemas/data_model_jsonld.py | 43 +++++++++++++++++++------- 1 file changed, 32 insertions(+), 11 deletions(-) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 7e42177c0..6a9bf6bcb 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -1,25 +1,43 @@ -class DataModelJsonLD(): - def __init__(): +class DataModelJsonLD(object): + ''' + Interface to JSONLD_object + ''' + + def __init__(data_model_graph): def generate_data_model_jsonld(self): + ''' + Will call JSONLD_object 
class to create properties and classes in the process. + ''' + pass class JSONLD_object(): ''' Decorator class design + Base decorator class. ''' - def __init__(): + _template: template = None + + def __init__(self, to_template) -> None: self.to_template() - def to_template(self): + def _create_template(self): ''' Returns jsonld_class_template or jsonld_property_template ''' - return + return self._template -class JSONLD_property(): - def __init__(): - self.to_template() + @property + def to_template(self): + return self._template.to_template() + +class JSONLD_property(JSONLD_object): + ''' + Property Decorator + ''' + def to_template(self) + return JSONLD_property(self._template.to_template()) def explore_property(): return @@ -27,9 +45,12 @@ def explore_property(): def edit_property(): return -class JSONLD_class(): - def __init__(): - self.to_template() +class JSONLD_class(JSONLD_object): + ''' + Class Decorator + ''' + def to_template(self) + return JSONLD_class(self._template.to_template()) def explore_class(): return From 749773d2caa0c2703173967add786ea455dbe634 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Jan 2023 10:10:49 -0800 Subject: [PATCH 005/239] WIP: Add skeleton code for data_model_parser refactor work --- schematic/schemas/data_model_parser.py | 236 +++++++++++++++++++++++-- 1 file changed, 224 insertions(+), 12 deletions(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index c903aac1a..71ae6d937 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -1,25 +1,237 @@ +#import numpy as np +import json +import logging +import pandas as pd +import pathlib +from typing import Any, Dict, Optional, Text, List + +from schematic.utils.df_utils import load_df +from schematic.utils.io_utils import load_json + +logger = logging.getLogger(__name__) + + class DataModelParser(): + ''' + This class takes in a path to a data model (either CSV for JSONLD for 
now) + and will convert it to attributes and relationships that can then + be further converted into a graph data model. Other data model types + may be added in the future. + + ''' def __init__( + self, path_to_data_model: str, - ): + ) -> None: - model_type = self.get_model_type(path_to_data_model) - parse_model(model_type) + self.path_to_data_model = path_to_data_model + self.model_type = self.get_model_type(path_to_data_model) def get_model_type(self, path_to_data_model): - - return + ''' + Parses the path to the data model to extract the extension and determine the data model type. + ''' + model_type = pathlib.Path(path_to_data_model).suffix.replace('.', '').upper() + return model_type - def parse_model(self, model_type) - if model_type == 'csv' - DataModelCSVParser.parse_csv_model() - elif model_type == 'jsonld' - DataModelJSONLDParser.parse_jsonld_model() + def parse_model(self): + ''' + Given a data model type, instantiate and call the appropriate data model parser. + ''' + if self.model_type == 'CSV': + csv_parser = DataModelCSVParser() + csv_parser.parse_csv_model(self.path_to_data_model) + elif self.model_type == 'JSONLD': + jsonld_parser = DataModelJSONLDParser() + jsonld_parser.parse_jsonld_model(self.path_to_data_model) return class DataModelCSVParser(): - def __init__(): + ''' + + ''' + + def __init__( + self + ): + + self.required_headers = set( + [ + "Attribute", + "Description", + "Valid Values", + "DependsOn", + "Required", + "Parent", + "Properties", + "DependsOn Component", + "Source", + "Validation Rules", + ] + ) + + def check_schema_definition(self, model_df: pd.DataFrame) -> bool: + + """Checks if a schema definition data frame contains the right required headers. 
+ + See schema definition guide for more details + TODO: post and link schema definition guide + + Args: + schema_definition: a pandas dataframe containing schema definition; see example here: https://docs.google.com/spreadsheets/d/1J2brhqO4kpeHIkNytzlqrdIiRanXDr6KD2hqjOTC9hs/edit#gid=0 + Raises: Exception + """ + try: + if self.required_headers.issubset(set(list(model_df.columns))): + return + elif "Requires" in list(model_df.columns) or "Requires Component" in list( + model_df.columns + ): + raise ValueError( + "The input CSV schema file contains the 'Requires' and/or the 'Requires " + "Component' column headers. These columns were renamed to 'DependsOn' and " + "'DependsOn Component', respectively. Switch to the new column names." + ) + logger.debug("Schema definition csv ready for processing!") + except: + raise ValueError( + f"Schema extension headers: {set(list(model_df.columns))} " + f"do not match required schema headers: {self.required_headers}" + ) + return + + + def gather_csv_attributes_relationships(self, model_df): + ''' + Note: Modeled after the current df_parser.create_nx_schema_objects but without reliance + on the SE. Will just try to gather all the attributes and their relationships to one another. + They will be loaded into a graph at a later stage. + ''' + + # Check csv schema follows expectations. + self.check_schema_definition(model_df) + + #load into format that can be read by validator.py + + # get attributes from Attribute column + attributes = model_df[list(self.required_headers)].to_dict("records") + + # Build attribute/relationship dictionary + relationship_types = ['Parent', 'DependsOn', 'DependsOn Component'] + #Does not include anything like valid values or properties... + #Need to add these. 
+ + attr_rel_dictionary = {} + for attr in attributes: + attr_rel_dictionary.update({attr['Attribute']: {'Relationships': {}}}) + for relationship in relationship_types: + if not pd.isnull(attr[relationship]): + rels = attr[relationship].strip().split(',') + attr_rel_dictionary[attr['Attribute']]['Relationships'].update({relationship:rels}) + + return attr_rel_dictionary + + + def parse_csv_model( + self, + path_to_data_model: str, + ): + + ''' + Note: + Leave out loading the base schema for now. Add it later at the + model graph stage. + + ''' + + # Load the csv data model to DF + model_df = load_df(path_to_data_model, data_model=True) + + # Gather info from the model + + model_dict = self.gather_csv_attributes_relationships(model_df) + + return model_dict class DataModelJSONLDParser(): - def __init__(): + def __init__( + self, + ): + ''' + Does not include anything like valid values or properties... + Need to add these. + + ''' + + self.relationship_types = { + 'sms:requiresDependency': 'DependsOn', + 'sms:requiresComponent': 'DependsOn Component', + 'rdfs:subClassOf': 'Parent', + 'sms:validationRules': 'Validation Rules', + 'schema:rangeIncludes': 'Valid Values', + } + + + + def gather_jsonld_attributes_relationships( + self, + model_jsonld): + ''' + Note: unlike a CSV the JSONLD might already have the biothings schema attached to it. + So the output may not initially look identical. + ''' + model_ids = [v['@id'] for v in model_jsonld] + attr_rel_dictionary = {} + + # For each entry in the jsonld model + for entry in model_jsonld: + + # Check to see if it has been assigned as a subclass as an attribute or parent. + if 'rdfs:subClassOf' in entry.keys(): + + # Checking if subclass type is list, actually gets rid of Biothings. + if type(entry['rdfs:subClassOf']) == list: + + # Determine if the id the entry has been assigned as a sublcass of is also recoreded + # as a model id. If it is, then the entry is not an attribute itself, but a valid value. 
+ subclass_id = entry['rdfs:subClassOf'][0]['@id'] + if not subclass_id in model_ids: + + # Get the id of the entry + entry_id = entry['@id'].split(':')[1] + + # If the entry is an attribute that has not already been added to the dictionary, add it. + if entry_id not in attr_rel_dictionary.keys(): + attr_rel_dictionary.update({entry_id: {'Relationships': {}}}) + + + for relationship in self.relationship_types.keys(): + if relationship in entry.keys(): + if entry[relationship] != []: + if type(entry[relationship][0]) == dict: + rels = [r['@id'].split(':')[1] for r in entry[relationship]] + else: + rels = entry[relationship] + attr_rel_dictionary[ + entry_id]['Relationships'].update( + {self.relationship_types[relationship]:rels}) + + return attr_rel_dictionary + + def parse_jsonld_model( + self, + path_to_data_model:str, + ): + ''' + + + ''' + # Load the json_ld model to df + + json_load = load_json(path_to_data_model) + model_dict = self.gather_jsonld_attributes_relationships(json_load['@graph']) + breakpoint() + + return model_dict + From 9ab9a63179e9e09605c68330476de0fe28ac3c60 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 7 Feb 2023 11:36:38 -0800 Subject: [PATCH 006/239] allow data_model_parser to send base_schema --- schematic/schemas/commands.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/schemas/commands.py b/schematic/schemas/commands.py index be2fec25c..e6d6ecc9b 100644 --- a/schematic/schemas/commands.py +++ b/schematic/schemas/commands.py @@ -53,7 +53,7 @@ def convert(schema, base_schema, output_jsonld): data model in JSON-LD format. 
""" # Instantiate Parser - data_model_parser = DataModelParser(schema) + data_model_parser = DataModelParser(schema, base_schema) #Parse Model parse_data_model = data_model_parser.parse_model() From a8ec3942dd3312549455cb615c0d87981a42cd44 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 7 Feb 2023 11:39:11 -0800 Subject: [PATCH 007/239] WIP: add biothings as an optional add on --- schematic/schemas/data_model_parser.py | 482 ++++++++++++++----------- 1 file changed, 265 insertions(+), 217 deletions(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 71ae6d937..6ebe78f28 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -8,230 +8,278 @@ from schematic.utils.df_utils import load_df from schematic.utils.io_utils import load_json +from schematic import LOADER + logger = logging.getLogger(__name__) class DataModelParser(): - ''' - This class takes in a path to a data model (either CSV for JSONLD for now) - and will convert it to attributes and relationships that can then - be further converted into a graph data model. Other data model types - may be added in the future. - - ''' - def __init__( - self, - path_to_data_model: str, - ) -> None: - - self.path_to_data_model = path_to_data_model - self.model_type = self.get_model_type(path_to_data_model) - - def get_model_type(self, path_to_data_model): - ''' - Parses the path to the data model to extract the extension and determine the data model type. - ''' - model_type = pathlib.Path(path_to_data_model).suffix.replace('.', '').upper() - return model_type - - def parse_model(self): - ''' - Given a data model type, instantiate and call the appropriate data model parser. 
- ''' - if self.model_type == 'CSV': - csv_parser = DataModelCSVParser() - csv_parser.parse_csv_model(self.path_to_data_model) - elif self.model_type == 'JSONLD': - jsonld_parser = DataModelJSONLDParser() - jsonld_parser.parse_jsonld_model(self.path_to_data_model) - return + ''' + This class takes in a path to a data model (either CSV for JSONLD for now) + and will convert it to attributes and relationships that can then + be further converted into a graph data model. Other data model types + may be added in the future. + + ''' + def __init__( + self, + path_to_data_model: str, + base_schema_path: None, + ) -> None: + + self.path_to_data_model = path_to_data_model + self.model_type = self.get_model_type(path_to_data_model) + self.base_schema_path = base_schema_path + + def _get_base_schema_path(self, base_schema: str = None) -> str: + """Evaluate path to base schema. + + Args: + base_schema: Path to base data model. BioThings data model is loaded by default. + + Returns: + base_schema_path: Path to base schema based on provided argument. + """ + biothings_schema_path = LOADER.filename("data_models/biothings.model.jsonld") + base_schema_path = biothings_schema_path if base_schema is None else base_schema + + return base_schema_path + + def get_model_type(self, path_to_data_model): + ''' + Parses the path to the data model to extract the extension and determine the data model type. + ''' + model_type = pathlib.Path(path_to_data_model).suffix.replace('.', '').upper() + return model_type + + def parse_base_model(self): + ''' + Add biothings to both models for consistency. + + Do separately from both parsers for clarity. + + Should this be its own class? + + Input: Base model path, if None do not add base model. 
+ + ''' + + if self.base_schema_path == 'No base model': + return + else: + # determine base schema path + base_model_path = self._get_base_schema_path(self.base_schema_path) + + # parse + jsonld_parser = DataModelJSONLDParser() + base_model = jsonld_parser.parse_jsonld_model(base_model_path) + breakpoint() + return base_model + + def parse_model(self): + ''' + Given a data model type, instantiate and call the appropriate data model parser. + ''' + if self.model_type == 'CSV': + csv_parser = DataModelCSVParser() + csv_parser.parse_csv_model(self.path_to_data_model) + elif self.model_type == 'JSONLD': + jsonld_parser = DataModelJSONLDParser() + jsonld_parser.parse_jsonld_model(self.path_to_data_model) + + base_model = self.parse_base_model() + return class DataModelCSVParser(): - ''' - - ''' - - def __init__( - self - ): - - self.required_headers = set( - [ - "Attribute", - "Description", - "Valid Values", - "DependsOn", - "Required", - "Parent", - "Properties", - "DependsOn Component", - "Source", - "Validation Rules", - ] - ) - - def check_schema_definition(self, model_df: pd.DataFrame) -> bool: - - """Checks if a schema definition data frame contains the right required headers. - - See schema definition guide for more details - TODO: post and link schema definition guide - - Args: - schema_definition: a pandas dataframe containing schema definition; see example here: https://docs.google.com/spreadsheets/d/1J2brhqO4kpeHIkNytzlqrdIiRanXDr6KD2hqjOTC9hs/edit#gid=0 - Raises: Exception - """ - try: - if self.required_headers.issubset(set(list(model_df.columns))): - return - elif "Requires" in list(model_df.columns) or "Requires Component" in list( - model_df.columns - ): - raise ValueError( - "The input CSV schema file contains the 'Requires' and/or the 'Requires " - "Component' column headers. These columns were renamed to 'DependsOn' and " - "'DependsOn Component', respectively. Switch to the new column names." 
- ) - logger.debug("Schema definition csv ready for processing!") - except: - raise ValueError( - f"Schema extension headers: {set(list(model_df.columns))} " - f"do not match required schema headers: {self.required_headers}" - ) - return - - - def gather_csv_attributes_relationships(self, model_df): - ''' - Note: Modeled after the current df_parser.create_nx_schema_objects but without reliance - on the SE. Will just try to gather all the attributes and their relationships to one another. - They will be loaded into a graph at a later stage. - ''' - - # Check csv schema follows expectations. - self.check_schema_definition(model_df) - - #load into format that can be read by validator.py - - # get attributes from Attribute column - attributes = model_df[list(self.required_headers)].to_dict("records") - - # Build attribute/relationship dictionary - relationship_types = ['Parent', 'DependsOn', 'DependsOn Component'] - #Does not include anything like valid values or properties... - #Need to add these. - - attr_rel_dictionary = {} - for attr in attributes: - attr_rel_dictionary.update({attr['Attribute']: {'Relationships': {}}}) - for relationship in relationship_types: - if not pd.isnull(attr[relationship]): - rels = attr[relationship].strip().split(',') - attr_rel_dictionary[attr['Attribute']]['Relationships'].update({relationship:rels}) - - return attr_rel_dictionary - - - def parse_csv_model( - self, - path_to_data_model: str, - ): - - ''' - Note: - Leave out loading the base schema for now. Add it later at the - model graph stage. 
- - ''' - - # Load the csv data model to DF - model_df = load_df(path_to_data_model, data_model=True) - - # Gather info from the model - - model_dict = self.gather_csv_attributes_relationships(model_df) - - return model_dict + ''' + + ''' + + def __init__( + self + ): + + self.required_headers = set( + [ + "Attribute", + "Description", + "Valid Values", + "DependsOn", + "Required", + "Parent", + "Properties", + "DependsOn Component", + "Source", + "Validation Rules", + ] + ) + + def check_schema_definition(self, model_df: pd.DataFrame) -> bool: + + """Checks if a schema definition data frame contains the right required headers. + + See schema definition guide for more details + TODO: post and link schema definition guide + + Args: + schema_definition: a pandas dataframe containing schema definition; see example here: https://docs.google.com/spreadsheets/d/1J2brhqO4kpeHIkNytzlqrdIiRanXDr6KD2hqjOTC9hs/edit#gid=0 + Raises: Exception + """ + try: + if self.required_headers.issubset(set(list(model_df.columns))): + return + elif "Requires" in list(model_df.columns) or "Requires Component" in list( + model_df.columns + ): + raise ValueError( + "The input CSV schema file contains the 'Requires' and/or the 'Requires " + "Component' column headers. These columns were renamed to 'DependsOn' and " + "'DependsOn Component', respectively. Switch to the new column names." + ) + logger.debug("Schema definition csv ready for processing!") + except: + raise ValueError( + f"Schema extension headers: {set(list(model_df.columns))} " + f"do not match required schema headers: {self.required_headers}" + ) + return + + + def gather_csv_attributes_relationships(self, model_df): + ''' + Note: Modeled after the current df_parser.create_nx_schema_objects but without reliance + on the SE. Will just try to gather all the attributes and their relationships to one another. + They will be loaded into a graph at a later stage. + ''' + + # Check csv schema follows expectations. 
+ self.check_schema_definition(model_df) + + #load into format that can be read by validator.py + + # get attributes from Attribute column + attributes = model_df[list(self.required_headers)].to_dict("records") + + # Build attribute/relationship dictionary + relationship_types = ['Parent', 'DependsOn', 'DependsOn Component'] + #Does not include anything like valid values or properties... + #Need to add these. + + attr_rel_dictionary = {} + for attr in attributes: + attr_rel_dictionary.update({attr['Attribute']: {'Relationships': {}}}) + for relationship in relationship_types: + if not pd.isnull(attr[relationship]): + rels = attr[relationship].strip().split(',') + attr_rel_dictionary[attr['Attribute']]['Relationships'].update({relationship:rels}) + + return attr_rel_dictionary + + + def parse_csv_model( + self, + path_to_data_model: str, + ): + + ''' + Note: + Leave out loading the base schema for now. Add it later at the + model graph stage. + + ''' + + # Load the csv data model to DF + model_df = load_df(path_to_data_model, data_model=True) + + # Gather info from the model + + model_dict = self.gather_csv_attributes_relationships(model_df) + + breakpoint() + return model_dict class DataModelJSONLDParser(): - def __init__( - self, - ): - ''' - Does not include anything like valid values or properties... - Need to add these. - - ''' - - self.relationship_types = { - 'sms:requiresDependency': 'DependsOn', - 'sms:requiresComponent': 'DependsOn Component', - 'rdfs:subClassOf': 'Parent', - 'sms:validationRules': 'Validation Rules', - 'schema:rangeIncludes': 'Valid Values', - } - - - - def gather_jsonld_attributes_relationships( - self, - model_jsonld): - ''' - Note: unlike a CSV the JSONLD might already have the biothings schema attached to it. - So the output may not initially look identical. 
- ''' - model_ids = [v['@id'] for v in model_jsonld] - attr_rel_dictionary = {} - - # For each entry in the jsonld model - for entry in model_jsonld: - - # Check to see if it has been assigned as a subclass as an attribute or parent. - if 'rdfs:subClassOf' in entry.keys(): - - # Checking if subclass type is list, actually gets rid of Biothings. - if type(entry['rdfs:subClassOf']) == list: - - # Determine if the id the entry has been assigned as a sublcass of is also recoreded - # as a model id. If it is, then the entry is not an attribute itself, but a valid value. - subclass_id = entry['rdfs:subClassOf'][0]['@id'] - if not subclass_id in model_ids: - - # Get the id of the entry - entry_id = entry['@id'].split(':')[1] - - # If the entry is an attribute that has not already been added to the dictionary, add it. - if entry_id not in attr_rel_dictionary.keys(): - attr_rel_dictionary.update({entry_id: {'Relationships': {}}}) - - - for relationship in self.relationship_types.keys(): - if relationship in entry.keys(): - if entry[relationship] != []: - if type(entry[relationship][0]) == dict: - rels = [r['@id'].split(':')[1] for r in entry[relationship]] - else: - rels = entry[relationship] - attr_rel_dictionary[ - entry_id]['Relationships'].update( - {self.relationship_types[relationship]:rels}) - - return attr_rel_dictionary - - def parse_jsonld_model( - self, - path_to_data_model:str, - ): - ''' - - - ''' - # Load the json_ld model to df - - json_load = load_json(path_to_data_model) - model_dict = self.gather_jsonld_attributes_relationships(json_load['@graph']) - breakpoint() - - return model_dict + def __init__( + self, + ): + ''' + Does not include anything like valid values or properties... + Need to add these. + + ''' + + # Do not pull in label in this step so it can be determined + # later by our program to allow consisency. 
+ + self.relationship_types = { + 'sms:requiresDependency': 'DependsOn', + 'sms:requiresComponent': 'DependsOn Component', + 'rdfs:subClassOf': 'Parent', + 'sms:validationRules': 'Validation Rules', + 'schema:rangeIncludes': 'Valid Values', + 'rdfs:comment': 'Description', + 'sms:required': 'Required'} + + + + def gather_jsonld_attributes_relationships( + self, + model_jsonld): + ''' + Note: unlike a CSV the JSONLD might already have the biothings schema attached to it. + So the output may not initially look identical. + ''' + model_ids = [v['@id'] for v in model_jsonld] + attr_rel_dictionary = {} + breakpoint() + # For each entry in the jsonld model + for entry in model_jsonld: + # Check to see if it has been assigned as a subclass as an attribute or parent. + if 'rdfs:subClassOf' in entry.keys(): + + # Checking if subclass type is list, actually gets rid of Biothings. + if type(entry['rdfs:subClassOf']) == list: + + # Determine if the id the entry has been assigned as a sublcass of is also recoreded + # as a model id. If it is, then the entry is not an attribute itself, but a valid value. + subclass_id = entry['rdfs:subClassOf'][0]['@id'] + if not subclass_id in model_ids: + + # Get the id of the entry + entry_id = entry['@id'].split(':')[1] + + # If the entry is an attribute that has not already been added to the dictionary, add it. 
+ if entry_id not in attr_rel_dictionary.keys(): + attr_rel_dictionary.update({entry_id: {'Relationships': {}}}) + + + for relationship in self.relationship_types.keys(): + if relationship in entry.keys(): + if entry[relationship] != []: + if type(entry[relationship][0]) == dict: + rels = [r['@id'].split(':')[1] for r in entry[relationship]] + else: + rels = entry[relationship] + attr_rel_dictionary[ + entry_id]['Relationships'].update( + {self.relationship_types[relationship]:rels}) + + return attr_rel_dictionary + + def parse_jsonld_model( + self, + path_to_data_model:str, + ): + ''' + Note: Converting JSONLD to look *Exactly* like the csv output would get rid + of a lot of information. Will need to decide later if we want to + preserve this information in some way. + + ''' + # Load the json_ld model to df + + json_load = load_json(path_to_data_model) + model_dict = self.gather_jsonld_attributes_relationships(json_load['@graph']) + return model_dict From 18b72e5290c3ddd6c6a7995829b52ead96ba6099 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 7 Feb 2023 11:41:51 -0800 Subject: [PATCH 008/239] WIP: add note about namespaces --- schematic/schemas/data_model_parser.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 6ebe78f28..485b7c7e8 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -20,6 +20,10 @@ class DataModelParser(): be further converted into a graph data model. Other data model types may be added in the future. + TODO: + + Make sure to build with namespace contexts in mind! 
+ ''' def __init__( self, From 41fd9692959fd20fe0a94ab9a16ed82411210d88 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 9 Mar 2023 20:17:19 -0800 Subject: [PATCH 009/239] WIP: start merging parser with graph --- schematic/schemas/commands.py | 13 ++- schematic/schemas/data_model_edges.py | 3 +- schematic/schemas/data_model_graph.py | 127 ++++++++++++++----------- schematic/schemas/data_model_nodes.py | 5 +- schematic/schemas/data_model_parser.py | 20 ++-- 5 files changed, 98 insertions(+), 70 deletions(-) diff --git a/schematic/schemas/commands.py b/schematic/schemas/commands.py index e6d6ecc9b..b8fb1c7da 100644 --- a/schematic/schemas/commands.py +++ b/schematic/schemas/commands.py @@ -7,6 +7,7 @@ import re from schematic.schemas.data_model_parser import DataModelParser +from schematic.schemas.data_model_graph import DataModelGraph from schematic.utils.cli_utils import query_dict from schematic.help import schema_commands @@ -52,11 +53,21 @@ def convert(schema, base_schema, output_jsonld): Running CLI to convert data model specification in CSV format to data model in JSON-LD format. 
""" + + # TO DO: Throw these steps into their own function + # Instantiate Parser data_model_parser = DataModelParser(schema, base_schema) #Parse Model - parse_data_model = data_model_parser.parse_model() + parsed_data_model = data_model_parser.parse_model() + + # Convert parsed model to graph + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + graph_data_model = data_model_grapher.generate_data_model_graph() + ''' # convert RFC to Data Model diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py index 8c5659603..7d0b36ba3 100644 --- a/schematic/schemas/data_model_edges.py +++ b/schematic/schemas/data_model_edges.py @@ -1,5 +1,6 @@ class DataModelEdges(): - def __init__(): + def __init__(self): + return def generate_edge(): return diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index 308537387..7b9d72dbb 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -12,6 +12,9 @@ from networkx.algorithms.cycles import find_cycle from networkx.readwrite import json_graph +from schematic.schemas.data_model_edges.DataModelEdges import generate_edge, edit_edge +from schematic.schemas.data_model_nodes.DataModelNodes import generate_node, edit_node + from schematic.utils.curie_utils import ( expand_curies_in_schema, uri2label, @@ -33,7 +36,6 @@ validate_schema, ) from schematic.schemas.curie import uri2curie, curie2uri -from schematic.schemas.data_model_parser import parse_model namespaces = dict(rdf=Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#")) @@ -44,7 +46,7 @@ class DataModelGraphMeta(object): - _instances = {} + _instances = {} def __call__(cls, *args, **kwargs): """ @@ -57,76 +59,91 @@ def __call__(cls, *args, **kwargs): return cls._instances[cls] -class DataModelGraph(metaclass=DataModelGraphMeta): - ''' - Generate graph network (networkx) from the attributes and relationships returned - fromt he data 
model parser. +class DataModelGraph(): + ''' + Generate graph network (networkx) from the attributes and relationships returned + fromt he data model parser. + + Create a singleton. + ''' + __metaclass__ = DataModelGraphMeta + def __init__(self, parsed_data_model): + '''Load parsed data model. + ''' + + self.data_model = parsed_data_model + + if not self.data_model: + raise ValueError( + "Something has gone wrong, a data model was not loaded into the DataModelGraph Class. Please check that your paths are correct" + ) + + + def generate_data_model_graph(self): + '''Generate NetworkX Graph from the Relationships/attributes dictionary + + ''' + + G = nx.MultiDiGraph() + for attribute, relationships in self.data_model: + node = generate_node(G, attribute, relationship) + + + - Create a singleton. - ''' - def __init__( - path_to_data_model: str = None, - ): - ''' - If no path_to_data_model, load default schema. - ''' - self.data_model = parse_model(path_to_data_model) - def generate_data_model_graph(self, data_model): - ''' - - ''' - data_model_graph = None - return data_model_graph + data_model_graph = None + breakpoint() + return data_model_graph class DataModelGraphExporer(): - def __init__(): - ''' - Load data model graph as a singleton. - ''' - self.data_model_graph = DataModelGraph.generate_data_model_graph(data_model) + def __init__(): + ''' + Load data model graph as a singleton. 
+ ''' + #self.data_model_graph = DataModelGraph.generate_data_model_graph(data_model) - def get_adjacent_nodes_by_relationship(): - return + def get_adjacent_nodes_by_relationship(): + return - def get_component_requirements(): - return + def get_component_requirements(): + return - def get_component_requirements_graph(): - return + def get_component_requirements_graph(): + return - def get_descendants_by_edge_type(): - return + def get_descendants_by_edge_type(): + return - def get_digraph_by_edge_type(): - return + def get_digraph_by_edge_type(): + return - def get_edges_by_relationship(): - return + def get_edges_by_relationship(): + return - def get_node_definition(): - return + def get_node_definition(): + return - def get_node_dependencies(): - return + def get_node_dependencies(): + return - def get_node_label(): - return + def get_node_label(): + return - def find_adjacent_child_classes(): - return + def find_adjacent_child_classes(): + return - def find_all_class_properties(): - return + def find_all_class_properties(): + return - def find_class_specific_properties(): - return + def find_class_specific_properties(): + return - def find_class_usages(): - return + def find_class_usages(): + return - def is_node_required(): - return + def is_node_required(): + return - \ No newline at end of file + \ No newline at end of file diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index c24935933..e9ebd1459 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -1,7 +1,8 @@ class DataModelNodes(): - def __init__(): + def __init__(self): + return - def generate_node(): + def generate_node(self, attribute, relationship): return def edit_node(): diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 485b7c7e8..5bfefae73 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -77,7 +77,6 @@ def 
parse_base_model(self): # parse jsonld_parser = DataModelJSONLDParser() base_model = jsonld_parser.parse_jsonld_model(base_model_path) - breakpoint() return base_model def parse_model(self): @@ -86,13 +85,13 @@ def parse_model(self): ''' if self.model_type == 'CSV': csv_parser = DataModelCSVParser() - csv_parser.parse_csv_model(self.path_to_data_model) + model_dict = csv_parser.parse_csv_model(self.path_to_data_model) elif self.model_type == 'JSONLD': jsonld_parser = DataModelJSONLDParser() - jsonld_parser.parse_jsonld_model(self.path_to_data_model) + model_dict = jsonld_parser.parse_jsonld_model(self.path_to_data_model) base_model = self.parse_base_model() - return + return model_dict class DataModelCSVParser(): ''' @@ -165,16 +164,18 @@ def gather_csv_attributes_relationships(self, model_df): attributes = model_df[list(self.required_headers)].to_dict("records") # Build attribute/relationship dictionary - relationship_types = ['Parent', 'DependsOn', 'DependsOn Component'] - #Does not include anything like valid values or properties... - #Need to add these. 
+ relationship_types = list(self.required_headers) + relationship_types.remove("Attribute") attr_rel_dictionary = {} for attr in attributes: attr_rel_dictionary.update({attr['Attribute']: {'Relationships': {}}}) for relationship in relationship_types: if not pd.isnull(attr[relationship]): - rels = attr[relationship].strip().split(',') + if type(attr[relationship]) == bool: + rels = attr[relationship] + else: + rels = attr[relationship].strip().split(',') attr_rel_dictionary[attr['Attribute']]['Relationships'].update({relationship:rels}) return attr_rel_dictionary @@ -199,7 +200,6 @@ def parse_csv_model( model_dict = self.gather_csv_attributes_relationships(model_df) - breakpoint() return model_dict class DataModelJSONLDParser(): @@ -235,7 +235,6 @@ def gather_jsonld_attributes_relationships( ''' model_ids = [v['@id'] for v in model_jsonld] attr_rel_dictionary = {} - breakpoint() # For each entry in the jsonld model for entry in model_jsonld: # Check to see if it has been assigned as a subclass as an attribute or parent. 
@@ -256,7 +255,6 @@ def gather_jsonld_attributes_relationships( if entry_id not in attr_rel_dictionary.keys(): attr_rel_dictionary.update({entry_id: {'Relationships': {}}}) - for relationship in self.relationship_types.keys(): if relationship in entry.keys(): if entry[relationship] != []: From bf5a7940f49de7eac67f8f8b2d10573fc0a25ca9 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 14 Mar 2023 11:19:58 -0700 Subject: [PATCH 010/239] WIP: start gather nodes to add --- schematic/schemas/data_model_graph.py | 25 ++++++------ schematic/schemas/data_model_nodes.py | 57 ++++++++++++++++++++++++--- 2 files changed, 65 insertions(+), 17 deletions(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index 7b9d72dbb..68ae1304e 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -12,8 +12,8 @@ from networkx.algorithms.cycles import find_cycle from networkx.readwrite import json_graph -from schematic.schemas.data_model_edges.DataModelEdges import generate_edge, edit_edge -from schematic.schemas.data_model_nodes.DataModelNodes import generate_node, edit_node +from schematic.schemas.data_model_edges import DataModelEdges +from schematic.schemas.data_model_nodes import DataModelNodes from schematic.utils.curie_utils import ( expand_curies_in_schema, @@ -72,6 +72,8 @@ def __init__(self, parsed_data_model): ''' self.data_model = parsed_data_model + self.dmn = DataModelNodes() + self.dme = DataModelEdges() if not self.data_model: raise ValueError( @@ -83,19 +85,20 @@ def generate_data_model_graph(self): '''Generate NetworkX Graph from the Relationships/attributes dictionary ''' - + # Instantiate NetworkX MultiDigraph G = nx.MultiDiGraph() - for attribute, relationships in self.data_model: - node = generate_node(G, attribute, relationship) - - - - + # Add nodes to the graph + ## Find nodes all nodes + all_nodes = self.dmn.gather_all_nodes(self.data_model) + + ## Generate Nodes + for node in 
all_nodes: + G = self.dmn.generate_node(G, all_nodes, self.data_model) + #node = generate_node(G, attribute, relationship) - data_model_graph = None breakpoint() - return data_model_graph + return G class DataModelGraphExporer(): def __init__(): diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index e9ebd1459..ce726af3a 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -1,9 +1,54 @@ class DataModelNodes(): - def __init__(self): - return + def __init__(self): + return - def generate_node(self, attribute, relationship): - return + def node_present(self, G, node_name): + if node_name in G.nodes(): + return True + else: + return False - def edit_node(): - return \ No newline at end of file + def gather_nodes(self, attr_info): + """Take in a tuple containing attriute name and relationship dictionary, and find all nodes defined in attribute information. + Args: + + Returns: + list, nodes defined by attribure_info as being related to that attribute. 
+ """ + rel_w_nodes = [ + "Valid Values", + "DependsOn", + "Parent", + "Properties", + "DependsOn Component", + ] + attribute, relationship = attr_info + relationship = relationship['Relationships'] + + nodes = [] + if attribute not in nodes: + nodes.append(attribute) + for rel in rel_w_nodes: + if rel in relationship.keys(): + nodes.extend([node.strip() + for node in relationship[rel]]) + return nodes + + def gather_all_nodes(self, data_model): + """ + + """ + all_nodes = [] + for attr_info in data_model.items(): + nodes = self.gather_nodes(attr_info=attr_info) + all_nodes.extend(nodes) + all_nodes = [*set(all_nodes)] + return all_nodes + + def generate_node(self, G, all_nodes, data_model): + """ + """ + return + + def edit_node(): + return \ No newline at end of file From 264378d7a5914569ca56e13e4ac98435e10c4067 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 15 Mar 2023 15:23:22 -0700 Subject: [PATCH 011/239] WIP: working on adding nodes/edge to data model graph --- schematic/schemas/data_model_edges.py | 3 +- schematic/schemas/data_model_graph.py | 5 +-- schematic/schemas/data_model_nodes.py | 50 ++++++++++++++++++++++++--- 3 files changed, 51 insertions(+), 7 deletions(-) diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py index 7d0b36ba3..5e97cef63 100644 --- a/schematic/schemas/data_model_edges.py +++ b/schematic/schemas/data_model_edges.py @@ -2,7 +2,8 @@ class DataModelEdges(): def __init__(self): return - def generate_edge(): + def generate_edge(G, attribute, relationship, node_dict): + return def edit_edge(): diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index 68ae1304e..00993d3d0 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -94,8 +94,9 @@ def generate_data_model_graph(self): ## Generate Nodes for node in all_nodes: - G = self.dmn.generate_node(G, all_nodes, self.data_model) - #node = generate_node(G, 
attribute, relationship) + node_dict = self.dmn.generate_node_dict(node, self.data_model) + G = self.dmn.generate_edges(G, attribute, relationship, node_dict) + G = self.dmn.generate_node(G, node_dict) breakpoint() return G diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index ce726af3a..621b1ba48 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -1,3 +1,5 @@ +from utils.schema_util import get_property_label_from_display_name, get_class_label_from_display_name + class DataModelNodes(): def __init__(self): return @@ -7,7 +9,7 @@ def node_present(self, G, node_name): return True else: return False - + ''' def gather_nodes(self, attr_info): """Take in a tuple containing attriute name and relationship dictionary, and find all nodes defined in attribute information. Args: @@ -33,6 +35,41 @@ def gather_nodes(self, attr_info): nodes.extend([node.strip() for node in relationship[rel]]) return nodes + ''' + def gather_nodes(self, attr_info): + """Take in a tuple containing attriute name and relationship dictionary, and find all nodes defined in attribute information. + Args: + + Returns: + list, nodes defined by attribure_info as being related to that attribute. 
+ """ + rel_w_nodes = [ + "Valid Values", + "DependsOn", + "Parent", + "Properties", + "DependsOn Component", + ] + attribute, relationship = attr_info + relationship = relationship['Relationships'] + + nodes = {} + node = {} + if attribute not in nodes: + node['display_name'] = attribute + node['label'] = get_class_label_from_display_name(attribute) + nodes[node['label']] = node + for rel in rel_w_nodes: + if rel in relationship.keys(): + for n in relationship[rel]: + if rel == 'Properties': + node['type'] = 'Property' + node['label'] = get_property_label_from_display_name(n.strip()) + else: + node['type'] = 'Class' + node['label'] = get_class_label_from_display_name(n.strip()) + nodes[node['label']] = node + return nodes def gather_all_nodes(self, data_model): """ @@ -45,10 +82,15 @@ def gather_all_nodes(self, data_model): all_nodes = [*set(all_nodes)] return all_nodes - def generate_node(self, G, all_nodes, data_model): + def generate_node_dict(self, node, data_model): + """Gather information to be attached to each node. 
""" - """ - return + node_dict = {'display_name': node} + breakpoint() + return node_dict + + def generate_node(self, G, node_dict): + return G def edit_node(): return \ No newline at end of file From 9433072437bf635685229df55682be569676f471 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 15 Mar 2023 15:23:59 -0700 Subject: [PATCH 012/239] WIP: add name conversions to utils --- schematic/utils/schema_util.py | 36 ++++++++++++++++++++++++++++++---- 1 file changed, 32 insertions(+), 4 deletions(-) diff --git a/schematic/utils/schema_util.py b/schematic/utils/schema_util.py index f0fff6fa2..8ede62bf9 100644 --- a/schematic/utils/schema_util.py +++ b/schematic/utils/schema_util.py @@ -3,8 +3,36 @@ ''' -def get_property_label_from_display_name(): - return +def get_property_label_from_display_name(display_name, strict_camel_case = False): + """Convert a given display name string into a proper property label string""" + """ + label = ''.join(x.capitalize() or ' ' for x in display_name.split(' ')) + label = label[:1].lower() + label[1:] if label else '' + """ + # This is the newer more strict method + if strict_camel_case: + display_name = display_name.strip().translate({ord(c): "_" for c in string.whitespace}) + label = inflection.camelize(display_name, uppercase_first_letter=False) -def get_class_label_from_display_name(): - return \ No newline at end of file + # This method remains for backwards compatibility + else: + display_name = display_name.translate({ord(c): None for c in string.whitespace}) + label = inflection.camelize(display_name.strip(), uppercase_first_letter=False) + + return label + +def get_class_label_from_display_name(display_name, strict_camel_case = False): + """Convert a given display name string into a proper class label string""" + """ + label = ''.join(x.capitalize() or ' ' for x in display_name.split(' '))""" + # This is the newer more strict method + if strict_camel_case: + display_name = display_name.strip().translate({ord(c): "_" for 
c in string.whitespace}) + label = inflection.camelize(display_name, uppercase_first_letter=True) + + # This method remains for backwards compatibility + else: + display_name = display_name.translate({ord(c): None for c in string.whitespace}) + label = inflection.camelize(display_name.strip(), uppercase_first_letter=True) + + return label \ No newline at end of file From 7fa2b36d6ea9e8afae3115adcf5808b3df8a5944 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 25 May 2023 13:37:13 -0700 Subject: [PATCH 013/239] WIP: work in adding functionality to create JSONLD, rework structure of relationships --- schematic/schemas/commands.py | 40 +++ schematic/schemas/data_model_edges.py | 31 +- schematic/schemas/data_model_graph.py | 378 +++++++++++++++++++--- schematic/schemas/data_model_jsonld.py | 277 +++++++++++++--- schematic/schemas/data_model_nodes.py | 171 +++++++--- schematic/schemas/data_model_parser.py | 102 +++--- schematic/schemas/data_model_validator.py | 77 +++-- schematic/utils/schema_util.py | 26 +- 8 files changed, 898 insertions(+), 204 deletions(-) diff --git a/schematic/schemas/commands.py b/schematic/schemas/commands.py index b8fb1c7da..91894fecf 100644 --- a/schematic/schemas/commands.py +++ b/schematic/schemas/commands.py @@ -8,7 +8,11 @@ from schematic.schemas.data_model_parser import DataModelParser from schematic.schemas.data_model_graph import DataModelGraph +from schematic.schemas.data_model_validator import DataModelValidator +from schematic.schemas.data_model_jsonld import DataModelJsonLD, convert_graph_to_jsonld + from schematic.utils.cli_utils import query_dict +from schematic.utils.schema_util import export_schema from schematic.help import schema_commands logger = logging.getLogger(__name__) @@ -62,12 +66,48 @@ def convert(schema, base_schema, output_jsonld): #Parse Model parsed_data_model = data_model_parser.parse_model() + breakpoint() + # Convert parsed model to graph # Instantiate DataModelGraph data_model_grapher = 
DataModelGraph(parsed_data_model) graph_data_model = data_model_grapher.generate_data_model_graph() + # Validate generated data model. + data_model_validator = DataModelValidator(data_model=graph_data_model) + data_model_errors = data_model_validator.run_checks() + + # If there are errors log them. + if data_model_errors: + for err in data_model_errors: + if isinstance(err, str): + logger.error(err) + elif isinstance(err, list): + for e in err: + logger.error(e) + # Actually raise error here with message. + + #data_model_jsonld_converter = DataModelJsonLD() + jsonld_data_model = convert_graph_to_jsonld(Graph=graph_data_model) + + # output JSON-LD file alongside CSV file by default + if output_jsonld is None: + csv_no_ext = re.sub("[.]csv$", "", schema) + output_jsonld = csv_no_ext + ".jsonld" + + logger.info( + "By default, the JSON-LD output will be stored alongside the first " + f"input CSV file. In this case, it will appear here: '{output_jsonld}'. " + "You can use the `--output_jsonld` argument to specify another file path." + ) + + # saving updated schema.org schema + try: + export_schema(jsonld_data_model, output_jsonld) + click.echo(f"The Data Model was created and saved to '{output_jsonld}' location.") + except: + click.echo(f"The Data Model could not be created by using '{output_jsonld}' location. 
Please check your file path again") ''' # convert RFC to Data Model diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py index 5e97cef63..9cf8e4adc 100644 --- a/schematic/schemas/data_model_edges.py +++ b/schematic/schemas/data_model_edges.py @@ -1,10 +1,27 @@ class DataModelEdges(): - def __init__(self): - return + def __init__(self): + return - def generate_edge(G, attribute, relationship, node_dict): - - return + def generate_edge(self, G, node, all_node_dict, data_model, edge_relationships): + """ + Args: - def edit_edge(): - return \ No newline at end of file + Returns: + + Comment: + How do we best capture all the relationships we will be accounting for? + """ + for attribute_display_name, relationship in data_model.items(): + relationships = relationship['Relationships'] + + for key, val in edge_relationships.items(): + # For each relationship we are interested in + if key in relationships.keys(): + if node in relationships[key] and node != attribute_display_name: + print('Creating edge with node ' + node + ' and attribute ' + attribute_display_name) + G.add_edge(all_node_dict[node]['label'], all_node_dict[attribute_display_name]['label'], key=val) + + return G + + def edit_edge(): + return \ No newline at end of file diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index 00993d3d0..ca7536375 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -15,19 +15,19 @@ from schematic.schemas.data_model_edges import DataModelEdges from schematic.schemas.data_model_nodes import DataModelNodes +from schematic.schemas.data_model_relationships import ( + DataModelRelationships + ) + from schematic.utils.curie_utils import ( expand_curies_in_schema, uri2label, extract_name_from_uri_or_curie, ) from schematic.utils.general import find_duplicates + from schematic.utils.io_utils import load_default, load_json, load_schemaorg -from 
schematic.utils.schema_utils import ( - load_schema_into_networkx, - node_attrs_cleanup, - class_to_node, - relationship_edges, -) +from schematic.utils.schema_util import get_property_label_from_display_name, get_class_label_from_display_name from schematic.utils.general import dict2list, unlist from schematic.utils.viz_utils import visualize from schematic.utils.validate_utils import ( @@ -67,6 +67,7 @@ class DataModelGraph(): Create a singleton. ''' __metaclass__ = DataModelGraphMeta + def __init__(self, parsed_data_model): '''Load parsed data model. ''' @@ -74,6 +75,7 @@ def __init__(self, parsed_data_model): self.data_model = parsed_data_model self.dmn = DataModelNodes() self.dme = DataModelEdges() + self.data_model_relationships = DataModelRelationships() if not self.data_model: raise ValueError( @@ -85,69 +87,373 @@ def generate_data_model_graph(self): '''Generate NetworkX Graph from the Relationships/attributes dictionary ''' + # Get all relationships with edges + edge_relationships = self.data_model_relationships.define_edge_relationships() + # Instantiate NetworkX MultiDigraph G = nx.MultiDiGraph() - # Add nodes to the graph - ## Find nodes all nodes + # Find all nodes all_nodes = self.dmn.gather_all_nodes(self.data_model) - ## Generate Nodes + all_node_dict = {} + ## Fill in MultiDigraph with nodes and edges for node in all_nodes: + + # Gather information for each node node_dict = self.dmn.generate_node_dict(node, self.data_model) - G = self.dmn.generate_edges(G, attribute, relationship, node_dict) + + all_node_dict[node] = node_dict + # Generate node and attach information G = self.dmn.generate_node(G, node_dict) - breakpoint() + for node in all_nodes: + # Generate edges + G = self.dme.generate_edge(G, node, all_node_dict, self.data_model, edge_relationships) return G class DataModelGraphExporer(): - def __init__(): + def __init__(self, + G,): ''' Load data model graph as a singleton. 
''' - #self.data_model_graph = DataModelGraph.generate_data_model_graph(data_model) + self.graph = G + + def find_properties(self): + properties=[] + for node_1, node_2, rel in self.graph.edges: + if rel == 'domainIncludes': + properties.append(node_1) + properties = set(properties) + return properties + + def find_classes(self): + nodes = self.graph.nodes + properties = self.find_properties() + classes = nodes - properties + return classes + + def get_adjacent_nodes_by_relationship(self, + node: str, + relationship: str) -> List[str]: + """Get a list of nodes that is / are adjacent to a given node, based on a relationship type. + + Args: + node: the node whose edges we need to look at. + relationship: the type of link(s) that the above node and its immediate neighbors share. + + Returns: + List of nodes that are adjacent to the given node. + """ + nodes = set() - def get_adjacent_nodes_by_relationship(): - return + for (u, v, key, c) in self.graph.out_edges(node, data=True, keys=True): + if key == relationship: + nodes.add(v) - def get_component_requirements(): - return + return list(nodes) - def get_component_requirements_graph(): - return + def get_component_requirements(self, + source_component: str, + requires_component_relationship: str = "requiresComponent") -> List[str]: + """Get all components that are associated with a given source component and are required by it. - def get_descendants_by_edge_type(): - return + Args: + source_component: source component for which we need to find all required downstream components. - def get_digraph_by_edge_type(): - return + Returns: + List of nodes that are descendants from the source component are are related to the source through a specific component relationship. 
+ """ - def get_edges_by_relationship(): - return + req_components = list( + reversed( + self.get_descendants_by_edge_type( + source_component, requires_component_relationship, ordered=True + ) + ) + ) - def get_node_definition(): - return + return req_components - def get_node_dependencies(): - return + def get_component_requirements_graph(self, + source_component: str, + requires_component_relationship: str = "requiresComponent") -> nx.DiGraph: + """Get all components that are associated with a given source component and are required by it; return the components as a dependency graph (i.e. a DAG). - def get_node_label(): - return + Args: + source_component: source component for which we need to find all required downstream components. + + Returns: + A subgraph of the schema graph induced on nodes that are descendants from the source component and are related to the source through a specific component relationship. + """ + + # get a list of required component nodes + req_components = self.get_component_requirements(source_component) + + # get the subgraph induced on required component nodes + req_components_graph = self.get_subgraph_by_edge_type( + self.mm_graph, requires_component_relationship + ).subgraph(req_components) + + return req_components_graph + + def get_descendants_by_edge_type(self, + source_node: str, + relationship: str, + connected: bool = True, + ordered: bool = False, + ) -> List[str]: + """Get all nodes that are descendants of a given source node, based on a specific type of edge / relationship type. + + Args: + source_node: The node whose descendants need to be retreived. + relationship: Edge / link relationship type with possible values same as in above docs. + connected: If True, we need to ensure that all descendant nodes are reachable from the source node, i.e., they are part of the same connected component. + If False, the descendants could be in multiple connected components. + Default value is True. 
+ ordered: If True, the list of descendants will be topologically ordered. + If False, the list has no particular order (depends on the order in which the descendats were traversed in the subgraph). + + Returns: + List of nodes that are descendants from a particular node (sorted / unsorted) + """ + + root_descendants = nx.descendants(self.mm_graph, source_node) + + subgraph_nodes = list(root_descendants) + subgraph_nodes.append(source_node) + descendants_subgraph = self.mm_graph.subgraph(subgraph_nodes) + + # prune the descendants subgraph so as to include only those edges that match the relationship type + rel_edges = [] + for (u, v, key, c) in descendants_subgraph.edges(data=True, keys=True): + if key == relationship: + rel_edges.append((u, v)) + + relationship_subgraph = nx.DiGraph() + relationship_subgraph.add_edges_from(rel_edges) + + descendants = relationship_subgraph.nodes() + + if not descendants: + # return empty list if there are no nodes that are reachable from the source node based on this relationship type + return [] + + if connected and ordered: + # get the set of reachable nodes from the source node + descendants = nx.descendants(relationship_subgraph, source_node) + descendants.add(source_node) + + # normally, the descendants from a node are unordered (peculiarity of nx descendants call) + # form the subgraph on descendants and order it topologically + # this assumes an acyclic subgraph + descendants = nx.topological_sort( + relationship_subgraph.subgraph(descendants) + ) + elif connected: + # get the nodes that are reachable from a given source node + # after the pruning process above some nodes in the root_descendants subgraph might have become disconnected and will be omitted + descendants = nx.descendants(relationship_subgraph, source_node) + descendants.add(source_node) + elif ordered: + # sort the nodes topologically + # this requires the graph to be an acyclic graph + descendants = nx.topological_sort(relationship_subgraph) + + return 
list(descendants) + + def get_digraph_by_edge_type(self): + + digraph = nx.DiGraph() + for (u, v, key, c) in self.mm_graph.edges(data=True, keys=True): + if key == edge_type: + digraph.add_edge(u, v) + + return digraph + + def get_edges_by_relationship(self, + class_label: str, + relationship: str, + ) -> List[str]: + """Get a list of out-edges of a node where the edges match a specifc type of relationship. + + i.e., the edges connecting a node to its neighbors are of relationship type -- "parentOf" (set of edges to children / sub-class nodes). + Note: possible edge relationships are -- parentOf, rangeValue, requiresDependency. + + Args: + node: the node whose edges we need to look at. + relationship: the type of link(s) that the above node and its immediate neighbors share. + + Returns: + List of edges that are connected to the node. + """ + edges = [] + + for (u, v, key, c) in self.mm_graph.out_edges(node, data=True, keys=True): + if key == relationship: + edges.append((u, v)) + + return edges + + def get_node_definition(self, node_display_name: str) -> str: + """Get the node definition, i.e., the "comment" associated with a given node display name. + + Args: + node_display_name: Display name of the node which you want to get the label for. + + Returns: + Comment associated with node, as a string. + """ + node_label = self.get_node_label(node_display_name) + + if not node_label: + return "" + + node_definition = self.mm_graph.nodes[node_label]["comment"] + return node_definition + + + def get_node_dependencies(self, + source_node: str, + display_names: bool = True, + schema_ordered: bool = True, + requires_dependency_relationship: str = "requiresDependency", + ) -> List[str]: + """Get the immediate dependencies that are related to a given source node. + + Args: + source_node: The node whose dependencies we need to compute. + display_names: if True, return list of display names of each of the dependencies. 
+ if False, return list of node labels of each of the dependencies. + schema_ordered: if True, return the dependencies of the node following the order of the schema (slower). + if False, return dependencies from graph without guaranteeing schema order (faster) - def find_adjacent_child_classes(): + Returns: + List of nodes that are dependent on the source node. + """ + + # NOTE might not be necessary to move through explore_class in this refactored version. + if schema_ordered: + # get dependencies in the same order in which they are defined in the schema + required_dependencies = self.explore_class(source_node)["dependencies"] + else: + required_dependencies = self.get_adjacent_nodes_by_relationship( + source_node, self.requires_dependency_relationship + ) + + if display_names: + # get display names of dependencies + dependencies_display_names = [] + + for req in required_dependencies: + dependencies_display_names.append(self.mm_graph.nodes[req]["displayName"]) + + return dependencies_display_names + + return required_dependencies + + def get_node_label(self, node_display_name: str) -> str: + """Get the node label for a given display name. + + Args: + node_display_name: Display name of the node which you want to get the label for. + + Returns: + Node label associated with given node. + + Raises: + KeyError: If the node cannot be found in the graph. + """ + + node_class_label = SchemaUtils.get_class_label_from_display_name(node_display_name) + node_property_label = SchemaUtils.get_property_label_from_display_name( + node_display_name + ) + + if node_class_label in self.mm_graph.nodes: + node_label = node_class_label + elif node_property_label in self.mm_graph.nodes: + node_label = node_property_label + else: + node_label = "" + + return node_label + + def find_adjacent_child_classes(self, schema_class): + + return self.get_adjacent_nodes_by_relationship(schema_class, "parentOf") + + def find_all_class_properties(self): + """ + does not seem used. 
do not transfer now. + """ + breakpoint() return - def find_all_class_properties(): + def find_class_specific_properties(self, schema_class): + """Find properties specifically associated with a given class""" + + #This is called directly from the API + # Needs to be refactored no longer be JSONLD specific + + breakpoint() + schema_uri = self.mm_graph.nodes[schema_class]["uri"] + properties = [] + for record in self.schema["@graph"]: + if record["@type"] == "rdf:Property": + if ( + type(record["schema:domainIncludes"]) == dict + and record["schema:domainIncludes"]["@id"] == schema_uri + ): + properties.append(record["rdfs:label"]) + elif ( + type(record["schema:domainIncludes"]) == list + and [ + item + for item in record["schema:domainIncludes"] + if item["@id"] == schema_uri + ] + != [] + ): + properties.append(record["rdfs:label"]) + return properties return - def find_class_specific_properties(): + def find_class_usages(self): + """ + Does not look used, do not transfer for now. + """ return - def find_class_usages(): + def is_node_required(self, node_display_name: str) -> bool: + """Check if a given node is required or not. + + Note: The possible options that a node can be associated with -- "required" / "optional". + + Args: + node_display_name: Display name of the node which you want to get the label for. + + Returns: + True: If the given node is a "required" node. + False: If the given node is not a "required" (i.e., an "optional") node. + """ + node_label = self.get_node_label(node_display_name) + + node_required = self.mm_graph.nodes[node_label]["required"] + + return node_required + + def explore_class(self): + """ + nx specific version of this? This might not be necessary since each nx node should already contain all required information. + Put this here for now as a dummy function so this can be explored more. 
+ """ + breakpoint() return - def is_node_required(): + def explore_property(self): + breakpoint() return \ No newline at end of file diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 6a9bf6bcb..18bc72ee8 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -1,59 +1,250 @@ +from functools import wraps +from typing import Any, Dict, Optional, Text, List +import networkx as nx + +from schematic.schemas.data_model_graph import DataModelGraphExporer +from schematic.schemas.data_model_relationships import DataModelRelationships + + +class DataModelJsonLD(object): + ''' + #Interface to JSONLD_object + ''' + + def __init__(self, Graph: nx.MultiDiGraph): + # Setup + self.graph = Graph + self.dmr = DataModelRelationships() + ''' + self.jsonld_object = JSONLD_object(DataModelJsonLD) + self.jsonld_class = JSONLD_class(self.jsonld_object) + self.jsonld_property = JSONLD_property(self.jsonld_object) + ''' + self.DME = DataModelGraphExporer(self.graph) + + + def base_jsonld_template(self): + """ + #Base starter template, to be filled out with model. For entire file. + TODO: when done adding contexts fill out this section here. + """ + base_template = {"@context": {}, + "@graph": [], + "@id": "http://schema.biothings.io/#0.1", + } + return base_template + + def add_contexts(self): + breakpoint() + return + + def create_object(self, template, node): + """ + create a single JSONLD object per node + Use the relationship dictionary + """ + data_model_relationships = self.dmr.relationships_dictionary + + template_keys = list(template.keys()) + template_keys.remove('@type') + + # For each field in template fill out with information from the graph + for jsonld_key in template_keys: + + # Get column name linked to node. 
Need to do this now bc of relationship_dict structure + node_column_name = list(data_model_relationships[jsonld_key].keys())[0] + + # Fill edge information (done per edge type) + + if data_model_relationships[jsonld_key][node_column_name]['edge_rel']: + for node_1, node_2, rel in self.graph.edges: + key_context, key_rel = jsonld_key.split(':') + if rel == key_rel: + if rel in ['domainIncludes', 'subClassOf']: + if node_1 == node: + # use display names for the nodes + node_2_id = {'@id': 'context:'+node_2} + if isinstance(template[jsonld_key], list): + # TODO Format ids properly in future to take in proper context + template[jsonld_key].append(node_2_id) + else: + template[jsonld_key] == node_2 + else: + if node_2 == node: + # use display names for the nodes + node_1_id = {'@id': 'context:'+node_1} + if isinstance(template[jsonld_key], list): + # TODO Format ids properly in future to take in proper context + template[jsonld_key].append(node_1_id) + else: + template[jsonld_key] == node_1 + + + # Fill node information + else: + #if 'node_dict' in data_model_relationships[key][node_column_name].keys(): + # attribute here refers to node attibutes (come up with better name.) + node_attribute_name = list(data_model_relationships[jsonld_key][node_column_name]['node_dict'].keys())[0] + + # Get recorded info for current node, and the attribute type + node_info = nx.get_node_attributes(self.graph, node_attribute_name)[node] + + # Add this information to the template + template[jsonld_key] = node_info + return template + + def property_template(self): + ''' + TODO: Create this from relationship class + ''' + # Default required to False but add validation for this in the future. + # Only allowing a single class type, other models could have lists. 
+ + # Domain includes needs to pull a dict id {'@id': 'mutations'} + + property_template = { + "@id": "", + "@type": "rdf:Property", + "rdfs:comment": "", + "rdfs:label": "", + "schema:domainIncludes": [], + "schema:rangeIncludes": [], + "schema:isPartOf": {}, + "sms:displayName": "", + "sms:required": False, + "sms:validationRules": [], + } + return property_template + + def class_template(self): + """ + Only allowing a single class type, other models could have lists. + """ + class_template = { + "@id": "", + "@type": "rdfs:Class", + "rdfs:comment": "", + "rdfs:label": "", + "rdfs:subClassOf": [], + "schema:isPartOf": {}, + "schema:rangeIncludes": [], + "sms:displayName": "", + "sms:required": False, + "sms:validationRules": [], + } + return class_template + + + def generate_jsonld_object(self): + ''' + #Will call JSONLD_object class to create properties and classes in the process. + ''' + + # Get properties. + properties = self.DME.find_properties() + #classes = self.DME.find_classes() + + # Get JSONLD Template + self.json_ld_object = self.base_jsonld_template() + + # Iterativly add graph nodes to json_ld_object as properties and classes + for node in self.graph.nodes: + if node in properties: + obj = self.create_object(template = self.property_template(), node = node) + else: + obj = self.create_object(template = self.class_template(), node = node) + self.json_ld_object['@graph'].append(obj) + return self.json_ld_object + +""" class DataModelJsonLD(object): - ''' - Interface to JSONLD_object - ''' + ''' + #Interface to JSONLD_object + ''' + + def __init__(self, Graph: nx.MultiDiGraph): + # Setup + self.graph = Graph + self.jsonld_object = JSONLD_object(DataModelJsonLD) + self.jsonld_class = JSONLD_class(self.jsonld_object) + self.jsonld_property = JSONLD_property(self.jsonld_object) + self.DME = DataModelGraphExporer(self.graph) + + def generate_jsonld_object(self): + ''' + #Will call JSONLD_object class to create properties and classes in the process. 
+ ''' + + # Get properties and classes. + properties = self.DME.find_properties() + classes = self.DME.find_classes() - def __init__(data_model_graph): + # Get JSONLD Template + template = JSONLD_object + base + # Generate properties and classes and add to the template. - def generate_data_model_jsonld(self): - ''' - Will call JSONLD_object class to create properties and classes in the process. - ''' - pass + return -class JSONLD_object(): - ''' - Decorator class design - Base decorator class. - ''' - _template: template = None + def base_jsonld_template(self): + ''' + #Base starter template, to be filled out with model. + ''' + return - def __init__(self, to_template) -> None: - self.to_template() +class JSONLD_object(DataModelJsonLD): + ''' + #Decorator class design + #Base decorator class. + ''' + _DataModelJsonLD: DataModelJsonLD = None - def _create_template(self): - ''' - Returns jsonld_class_template or jsonld_property_template - ''' - return self._template + def __init__(self, DataModelJsonLD) -> None: + self.dataModelJsonLD = DataModelJsonLD - @property - def to_template(self): - return self._template.to_template() + def _create_template(self) -> DataModelJsonLD: + ''' + Returns jsonld_class_template or jsonld_property_template + ''' + return self._DataModelJsonLD + + @property + def to_template(self): + return self._DataModelJsonLD.to_template() + + class JSONLD_property(JSONLD_object): - ''' - Property Decorator - ''' - def to_template(self) - return JSONLD_property(self._template.to_template()) + ''' + Property Decorator + ''' + def to_template(self): + return JSONLD_property(self._DataModelJsonLD.to_template()) - def explore_property(): - return + def explore_property(): + return - def edit_property(): - return + def edit_property(): + return class JSONLD_class(JSONLD_object): - ''' - Class Decorator - ''' - def to_template(self) - return JSONLD_class(self._template.to_template()) + ''' + Class Decorator + ''' + def to_template(self): + return 
JSONLD_class(self._DataModelJsonLD.to_template()) + + def explore_class(): + return - def explore_class(): - return + def edit_class(): + return +""" +def convert_graph_to_jsonld(Graph): + # Make the JSONLD object + data_model_jsonld_converter = DataModelJsonLD(Graph=Graph) + jsonld_dm = data_model_jsonld_converter.generate_jsonld_object() + + return jsonld_dm - def edit_class(): - return diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index 621b1ba48..d695b82cf 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -1,7 +1,23 @@ -from utils.schema_util import get_property_label_from_display_name, get_class_label_from_display_name +from inspect import isfunction +from rdflib import Namespace + +from schematic.schemas.data_model_relationships import ( + DataModelRelationships + ) + +from schematic.utils.schema_util import get_property_label_from_display_name, get_class_label_from_display_name, get_display_name_from_label +from schematic.utils.validate_rules_utils import validate_schema_rules +from schematic.schemas.curie import uri2curie, curie2uri + class DataModelNodes(): def __init__(self): + self.namespaces = dict(rdf=Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#")) + self.data_model_relationships = DataModelRelationships() + self.value_relationships = self.data_model_relationships.define_value_relationships() + self.edge_relationships_dictionary = self.data_model_relationships.define_edge_relationships() + + return def node_present(self, G, node_name): @@ -9,71 +25,34 @@ def node_present(self, G, node_name): return True else: return False - ''' + def gather_nodes(self, attr_info): """Take in a tuple containing attriute name and relationship dictionary, and find all nodes defined in attribute information. Args: Returns: - list, nodes defined by attribure_info as being related to that attribute. + list, nodes defined by attribute_info as being related to that attribute. 
""" - rel_w_nodes = [ - "Valid Values", - "DependsOn", - "Parent", - "Properties", - "DependsOn Component", - ] + # retrieve a list of relationship types that will produce nodes. + self.node_relationships =list(self.edge_relationships_dictionary.keys()) + attribute, relationship = attr_info relationship = relationship['Relationships'] nodes = [] if attribute not in nodes: nodes.append(attribute) - for rel in rel_w_nodes: + for rel in self.node_relationships: if rel in relationship.keys(): nodes.extend([node.strip() for node in relationship[rel]]) return nodes - ''' - def gather_nodes(self, attr_info): - """Take in a tuple containing attriute name and relationship dictionary, and find all nodes defined in attribute information. - Args: - - Returns: - list, nodes defined by attribure_info as being related to that attribute. - """ - rel_w_nodes = [ - "Valid Values", - "DependsOn", - "Parent", - "Properties", - "DependsOn Component", - ] - attribute, relationship = attr_info - relationship = relationship['Relationships'] - - nodes = {} - node = {} - if attribute not in nodes: - node['display_name'] = attribute - node['label'] = get_class_label_from_display_name(attribute) - nodes[node['label']] = node - for rel in rel_w_nodes: - if rel in relationship.keys(): - for n in relationship[rel]: - if rel == 'Properties': - node['type'] = 'Property' - node['label'] = get_property_label_from_display_name(n.strip()) - else: - node['type'] = 'Class' - node['label'] = get_class_label_from_display_name(n.strip()) - nodes[node['label']] = node - return nodes def gather_all_nodes(self, data_model): """ + Args: + Returns: """ all_nodes = [] for attr_info in data_model.items(): @@ -82,14 +61,108 @@ def gather_all_nodes(self, data_model): all_nodes = [*set(all_nodes)] return all_nodes - def generate_node_dict(self, node, data_model): + def get_rel_default_info(self, relationship): + """ + For each display name fill out defaults. Maybe skip default. 
+ """ + for k,v in self.data_model_relationships.relationships_dictionary.items(): + for key, value in v.items(): + if key == relationship: + if 'node_dict' in value.keys(): + rel_key = list(value['node_dict'].keys())[0] + rel_default = value['node_dict'][rel_key] + return rel_key, rel_default + + def run_rel_functions(self, rel_func, node_display_name='', attr_relationships={}): + ''' This function exists to centralzie handling of functions for filling out node information. + TODO: and an ending else statement to alert to no func being caught. + ''' + func_output = '' + + if rel_func == get_display_name_from_label: + func_output = get_display_name_from_label(node_display_name, attr_relationships) + elif rel_func == get_class_label_from_display_name: + func_output = get_class_label_from_display_name(node_display_name) + elif rel_func == get_property_label_from_display_name: + func_output = get_property_label_from_display_name(node_display_name) + elif rel_func == uri2curie: + func_output = uri2curie(node_display_name, self.namespaces) + return func_output + + def generate_node_dict(self, node_display_name, data_model): """Gather information to be attached to each node. + Args: + node_display_name: display name for current node + data_model: + + Returns: + node_dict + Note: + If the default calls function, call that function for the default or alternate implementation. + May need to update this logic for varying function calls. (for example the current function takes in the node display name + ould need to update if new function took in something else.) """ - node_dict = {'display_name': node} - breakpoint() + + # Strip whitespace from node display name + node_display_name = node_display_name.strip() + + # If the node is an attribute, find its relationships. 
+ attr_relationships = {} + if node_display_name in data_model.keys(): + attr_relationships = data_model[node_display_name]['Relationships'] + + # Initialize node_dict + node_dict = {} + + # Look through relationship types that represent values (i.e. do not define edges) + for k, v in self.value_relationships.items(): + # Get key and defalt values current relationship type. + rel_key, rel_default = self.get_rel_default_info(k) + + # If we have information to add about this particular node + if attr_relationships and k in attr_relationships.keys(): + # Check if the default specifies calling a function. + if type(rel_default) == dict and 'default' in rel_default.keys() and isfunction(rel_default['default']): + # Add to node_dict The value comes from the standard function call. + # TODO UPDATE TO USE FUNCTION FUNCTION + #breakpoint() + node_dict.update({rel_key: self.run_rel_functions(rel_default['standard'], node_display_name, attr_relationships)}) + ''' + try: + node_dict.update({rel_key: rel_default['standard'](node_display_name)}) + except: + node_dict.update({rel_key: rel_default['standard'](node_display_name, self.namespaces)}) + ''' + else: + # For standard entries, get information from attr_relationship dictionary + node_dict.update({rel_key: attr_relationships[k]}) + # else, add default values + else: + # Check if the default specifies calling a function. + if type(rel_default) == dict and 'default' in rel_default.keys() and isfunction(rel_default['default']): + #breakpoint() + node_dict.update({rel_key: self.run_rel_functions(rel_default['default'], node_display_name, attr_relationships)}) + + # Add to node_dict. The value comes from the standard function call. + # TODO UPDATE TO USE FUNCTION FUNCTION + ''' + try: + node_dict.update({rel_key: rel_default['default'](node_display_name)}) + except: + node_dict.update({rel_key: rel_default['default'](node_display_name, self.namespaces)}) + ''' + else: + # Set value to defaults. 
+ node_dict.update({rel_key: rel_default}) return node_dict def generate_node(self, G, node_dict): + """ + Args: + + Returns: + """ + G.add_node(node_dict['label'], **node_dict) return G def edit_node(): diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 5bfefae73..9fc996882 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -8,6 +8,10 @@ from schematic.utils.df_utils import load_df from schematic.utils.io_utils import load_json +from schematic.schemas.data_model_relationships import ( + DataModelRelationships + ) + from schematic import LOADER logger = logging.getLogger(__name__) @@ -21,6 +25,7 @@ class DataModelParser(): may be added in the future. TODO: + Change all naming to fit what we will be using with the graph later. Dictionary in data_model_edges. Make sure to build with namespace contexts in mind! @@ -101,21 +106,9 @@ class DataModelCSVParser(): def __init__( self ): + self.dmr = DataModelRelationships() + self.required_headers = self.dmr.define_required_csv_headers() - self.required_headers = set( - [ - "Attribute", - "Description", - "Valid Values", - "DependsOn", - "Required", - "Parent", - "Properties", - "DependsOn Component", - "Source", - "Validation Rules", - ] - ) def check_schema_definition(self, model_df: pd.DataFrame) -> bool: @@ -129,7 +122,7 @@ def check_schema_definition(self, model_df: pd.DataFrame) -> bool: Raises: Exception """ try: - if self.required_headers.issubset(set(list(model_df.columns))): + if set(self.required_headers).issubset(set(list(model_df.columns))): return elif "Requires" in list(model_df.columns) or "Requires Component" in list( model_df.columns @@ -141,6 +134,7 @@ def check_schema_definition(self, model_df: pd.DataFrame) -> bool: ) logger.debug("Schema definition csv ready for processing!") except: + breakpoint() raise ValueError( f"Schema extension headers: {set(list(model_df.columns))} " f"do not match required schema 
headers: {self.required_headers}" @@ -158,26 +152,57 @@ def gather_csv_attributes_relationships(self, model_df): # Check csv schema follows expectations. self.check_schema_definition(model_df) + # Load relationships dictionary. + self.rel_dict = self.dmr.define_data_model_relationships() + # Get the type for each value that needs to be submitted. + self.rel_val_types = {key:val['type']for k, v in self.rel_dict.items() for key, val in v.items() if 'type' in val.keys()} + #load into format that can be read by validator.py # get attributes from Attribute column attributes = model_df[list(self.required_headers)].to_dict("records") # Build attribute/relationship dictionary - relationship_types = list(self.required_headers) - relationship_types.remove("Attribute") - + relationship_types = self.required_headers + #relationship_types.remove("Attribute") + + # TODO: using an attr_rel_dictionary will strip the order that attributes were submitted from + # the user. Will need to account for ordering later so the JSONLD fields are in the correct order. + # This will ensure the manifest dependencies are in the correct order. + # For now, just record order with a counter. + position = 0 attr_rel_dictionary = {} for attr in attributes: - attr_rel_dictionary.update({attr['Attribute']: {'Relationships': {}}}) + # For each attribute, record its position in the data model and its relationships. + + attr_rel_dictionary.update({ + attr['Attribute']: { + 'Position': position, + 'Relationships': {}, + }, + + } + ) for relationship in relationship_types: + rel_val_type = self.rel_val_types[relationship] if not pd.isnull(attr[relationship]): - if type(attr[relationship]) == bool: + # Fill in relationships based on type: + # TODO Look for matching types and handle non matches in function. + # Add boolean type value + if rel_val_type == bool and type(attr[relationship]) == bool: rels = attr[relationship] + # Add other value types and adjust as needed. 
else: + # Move strings to list if they are comma separated. + # Order from CSV is preserved here. rels = attr[relationship].strip().split(',') + rels = [r.strip() for r in rels] + # Extract string from list if necessary. + # TODO Catch situation where len does not equal 1. Throw error. + if rel_val_type == str and len(rels) == 1: + rels = rels[0] attr_rel_dictionary[attr['Attribute']]['Relationships'].update({relationship:rels}) - + position += 1 return attr_rel_dictionary @@ -207,24 +232,10 @@ def __init__( self, ): ''' - Does not include anything like valid values or properties... - Need to add these. ''' - # Do not pull in label in this step so it can be determined - # later by our program to allow consisency. - - self.relationship_types = { - 'sms:requiresDependency': 'DependsOn', - 'sms:requiresComponent': 'DependsOn Component', - 'rdfs:subClassOf': 'Parent', - 'sms:validationRules': 'Validation Rules', - 'schema:rangeIncludes': 'Valid Values', - 'rdfs:comment': 'Description', - 'sms:required': 'Required'} - - + self.data_model_relationships = DataModelRelationships() def gather_jsonld_attributes_relationships( self, @@ -232,8 +243,11 @@ def gather_jsonld_attributes_relationships( ''' Note: unlike a CSV the JSONLD might already have the biothings schema attached to it. So the output may not initially look identical. + TODO Check relationship attribute types like in CSV + + Make sure we can take in list of types. ''' - model_ids = [v['@id'] for v in model_jsonld] + model_ids = [v['rdfs:label'] for v in model_jsonld] attr_rel_dictionary = {} # For each entry in the jsonld model for entry in model_jsonld: @@ -241,30 +255,32 @@ def gather_jsonld_attributes_relationships( if 'rdfs:subClassOf' in entry.keys(): # Checking if subclass type is list, actually gets rid of Biothings. + # TODO: Allow biothings in future. if type(entry['rdfs:subClassOf']) == list: # Determine if the id the entry has been assigned as a sublcass of is also recoreded # as a model id. 
If it is, then the entry is not an attribute itself, but a valid value. - subclass_id = entry['rdfs:subClassOf'][0]['@id'] + subclass_id = entry['rdfs:subClassOf'][0]['rdfs:label'] if not subclass_id in model_ids: - # Get the id of the entry - entry_id = entry['@id'].split(':')[1] + # Get the label of the entry + ## To allow for contexts split by the delimiter + entry_id = entry['rdfs:label'].split(':')[1] # If the entry is an attribute that has not already been added to the dictionary, add it. if entry_id not in attr_rel_dictionary.keys(): attr_rel_dictionary.update({entry_id: {'Relationships': {}}}) - for relationship in self.relationship_types.keys(): + for relationship in self.data_model_relationships.keys(): if relationship in entry.keys(): if entry[relationship] != []: if type(entry[relationship][0]) == dict: - rels = [r['@id'].split(':')[1] for r in entry[relationship]] + rels = [r['rdfs:label'].split(':')[1] for r in entry[relationship]] else: rels = entry[relationship] attr_rel_dictionary[ entry_id]['Relationships'].update( - {self.relationship_types[relationship]:rels}) + {k: rels for k in self.data_model_relationships[relationship].keys()}) return attr_rel_dictionary diff --git a/schematic/schemas/data_model_validator.py b/schematic/schemas/data_model_validator.py index 1277f905f..7c9c58c58 100644 --- a/schematic/schemas/data_model_validator.py +++ b/schematic/schemas/data_model_validator.py @@ -1,51 +1,78 @@ +import networkx as nx + class DataModelValidator(): ''' Check for consistency within data model. ''' def __init__( + self, data_model, - run_all_checks: bool = True, ): - data_model = self.data_model - if run_all_checks: - ''' - If there are errors log them. 
- ''' - errors = self.run_checks(data_model) + self.data_model = data_model def run_checks(self): checks = [ self.check_has_name(), self.check_is_dag(), - self.check_json_(), - self.check_name_is_valid(), - self.check_name_overlap() + self.check_namespace_overlap(), + self.check_for_orphan_attributes(), + self.check_namespace_similarity(), ] - errors = [error for check in checks] + errors = [error for error in checks if error] return errors def check_has_name(self): - error = None + '''Checks that each node is assigned a label. + ''' + error = [] + + # Check that nodes have labels + node_labels = nx.get_node_attributes(self.data_model, "label") + for k, v in node_labels.items(): + if not v: + error.append(f'Node {k} does not have a label attached.') + breakpoint() return error def check_is_dag(self): - error = None - return - - def check_json(self): ''' - Standard JSON validation. + TODO: + - Check with Milen. This might be too simple of a check. + - Try wit topological sort as well. Benchmark against current approach. + - Add unit test to verify this works properly. + ''' - error = None - return + if nx.number_of_selfloops(self.data_model)!=0 and nx.is_directed(self.data_model) == False: + error = f'Schematic requires that data models are Directed Acyclic Graphs (DAGs). ' \ + f'Model supplied is not a DAG, please check your model.' + return error - def check_name_is_valid(self): - error = None - return - def check_name_overlap(self): + def check_namespace_overlap(self): ''' - Check if name is repeated in a valid value + Check if name is repeated. + TODO: + - Add unit test to verify this works properly. 
''' - error = None + error = [] + if len(self.data_model.nodes.keys()) != set(list(self.data_model.nodes.keys())): + all_node_names = list(self.data_model.nodes.keys()) + for n_name in self.data_model.nodes.keys(): + all_node_names = [i for i in all_node_names if i != n_name] + if n_name in all_node_names: + error.append(f'There appears to be a namespace overlap, {n_name} appears at least twice.') + + return error + + def check_for_orphan_attributes(self): + error = [] + return error + + def check_namespace_similarity(self): + """ Checks to see if names are incredibly similar save for formatting. Raise warning not error. + """ + error=[] + return error + + def check_required_filled(self): return \ No newline at end of file diff --git a/schematic/utils/schema_util.py b/schematic/utils/schema_util.py index 8ede62bf9..dc082b713 100644 --- a/schematic/utils/schema_util.py +++ b/schematic/utils/schema_util.py @@ -1,6 +1,13 @@ +import json +import string +import inflection + ''' + General methods. +TODO: Type hinting + ''' def get_property_label_from_display_name(display_name, strict_camel_case = False): @@ -35,4 +42,21 @@ def get_class_label_from_display_name(display_name, strict_camel_case = False): display_name = display_name.translate({ord(c): None for c in string.whitespace}) label = inflection.camelize(display_name.strip(), uppercase_first_letter=True) - return label \ No newline at end of file + return label + +def get_display_name_from_label(node_name, attr_relationships): + ''' + TODO: if not display name raise error. 
+ ''' + if 'Attribute' in attr_relationships.keys(): + display_name = attr_relationships['Attribute'] + else: + display_name = node_name + return display_name + +def get_json_key_from_context(): + return + +def export_schema(schema, file_path): + with open(file_path, "w") as f: + json.dump(schema, f, sort_keys=True, indent=4, ensure_ascii=False) From e0ab6eefc8e77626c69a007d49576d783663add4 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 12 Jun 2023 12:25:08 -0700 Subject: [PATCH 014/239] add logger statements --- schematic/schemas/commands.py | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/schematic/schemas/commands.py b/schematic/schemas/commands.py index 91894fecf..e1c1f7902 100644 --- a/schematic/schemas/commands.py +++ b/schematic/schemas/commands.py @@ -4,6 +4,8 @@ import click_log import logging import sys +#TODO Remove timing after development +import time import re from schematic.schemas.data_model_parser import DataModelParser @@ -59,22 +61,27 @@ def convert(schema, base_schema, output_jsonld): """ # TO DO: Throw these steps into their own function + + # get the start time + st = time.time() # Instantiate Parser data_model_parser = DataModelParser(schema, base_schema) #Parse Model + logger.info("Parsing data model.") parsed_data_model = data_model_parser.parse_model() - breakpoint() - # Convert parsed model to graph # Instantiate DataModelGraph data_model_grapher = DataModelGraph(parsed_data_model) - + + # Generate graph + logger.info("Generating data model graph.") graph_data_model = data_model_grapher.generate_data_model_graph() # Validate generated data model. + logger.info("Validating the data model internally.") data_model_validator = DataModelValidator(data_model=graph_data_model) data_model_errors = data_model_validator.run_checks() @@ -89,6 +96,7 @@ def convert(schema, base_schema, output_jsonld): # Actually raise error here with message. 
#data_model_jsonld_converter = DataModelJsonLD() + logger.info("Converting data model to JSON-LD") jsonld_data_model = convert_graph_to_jsonld(Graph=graph_data_model) # output JSON-LD file alongside CSV file by default @@ -109,6 +117,13 @@ def convert(schema, base_schema, output_jsonld): except: click.echo(f"The Data Model could not be created by using '{output_jsonld}' location. Please check your file path again") + # get the end time + et = time.time() + + # get the execution time + elapsed_time = (et - st)/.60 + click.echo(f"'Execution time: {elapsed_time} minutes") + ''' # convert RFC to Data Model base_se = _convert_csv_to_data_model(schema_csv, base_schema) From 7eea0a75b1fa2056c0697f26f4795adce0442e05 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 12 Jun 2023 12:26:52 -0700 Subject: [PATCH 015/239] Fix generate_edge() to add edges for all valid values --- schematic/schemas/data_model_edges.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py index 9cf8e4adc..bb1ddc8f2 100644 --- a/schematic/schemas/data_model_edges.py +++ b/schematic/schemas/data_model_edges.py @@ -7,20 +7,20 @@ def generate_edge(self, G, node, all_node_dict, data_model, edge_relationships): Args: Returns: - - Comment: - How do we best capture all the relationships we will be accounting for? """ + # For each attribute in the model. 
for attribute_display_name, relationship in data_model.items(): + # Get the relationships for the current attribure relationships = relationship['Relationships'] - - for key, val in edge_relationships.items(): - # For each relationship we are interested in - if key in relationships.keys(): - if node in relationships[key] and node != attribute_display_name: - print('Creating edge with node ' + node + ' and attribute ' + attribute_display_name) - G.add_edge(all_node_dict[node]['label'], all_node_dict[attribute_display_name]['label'], key=val) - + # For each edge relationship + for key, csv_header in edge_relationships.items(): + # For a given relationship in the model + if csv_header in relationships.keys(): + # if the current node is part of that relationship and is not the current node + if node in relationships[csv_header] and node != attribute_display_name: + #print('Creating edge relationship \"' + csv_header +'\" with node ' + node + ' and attribute ' + attribute_display_name) + # Connect node to attribute as an edge. 
+ G.add_edge(all_node_dict[node]['label'], all_node_dict[attribute_display_name]['label'], key=key) return G def edit_edge(): From e91eb2e1353c47c666fc247fa4c95628fd978613 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 12 Jun 2023 12:29:41 -0700 Subject: [PATCH 016/239] fix issues caused by changing data_model_relationships form --- schematic/schemas/data_model_graph.py | 3 +- schematic/schemas/data_model_jsonld.py | 60 +++++++++++++++----------- 2 files changed, 35 insertions(+), 28 deletions(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index ca7536375..b6bfffe3e 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -95,14 +95,13 @@ def generate_data_model_graph(self): # Find all nodes all_nodes = self.dmn.gather_all_nodes(self.data_model) - all_node_dict = {} ## Fill in MultiDigraph with nodes and edges for node in all_nodes: # Gather information for each node node_dict = self.dmn.generate_node_dict(node, self.data_model) - + all_node_dict[node] = node_dict # Generate node and attach information G = self.dmn.generate_node(G, node_dict) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 18bc72ee8..8155162ec 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -45,52 +45,58 @@ def create_object(self, template, node): """ data_model_relationships = self.dmr.relationships_dictionary - template_keys = list(template.keys()) - template_keys.remove('@type') + #template_keys = list(template.keys()) + #template_keys.remove('@type') # For each field in template fill out with information from the graph - for jsonld_key in template_keys: - + #for jsonld_key in template_keys: + for rel, rel_vals in data_model_relationships.items(): # Get column name linked to node. 
Need to do this now bc of relationship_dict structure - node_column_name = list(data_model_relationships[jsonld_key].keys())[0] + #node_column_name = list(data_model_relationships[jsonld_key].keys())[0] # Fill edge information (done per edge type) - - if data_model_relationships[jsonld_key][node_column_name]['edge_rel']: - for node_1, node_2, rel in self.graph.edges: - key_context, key_rel = jsonld_key.split(':') - if rel == key_rel: - if rel in ['domainIncludes', 'subClassOf']: + if rel_vals['edge_rel']: + #if data_model_relationships[jsonld_key][node_column_name]['edge_rel']: + for node_1, node_2, relationship in self.graph.edges: + key_context, key_rel = rel_vals['jsonld_key'].split(':') + if relationship == key_rel: + if relationship in ['domainIncludes', 'subClassOf']: if node_1 == node: # use display names for the nodes node_2_id = {'@id': 'context:'+node_2} - if isinstance(template[jsonld_key], list): - # TODO Format ids properly in future to take in proper context - template[jsonld_key].append(node_2_id) - else: - template[jsonld_key] == node_2 + try: + if isinstance(template[rel_vals['jsonld_key']], list): + # TODO Format ids properly in future to take in proper context + template[rel_vals['jsonld_key']].append(node_2_id) + else: + template[rel_vals['jsonld_key']] == node_2 + except: + breakpoint() else: if node_2 == node: # use display names for the nodes node_1_id = {'@id': 'context:'+node_1} - if isinstance(template[jsonld_key], list): - # TODO Format ids properly in future to take in proper context - template[jsonld_key].append(node_1_id) - else: - template[jsonld_key] == node_1 + try: + if isinstance(template[rel_vals['jsonld_key']], list): + # TODO Format ids properly in future to take in proper context + template[rel_vals['jsonld_key']].append(node_1_id) + else: + template[rel_vals['jsonld_key']] == node_1 + except: + breakpoint() # Fill node information else: #if 'node_dict' in data_model_relationships[key][node_column_name].keys(): # attribute 
here refers to node attibutes (come up with better name.) - node_attribute_name = list(data_model_relationships[jsonld_key][node_column_name]['node_dict'].keys())[0] - + #node_attribute_name = list(data_model_relationships[jsonld_key][node_column_name]['node_dict'].keys())[0] + node_attribute_name = rel_vals['node_label'] # Get recorded info for current node, and the attribute type node_info = nx.get_node_attributes(self.graph, node_attribute_name)[node] # Add this information to the template - template[jsonld_key] = node_info + template[rel_vals['jsonld_key']] = node_info return template def property_template(self): @@ -111,7 +117,7 @@ def property_template(self): "schema:rangeIncludes": [], "schema:isPartOf": {}, "sms:displayName": "", - "sms:required": False, + "sms:required": "False", "sms:validationRules": [], } return property_template @@ -129,7 +135,9 @@ def class_template(self): "schema:isPartOf": {}, "schema:rangeIncludes": [], "sms:displayName": "", - "sms:required": False, + "sms:required": "False", + "sms:requiresDependency": [], + "sms:requiresComponent": [], "sms:validationRules": [], } return class_template From c936c8f3a8e2af28287723ac587b3f80542774dc Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 12 Jun 2023 12:31:42 -0700 Subject: [PATCH 017/239] Fix to handle changes to data_model_relationships --- schematic/schemas/data_model_nodes.py | 73 +++++++++++--------------- schematic/schemas/data_model_parser.py | 12 +++-- 2 files changed, 38 insertions(+), 47 deletions(-) diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index d695b82cf..99963f4b0 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -34,18 +34,19 @@ def gather_nodes(self, attr_info): list, nodes defined by attribute_info as being related to that attribute. """ # retrieve a list of relationship types that will produce nodes. 
- self.node_relationships =list(self.edge_relationships_dictionary.keys()) + self.node_relationships =list(self.edge_relationships_dictionary.values()) + # Extract attribure and relationship dictionary attribute, relationship = attr_info - relationship = relationship['Relationships'] + relationships = relationship['Relationships'] nodes = [] if attribute not in nodes: nodes.append(attribute) for rel in self.node_relationships: - if rel in relationship.keys(): + if rel in relationships.keys(): nodes.extend([node.strip() - for node in relationship[rel]]) + for node in relationships[rel]]) return nodes def gather_all_nodes(self, data_model): @@ -66,27 +67,30 @@ def get_rel_default_info(self, relationship): For each display name fill out defaults. Maybe skip default. """ for k,v in self.data_model_relationships.relationships_dictionary.items(): - for key, value in v.items(): - if key == relationship: - if 'node_dict' in value.keys(): - rel_key = list(value['node_dict'].keys())[0] - rel_default = value['node_dict'][rel_key] - return rel_key, rel_default - - def run_rel_functions(self, rel_func, node_display_name='', attr_relationships={}): + if k == relationship: + if 'node_attr_dict' in v.keys(): + rel_key = v['node_label'] + rel_default = v['node_attr_dict'] + return rel_key, rel_default + + def run_rel_functions(self, rel_func, node_display_name='', key='', attr_relationships=''): ''' This function exists to centralzie handling of functions for filling out node information. TODO: and an ending else statement to alert to no func being caught. 
''' func_output = '' - if rel_func == get_display_name_from_label: func_output = get_display_name_from_label(node_display_name, attr_relationships) + elif key == 'id' and rel_func == get_class_label_from_display_name: + func_output = 'bts:' + get_class_label_from_display_name(node_display_name) + elif key == 'id' and rel_func == get_property_label_from_display_name: + func_output = 'bts:' + get_property_label_from_display_name(node_display_name) elif rel_func == get_class_label_from_display_name: func_output = get_class_label_from_display_name(node_display_name) elif rel_func == get_property_label_from_display_name: func_output = get_property_label_from_display_name(node_display_name) - elif rel_func == uri2curie: - func_output = uri2curie(node_display_name, self.namespaces) + else: + # raise error here to catch non valid function. + breakpoint() return func_output def generate_node_dict(self, node_display_name, data_model): @@ -115,45 +119,30 @@ def generate_node_dict(self, node_display_name, data_model): node_dict = {} # Look through relationship types that represent values (i.e. do not define edges) - for k, v in self.value_relationships.items(): + for key, csv_header in self.value_relationships.items(): + # Get key and defalt values current relationship type. - rel_key, rel_default = self.get_rel_default_info(k) + rel_key, rel_default = self.get_rel_default_info(key) # If we have information to add about this particular node - if attr_relationships and k in attr_relationships.keys(): - # Check if the default specifies calling a function. - if type(rel_default) == dict and 'default' in rel_default.keys() and isfunction(rel_default['default']): + if csv_header in attr_relationships.keys(): + # Check if the default specifies calling a function. + if 'standard' in rel_default.keys() and isfunction(rel_default['standard']): # Add to node_dict The value comes from the standard function call. 
- # TODO UPDATE TO USE FUNCTION FUNCTION #breakpoint() - node_dict.update({rel_key: self.run_rel_functions(rel_default['standard'], node_display_name, attr_relationships)}) - ''' - try: - node_dict.update({rel_key: rel_default['standard'](node_display_name)}) - except: - node_dict.update({rel_key: rel_default['standard'](node_display_name, self.namespaces)}) - ''' + node_dict.update({rel_key: self.run_rel_functions(rel_default['standard'], node_display_name=node_display_name, key=key, attr_relationships=attr_relationships)}) else: # For standard entries, get information from attr_relationship dictionary - node_dict.update({rel_key: attr_relationships[k]}) + node_dict.update({rel_key: attr_relationships[csv_header]}) # else, add default values else: # Check if the default specifies calling a function. - if type(rel_default) == dict and 'default' in rel_default.keys() and isfunction(rel_default['default']): + if 'default' in rel_default.keys() and isfunction(rel_default['default']): #breakpoint() - node_dict.update({rel_key: self.run_rel_functions(rel_default['default'], node_display_name, attr_relationships)}) - - # Add to node_dict. The value comes from the standard function call. - # TODO UPDATE TO USE FUNCTION FUNCTION - ''' - try: - node_dict.update({rel_key: rel_default['default'](node_display_name)}) - except: - node_dict.update({rel_key: rel_default['default'](node_display_name, self.namespaces)}) - ''' + node_dict.update({rel_key: self.run_rel_functions(rel_default['default'], node_display_name=node_display_name, key=key, attr_relationships=attr_relationships)}) else: # Set value to defaults. 
- node_dict.update({rel_key: rel_default}) + node_dict.update({rel_key: rel_default['default']}) return node_dict def generate_node(self, G, node_dict): @@ -162,7 +151,7 @@ def generate_node(self, G, node_dict): Returns: """ - G.add_node(node_dict['label'], **node_dict) + G.add_node(node_dict['label'], **node_dict) return G def edit_node(): diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 9fc996882..cf34987e0 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -134,7 +134,6 @@ def check_schema_definition(self, model_df: pd.DataFrame) -> bool: ) logger.debug("Schema definition csv ready for processing!") except: - breakpoint() raise ValueError( f"Schema extension headers: {set(list(model_df.columns))} " f"do not match required schema headers: {self.required_headers}" @@ -154,8 +153,10 @@ def gather_csv_attributes_relationships(self, model_df): # Load relationships dictionary. self.rel_dict = self.dmr.define_data_model_relationships() + # Get the type for each value that needs to be submitted. - self.rel_val_types = {key:val['type']for k, v in self.rel_dict.items() for key, val in v.items() if 'type' in val.keys()} + # using csv_headers as keys to match required_headers/relationship_types + self.rel_val_types = {v['csv_header']:v['type']for k, v in self.rel_dict.items() if 'type' in v.keys()} #load into format that can be read by validator.py @@ -192,15 +193,16 @@ def gather_csv_attributes_relationships(self, model_df): if rel_val_type == bool and type(attr[relationship]) == bool: rels = attr[relationship] # Add other value types and adjust as needed. - else: + elif rel_val_type == list: # Move strings to list if they are comma separated. # Order from CSV is preserved here. rels = attr[relationship].strip().split(',') rels = [r.strip() for r in rels] # Extract string from list if necessary. # TODO Catch situation where len does not equal 1. Throw error. 
- if rel_val_type == str and len(rels) == 1: - rels = rels[0] + elif rel_val_type == str: + rels = str(attr[relationship]).strip() + #rels = attr[relationship].strip() attr_rel_dictionary[attr['Attribute']]['Relationships'].update({relationship:rels}) position += 1 return attr_rel_dictionary From 20187056b393434ec1a016bb8e675343fe6e59b5 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 22 Jun 2023 12:23:48 -0700 Subject: [PATCH 018/239] update jsonld and edge processing of subclassOf and validvalue relationships --- schematic/schemas/commands.py | 4 ++-- schematic/schemas/data_model_edges.py | 8 ++++++++ schematic/schemas/data_model_jsonld.py | 24 ++++++++++++++++++++---- 3 files changed, 30 insertions(+), 6 deletions(-) diff --git a/schematic/schemas/commands.py b/schematic/schemas/commands.py index e1c1f7902..093680dab 100644 --- a/schematic/schemas/commands.py +++ b/schematic/schemas/commands.py @@ -121,8 +121,8 @@ def convert(schema, base_schema, output_jsonld): et = time.time() # get the execution time - elapsed_time = (et - st)/.60 - click.echo(f"'Execution time: {elapsed_time} minutes") + elapsed_time = time.strftime("%M:%S", time.gmtime(et - st)) + click.echo(f"Execution time: {elapsed_time} (M:S)") ''' # convert RFC to Data Model diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py index bb1ddc8f2..dd96c6aa1 100644 --- a/schematic/schemas/data_model_edges.py +++ b/schematic/schemas/data_model_edges.py @@ -13,7 +13,10 @@ def generate_edge(self, G, node, all_node_dict, data_model, edge_relationships): # Get the relationships for the current attribure relationships = relationship['Relationships'] # For each edge relationship + for key, csv_header in edge_relationships.items(): + #if node == 'Patient' and attribute_display_name == 'HTAN Participant ID' and csv_header == 'Parent': + # breakpoint() # For a given relationship in the model if csv_header in relationships.keys(): # if the current node is part of that 
relationship and is not the current node @@ -21,6 +24,11 @@ def generate_edge(self, G, node, all_node_dict, data_model, edge_relationships): #print('Creating edge relationship \"' + csv_header +'\" with node ' + node + ' and attribute ' + attribute_display_name) # Connect node to attribute as an edge. G.add_edge(all_node_dict[node]['label'], all_node_dict[attribute_display_name]['label'], key=key) + # Add additional valid value edges + if key == 'rangeIncludes': + G.add_edge(all_node_dict[node]['label'], all_node_dict[attribute_display_name]['label'], key='subClassOf') + G.add_edge(all_node_dict[attribute_display_name]['label'], all_node_dict[node]['label'], key='subClassOf') + return G def edit_edge(): diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 8155162ec..ecd5cbfa1 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -60,10 +60,14 @@ def create_object(self, template, node): for node_1, node_2, relationship in self.graph.edges: key_context, key_rel = rel_vals['jsonld_key'].split(':') if relationship == key_rel: + ''' if relationship in ['domainIncludes', 'subClassOf']: + #if relationship in ['domainIncludes', 'subClassOf']: if node_1 == node: + if node_1 == 'Patient' and node_2 == 'HTANParticipantID': + breakpoint() # use display names for the nodes - node_2_id = {'@id': 'context:'+node_2} + node_2_id = {'@id': 'bts:'+ node_2} try: if isinstance(template[rel_vals['jsonld_key']], list): # TODO Format ids properly in future to take in proper context @@ -75,7 +79,7 @@ def create_object(self, template, node): else: if node_2 == node: # use display names for the nodes - node_1_id = {'@id': 'context:'+node_1} + node_1_id = {'@id': 'bts:'+node_1} try: if isinstance(template[rel_vals['jsonld_key']], list): # TODO Format ids properly in future to take in proper context @@ -84,6 +88,18 @@ def create_object(self, template, node): template[rel_vals['jsonld_key']] == node_1 
except: breakpoint() + ''' + if node_2 == node: + # use display names for the nodes + node_1_id = {'@id': 'bts:'+node_1} + try: + if isinstance(template[rel_vals['jsonld_key']], list): + # TODO Format ids properly in future to take in proper context + template[rel_vals['jsonld_key']].append(node_1_id) + else: + template[rel_vals['jsonld_key']] == node_1 + except: + breakpoint() # Fill node information @@ -117,7 +133,7 @@ def property_template(self): "schema:rangeIncludes": [], "schema:isPartOf": {}, "sms:displayName": "", - "sms:required": "False", + "sms:required": "sms:false", "sms:validationRules": [], } return property_template @@ -135,7 +151,7 @@ def class_template(self): "schema:isPartOf": {}, "schema:rangeIncludes": [], "sms:displayName": "", - "sms:required": "False", + "sms:required": "sms:false", "sms:requiresDependency": [], "sms:requiresComponent": [], "sms:validationRules": [], From 3fddf77690b0b6c8f8a4dcf20bcbffad2200b14c Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 23 Jun 2023 11:10:01 -0700 Subject: [PATCH 019/239] fix direction of edge for adding subclass of relationship for rangeIncludes values --- schematic/schemas/data_model_edges.py | 1 - 1 file changed, 1 deletion(-) diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py index dd96c6aa1..c9800074f 100644 --- a/schematic/schemas/data_model_edges.py +++ b/schematic/schemas/data_model_edges.py @@ -26,7 +26,6 @@ def generate_edge(self, G, node, all_node_dict, data_model, edge_relationships): G.add_edge(all_node_dict[node]['label'], all_node_dict[attribute_display_name]['label'], key=key) # Add additional valid value edges if key == 'rangeIncludes': - G.add_edge(all_node_dict[node]['label'], all_node_dict[attribute_display_name]['label'], key='subClassOf') G.add_edge(all_node_dict[attribute_display_name]['label'], all_node_dict[node]['label'], key='subClassOf') return G From 495320ee5ffce3fd7d80a6d344e6e656acc423bb Mon Sep 17 00:00:00 2001 From: Mialy 
DeFelice Date: Fri, 23 Jun 2023 11:12:01 -0700 Subject: [PATCH 020/239] fix jsonld generation to make sure relationships are added in proper order and template is cleaned up --- schematic/schemas/data_model_jsonld.py | 60 +++++++++++--------------- 1 file changed, 24 insertions(+), 36 deletions(-) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index ecd5cbfa1..5e5954159 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -60,47 +60,18 @@ def create_object(self, template, node): for node_1, node_2, relationship in self.graph.edges: key_context, key_rel = rel_vals['jsonld_key'].split(':') if relationship == key_rel: - ''' - if relationship in ['domainIncludes', 'subClassOf']: - #if relationship in ['domainIncludes', 'subClassOf']: - if node_1 == node: - if node_1 == 'Patient' and node_2 == 'HTANParticipantID': - breakpoint() - # use display names for the nodes - node_2_id = {'@id': 'bts:'+ node_2} - try: - if isinstance(template[rel_vals['jsonld_key']], list): - # TODO Format ids properly in future to take in proper context - template[rel_vals['jsonld_key']].append(node_2_id) - else: - template[rel_vals['jsonld_key']] == node_2 - except: - breakpoint() - else: - if node_2 == node: - # use display names for the nodes - node_1_id = {'@id': 'bts:'+node_1} + if node_2 == node: + node_1_id = {'@id': 'bts:'+node_1} + # Make sure the key is in the template (differs between properties and classes) + if rel_vals['jsonld_key'] in template.keys(): + # TODO Move this to a helper function to clear up. 
try: if isinstance(template[rel_vals['jsonld_key']], list): - # TODO Format ids properly in future to take in proper context template[rel_vals['jsonld_key']].append(node_1_id) else: template[rel_vals['jsonld_key']] == node_1 except: breakpoint() - ''' - if node_2 == node: - # use display names for the nodes - node_1_id = {'@id': 'bts:'+node_1} - try: - if isinstance(template[rel_vals['jsonld_key']], list): - # TODO Format ids properly in future to take in proper context - template[rel_vals['jsonld_key']].append(node_1_id) - else: - template[rel_vals['jsonld_key']] == node_1 - except: - breakpoint() - # Fill node information else: @@ -113,6 +84,23 @@ def create_object(self, template, node): # Add this information to the template template[rel_vals['jsonld_key']] = node_info + + # Clean up template + template = self.clean_template(template=template, + data_model_relationships=data_model_relationships, + ) + return template + + def clean_template(self, template, data_model_relationships): + ''' + Get rid of empty k:v pairs. Fill with a default if specified in the relationships dictionary. + ''' + for rels in data_model_relationships.values(): + if rels['jsonld_key'] in template.keys() and not template[rels['jsonld_key']]: + if 'jsonld_default' in rels.keys(): + template[rels['jsonld_key']] = rels['jsonld_default'] + else: + del template[rels['jsonld_key']] return template def property_template(self): @@ -167,10 +155,8 @@ def generate_jsonld_object(self): # Get properties. 
properties = self.DME.find_properties() #classes = self.DME.find_classes() - # Get JSONLD Template self.json_ld_object = self.base_jsonld_template() - # Iterativly add graph nodes to json_ld_object as properties and classes for node in self.graph.nodes: if node in properties: @@ -178,6 +164,8 @@ def generate_jsonld_object(self): else: obj = self.create_object(template = self.class_template(), node = node) self.json_ld_object['@graph'].append(obj) + if node in properties: + breakpoint() return self.json_ld_object """ From 0eb5b0716db82a9130dd23f5d95655730ab3b930 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 23 Jun 2023 11:14:04 -0700 Subject: [PATCH 021/239] add extra relationship function and fix how repeated nodes are removed so order is preserved, this helps preserve function in networkx and jsonld --- schematic/schemas/data_model_nodes.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index 99963f4b0..db4b7c0fd 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -5,7 +5,7 @@ DataModelRelationships ) -from schematic.utils.schema_util import get_property_label_from_display_name, get_class_label_from_display_name, get_display_name_from_label +from schematic.utils.schema_util import get_property_label_from_display_name, get_class_label_from_display_name, get_display_name_from_label, convert_bool from schematic.utils.validate_rules_utils import validate_schema_rules from schematic.schemas.curie import uri2curie, curie2uri @@ -59,7 +59,7 @@ def gather_all_nodes(self, data_model): for attr_info in data_model.items(): nodes = self.gather_nodes(attr_info=attr_info) all_nodes.extend(nodes) - all_nodes = [*set(all_nodes)] + all_nodes = list(dict.fromkeys(all_nodes).keys()) return all_nodes def get_rel_default_info(self, relationship): @@ -73,9 +73,11 @@ def get_rel_default_info(self, relationship): 
rel_default = v['node_attr_dict'] return rel_key, rel_default - def run_rel_functions(self, rel_func, node_display_name='', key='', attr_relationships=''): + def run_rel_functions(self, rel_func, node_display_name='', key='', attr_relationships='', csv_header=''): ''' This function exists to centralzie handling of functions for filling out node information. TODO: and an ending else statement to alert to no func being caught. + - Implement using a factory pattern. + ''' func_output = '' if rel_func == get_display_name_from_label: @@ -88,6 +90,8 @@ def run_rel_functions(self, rel_func, node_display_name='', key='', attr_relatio func_output = get_class_label_from_display_name(node_display_name) elif rel_func == get_property_label_from_display_name: func_output = get_property_label_from_display_name(node_display_name) + elif rel_func == convert_bool: + func_output == 'sms:' + convert_bool(attr_relationships[csv_header]).lower() else: # raise error here to catch non valid function. breakpoint() @@ -129,8 +133,7 @@ def generate_node_dict(self, node_display_name, data_model): # Check if the default specifies calling a function. if 'standard' in rel_default.keys() and isfunction(rel_default['standard']): # Add to node_dict The value comes from the standard function call. - #breakpoint() - node_dict.update({rel_key: self.run_rel_functions(rel_default['standard'], node_display_name=node_display_name, key=key, attr_relationships=attr_relationships)}) + node_dict.update({rel_key: self.run_rel_functions(rel_default['standard'], node_display_name=node_display_name, key=key, attr_relationships=attr_relationships, csv_header=csv_header)}) else: # For standard entries, get information from attr_relationship dictionary node_dict.update({rel_key: attr_relationships[csv_header]}) @@ -138,11 +141,11 @@ def generate_node_dict(self, node_display_name, data_model): else: # Check if the default specifies calling a function. 
if 'default' in rel_default.keys() and isfunction(rel_default['default']): - #breakpoint() - node_dict.update({rel_key: self.run_rel_functions(rel_default['default'], node_display_name=node_display_name, key=key, attr_relationships=attr_relationships)}) + node_dict.update({rel_key: self.run_rel_functions(rel_default['default'], node_display_name=node_display_name, key=key, attr_relationships=attr_relationships, csv_header=csv_header)}) else: # Set value to defaults. node_dict.update({rel_key: rel_default['default']}) + return node_dict def generate_node(self, G, node_dict): From 30186c140f999507667721720f08c6362443ce9c Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 23 Jun 2023 11:15:42 -0700 Subject: [PATCH 022/239] add todo for validator --- schematic/schemas/data_model_validator.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/schematic/schemas/data_model_validator.py b/schematic/schemas/data_model_validator.py index 7c9c58c58..1fddde117 100644 --- a/schematic/schemas/data_model_validator.py +++ b/schematic/schemas/data_model_validator.py @@ -53,6 +53,10 @@ def check_namespace_overlap(self): Check if name is repeated. TODO: - Add unit test to verify this works properly. + - The way this looks, it wont find namespace overlaps, + Have to go back to loading the csv and looking before overlaps have been removed. + Look for duplicate attributes. + Look for valid values that overlap with attributes and flag. 
''' error = [] if len(self.data_model.nodes.keys()) != set(list(self.data_model.nodes.keys())): From c978228ea5aee7ee94432f33eb917fa035708da5 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 23 Jun 2023 11:16:20 -0700 Subject: [PATCH 023/239] add simple convert_bool func to utils --- schematic/utils/schema_util.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/schematic/utils/schema_util.py b/schematic/utils/schema_util.py index dc082b713..c0a5732d7 100644 --- a/schematic/utils/schema_util.py +++ b/schematic/utils/schema_util.py @@ -52,10 +52,10 @@ def get_display_name_from_label(node_name, attr_relationships): display_name = attr_relationships['Attribute'] else: display_name = node_name - return display_name + return display_name -def get_json_key_from_context(): - return +def convert_bool(provided_bool): + return str(provided_bool) def export_schema(schema, file_path): with open(file_path, "w") as f: From 678c7c47de5a9665073a4350bd4a812202c784ec Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 23 Jun 2023 12:11:46 -0700 Subject: [PATCH 024/239] add back specific handling for domain includes in jsonld conversion --- schematic/schemas/data_model_jsonld.py | 26 ++++++++++++++++---------- schematic/schemas/data_model_parser.py | 3 +-- 2 files changed, 17 insertions(+), 12 deletions(-) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 5e5954159..94eab283f 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -60,18 +60,26 @@ def create_object(self, template, node): for node_1, node_2, relationship in self.graph.edges: key_context, key_rel = rel_vals['jsonld_key'].split(':') if relationship == key_rel: - if node_2 == node: - node_1_id = {'@id': 'bts:'+node_1} - # Make sure the key is in the template (differs between properties and classes) - if rel_vals['jsonld_key'] in template.keys(): - # TODO Move this to a helper function to clear 
up. - try: + if key_rel == 'domainIncludes': + if node_1 == node: + node_2_id = {'@id': 'bts:'+node_2} + # Make sure the key is in the template (differs between properties and classes) + if rel_vals['jsonld_key'] in template.keys(): + # TODO Move this to a helper function to clear up. + if isinstance(template[rel_vals['jsonld_key']], list): + template[rel_vals['jsonld_key']].append(node_2_id) + else: + template[rel_vals['jsonld_key']] == node_2 + else: + if node_2 == node: + node_1_id = {'@id': 'bts:'+node_1} + # Make sure the key is in the template (differs between properties and classes) + if rel_vals['jsonld_key'] in template.keys(): + # TODO Move this to a helper function to clear up. if isinstance(template[rel_vals['jsonld_key']], list): template[rel_vals['jsonld_key']].append(node_1_id) else: template[rel_vals['jsonld_key']] == node_1 - except: - breakpoint() # Fill node information else: @@ -164,8 +172,6 @@ def generate_jsonld_object(self): else: obj = self.create_object(template = self.class_template(), node = node) self.json_ld_object['@graph'].append(obj) - if node in properties: - breakpoint() return self.json_ld_object """ diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index cf34987e0..6d9723641 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -188,8 +188,6 @@ def gather_csv_attributes_relationships(self, model_df): rel_val_type = self.rel_val_types[relationship] if not pd.isnull(attr[relationship]): # Fill in relationships based on type: - # TODO Look for matching types and handle non matches in function. - # Add boolean type value if rel_val_type == bool and type(attr[relationship]) == bool: rels = attr[relationship] # Add other value types and adjust as needed. 
@@ -205,6 +203,7 @@ def gather_csv_attributes_relationships(self, model_df): #rels = attr[relationship].strip() attr_rel_dictionary[attr['Attribute']]['Relationships'].update({relationship:rels}) position += 1 + return attr_rel_dictionary From 2a5b0e28c117ee313bb8039834a2a43e7040cc0c Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 28 Jun 2023 11:23:09 -0700 Subject: [PATCH 025/239] add weights to edges --- schematic/schemas/data_model_edges.py | 28 ++++++++++++++++++--------- 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py index c9800074f..b21695db5 100644 --- a/schematic/schemas/data_model_edges.py +++ b/schematic/schemas/data_model_edges.py @@ -13,20 +13,30 @@ def generate_edge(self, G, node, all_node_dict, data_model, edge_relationships): # Get the relationships for the current attribure relationships = relationship['Relationships'] # For each edge relationship - for key, csv_header in edge_relationships.items(): - #if node == 'Patient' and attribute_display_name == 'HTAN Participant ID' and csv_header == 'Parent': - # breakpoint() # For a given relationship in the model if csv_header in relationships.keys(): - # if the current node is part of that relationship and is not the current node - if node in relationships[csv_header] and node != attribute_display_name: - #print('Creating edge relationship \"' + csv_header +'\" with node ' + node + ' and attribute ' + attribute_display_name) - # Connect node to attribute as an edge. - G.add_edge(all_node_dict[node]['label'], all_node_dict[attribute_display_name]['label'], key=key) + # If the current node is part of that relationship and is not the current node + # Connect node to attribute as an edge. 
+ if node in relationships[csv_header] and node != attribute_display_name: + # Find position of node in the list, this is the weight + # This will help us ensure things like valid values, or depends on are preserved in the proper order. + + # TODO: Move adding weights to its own helper. + # TODO: create a new attribute in the rel dictionary looking for directionality. Save as out for domainIncludes, save as in for others. + if key == 'domainIncludes': + # Get weight from the order of the attributes. + weight = list(data_model.keys()).index(attribute_display_name) + elif type(relationships[csv_header]) == list: + weight = relationships[csv_header].index(node) + else: + weight = 0 + # Here the first added node to the edge is the value that would be the valid value to the second node which is the attribute. + G.add_edge(all_node_dict[node]['label'], all_node_dict[attribute_display_name]['label'], key=key, weight=weight) # Add additional valid value edges if key == 'rangeIncludes': - G.add_edge(all_node_dict[attribute_display_name]['label'], all_node_dict[node]['label'], key='subClassOf') + # Add this relationships for classes. 
+ G.add_edge(all_node_dict[attribute_display_name]['label'], all_node_dict[node]['label'], key='subClassOf', weight=weight) return G From 600fdd625c1010b833088db5415da4b2b45e8235 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 28 Jun 2023 11:31:58 -0700 Subject: [PATCH 026/239] fix convert_bool function processing --- schematic/schemas/data_model_nodes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index db4b7c0fd..506fdfb67 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -91,7 +91,7 @@ def run_rel_functions(self, rel_func, node_display_name='', key='', attr_relatio elif rel_func == get_property_label_from_display_name: func_output = get_property_label_from_display_name(node_display_name) elif rel_func == convert_bool: - func_output == 'sms:' + convert_bool(attr_relationships[csv_header]).lower() + func_output = 'sms:' + convert_bool(attr_relationships[csv_header]).lower() else: # raise error here to catch non valid function. breakpoint() From 04904be93b3c3b4b558584db69203cb312258187 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 28 Jun 2023 11:33:09 -0700 Subject: [PATCH 027/239] add TODO --- schematic/schemas/data_model_graph.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index b6bfffe3e..dc46f9b24 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -120,6 +120,9 @@ def __init__(self, self.graph = G def find_properties(self): + """ + TODO: handle 'domainIncludes' with relationship edge parameters. 
+ """ properties=[] for node_1, node_2, rel in self.graph.edges: if rel == 'domainIncludes': From 292522ef5b19fdfb34077bbc95fcd69ec02f87da Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 28 Jun 2023 11:34:36 -0700 Subject: [PATCH 028/239] fix jsonld processsing, optimize, reorder entries --- schematic/schemas/data_model_jsonld.py | 157 ++++++++++++++++--------- 1 file changed, 102 insertions(+), 55 deletions(-) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 94eab283f..73314cba3 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -33,70 +33,67 @@ def base_jsonld_template(self): "@id": "http://schema.biothings.io/#0.1", } return base_template - - def add_contexts(self): - breakpoint() - return def create_object(self, template, node): - """ - create a single JSONLD object per node - Use the relationship dictionary - """ data_model_relationships = self.dmr.relationships_dictionary - #template_keys = list(template.keys()) - #template_keys.remove('@type') - # For each field in template fill out with information from the graph - #for jsonld_key in template_keys: for rel, rel_vals in data_model_relationships.items(): - # Get column name linked to node. 
Need to do this now bc of relationship_dict structure - #node_column_name = list(data_model_relationships[jsonld_key].keys())[0] + + key_context, key_rel = self.strip_context(context_value=rel_vals['jsonld_key']) # Fill edge information (done per edge type) if rel_vals['edge_rel']: - #if data_model_relationships[jsonld_key][node_column_name]['edge_rel']: - for node_1, node_2, relationship in self.graph.edges: - key_context, key_rel = rel_vals['jsonld_key'].split(':') - if relationship == key_rel: - if key_rel == 'domainIncludes': - if node_1 == node: - node_2_id = {'@id': 'bts:'+node_2} - # Make sure the key is in the template (differs between properties and classes) - if rel_vals['jsonld_key'] in template.keys(): - # TODO Move this to a helper function to clear up. - if isinstance(template[rel_vals['jsonld_key']], list): - template[rel_vals['jsonld_key']].append(node_2_id) - else: - template[rel_vals['jsonld_key']] == node_2 - else: - if node_2 == node: - node_1_id = {'@id': 'bts:'+node_1} - # Make sure the key is in the template (differs between properties and classes) - if rel_vals['jsonld_key'] in template.keys(): - # TODO Move this to a helper function to clear up. - if isinstance(template[rel_vals['jsonld_key']], list): - template[rel_vals['jsonld_key']].append(node_1_id) - else: - template[rel_vals['jsonld_key']] == node_1 - - # Fill node information + # Get all edges associated with the current node + node_edges = list(self.graph.in_edges(node, data=True)) + node_edges.extend(list(self.graph.out_edges(node,data=True))) + + for node_1, node_2, weight in node_edges: + # Get 'AtlasView'('relationship':{weight:value}) of edge + node_edge_relationships = self.graph[node_1][node_2] + + # Check if key_rel is even one of the relationships for this node pair. 
+ if key_rel in node_edge_relationships: + for relationship, weight_dict in node_edge_relationships.items(): + if relationship == key_rel: + if key_rel == 'domainIncludes': + if node_1 == node: + # Make sure the key is in the template (differs between properties and classes) + if rel_vals['jsonld_key'] in template.keys(): + node_2_id = {'@id': 'bts:'+node_2} + # TODO Move this to a helper function to clear up. + if (isinstance(template[rel_vals['jsonld_key']], list) and + node_2_id not in template[rel_vals['jsonld_key']]): + template[rel_vals['jsonld_key']].append(node_2_id) + else: + template[rel_vals['jsonld_key']] == node_2 + else: + if node_2 == node: + # Make sure the key is in the template (differs between properties and classes) + if rel_vals['jsonld_key'] in template.keys(): + node_1_id = {'@id': 'bts:'+node_1} + # TODO Move this to a helper function to clear up. + if (isinstance(template[rel_vals['jsonld_key']], list) and + node_1_id not in template[rel_vals['jsonld_key']]): + # could possibly keep track of weights here but that might slow things down + template[rel_vals['jsonld_key']].append(node_1_id) + else: + template[rel_vals['jsonld_key']] == node_1 else: - #if 'node_dict' in data_model_relationships[key][node_column_name].keys(): # attribute here refers to node attibutes (come up with better name.) 
- #node_attribute_name = list(data_model_relationships[jsonld_key][node_column_name]['node_dict'].keys())[0] node_attribute_name = rel_vals['node_label'] # Get recorded info for current node, and the attribute type node_info = nx.get_node_attributes(self.graph, node_attribute_name)[node] - # Add this information to the template template[rel_vals['jsonld_key']] = node_info - + # Clean up template template = self.clean_template(template=template, data_model_relationships=data_model_relationships, ) + # Reorder lists based on weights: + template = self.reorder_entries(template=template,) + return template def clean_template(self, template, data_model_relationships): @@ -111,15 +108,65 @@ def clean_template(self, template, data_model_relationships): del template[rels['jsonld_key']] return template - def property_template(self): - ''' - TODO: Create this from relationship class + def strip_context(self, context_value): + if ':' in context_value: + context, v = context_value.split(':') + elif '@' in context_value: + context, v = context_value.split('@') + return context, v + + def reorder_entries(self, template): + '''In JSONLD some classes or property keys have list values. We want to make sure these lists are ordered according to the order supplied by the user. + This will look specically in lists and reorder those. + Args: + template (dict): + Returns: + template (dict): list entries re-ordered to match user supplied order. + ''' - # Default required to False but add validation for this in the future. - # Only allowing a single class type, other models could have lists. 
- - # Domain includes needs to pull a dict id {'@id': 'mutations'} + data_model_relationships = self.dmr.relationships_dictionary + + # user order only matters for nodes that are also attributes + template_id = template['rdfs:label'] + + for jsonld_key, entry in template.items(): + #if the entry is of type list and theres more than one value in the list attempt to reorder + if isinstance(entry, list) and len(entry)>1: + # Get relationship key from JSONLD Key: + key = [k for k, v in data_model_relationships.items() if jsonld_key == v['jsonld_key']][0] + # TODO: + # Get edge weights for values in the list. + if data_model_relationships[key]['jsonld_direction'] == 'out': + #use outedges + original_edge_weights_dict = {attached_node:self.graph[template_node][attached_node][key]['weight'] + for template_node, attached_node in self.graph.out_edges(template_id) + if key in self.graph[template_node][attached_node] + } + else: + #use inedges + original_edge_weights_dict = {attached_node:self.graph[attached_node][template_node][key]['weight'] + for attached_node, template_node in self.graph.in_edges(template_id) + if key in self.graph[attached_node][template_node] + } + + # TODO: MOVE TO HELPER + sorted_edges = list(dict(sorted(original_edge_weights_dict.items(), key=lambda item: item[1])).keys()) + edge_weights_dict={edge:i for i, edge in enumerate(sorted_edges)} + ordered_edges = [0]*len(edge_weights_dict.keys()) + + for k,v in edge_weights_dict.items(): + ordered_edges[v] = {'@id': 'bts:' + k} + + # TODO: Throw an error if ordered_edges does not get fully filled as expected. + if 0 in ordered_edges: + breakpoint() + + template[jsonld_key] = ordered_edges + return template + def property_template(self): + ''' + ''' property_template = { "@id": "", "@type": "rdf:Property", @@ -136,7 +183,6 @@ def property_template(self): def class_template(self): """ - Only allowing a single class type, other models could have lists. 
""" class_template = { "@id": "", @@ -157,20 +203,21 @@ def class_template(self): def generate_jsonld_object(self): ''' - #Will call JSONLD_object class to create properties and classes in the process. - ''' - + ''' # Get properties. properties = self.DME.find_properties() #classes = self.DME.find_classes() # Get JSONLD Template self.json_ld_object = self.base_jsonld_template() + # Iterativly add graph nodes to json_ld_object as properties and classes for node in self.graph.nodes: if node in properties: obj = self.create_object(template = self.property_template(), node = node) + #obj = self.create_object_optimized(template=self.property_template(), node=node) else: obj = self.create_object(template = self.class_template(), node = node) + #obj = self.create_object_optimized(template=self.class_template(), node=node) self.json_ld_object['@graph'].append(obj) return self.json_ld_object From c557c91cec0fb6af259ae466e4e36cdb8fd7f785 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 28 Jun 2023 13:07:47 -0700 Subject: [PATCH 029/239] Fix error with getting property and class labels from display names --- schematic/schemas/data_model_graph.py | 14 ++++---- schematic/schemas/data_model_jsonld.py | 1 - schematic/schemas/data_model_nodes.py | 50 ++++++++++++++++++-------- schematic/utils/schema_util.py | 11 +++++- 4 files changed, 53 insertions(+), 23 deletions(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index dc46f9b24..ef6f0bd67 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -68,16 +68,16 @@ class DataModelGraph(): ''' __metaclass__ = DataModelGraphMeta - def __init__(self, parsed_data_model): + def __init__(self, attribute_relationships_dict): '''Load parsed data model. 
''' - self.data_model = parsed_data_model - self.dmn = DataModelNodes() + self.attribute_relationships_dict = attribute_relationships_dict + self.dmn = DataModelNodes(self.attribute_relationships_dict) self.dme = DataModelEdges() self.data_model_relationships = DataModelRelationships() - if not self.data_model: + if not self.attribute_relationships_dict: raise ValueError( "Something has gone wrong, a data model was not loaded into the DataModelGraph Class. Please check that your paths are correct" ) @@ -94,13 +94,13 @@ def generate_data_model_graph(self): G = nx.MultiDiGraph() # Find all nodes - all_nodes = self.dmn.gather_all_nodes(self.data_model) + all_nodes = self.dmn.gather_all_nodes(self.attribute_relationships_dict) all_node_dict = {} ## Fill in MultiDigraph with nodes and edges for node in all_nodes: # Gather information for each node - node_dict = self.dmn.generate_node_dict(node, self.data_model) + node_dict = self.dmn.generate_node_dict(node, self.attribute_relationships_dict) all_node_dict[node] = node_dict # Generate node and attach information @@ -108,7 +108,7 @@ def generate_data_model_graph(self): for node in all_nodes: # Generate edges - G = self.dme.generate_edge(G, node, all_node_dict, self.data_model, edge_relationships) + G = self.dme.generate_edge(G, node, all_node_dict, self.attribute_relationships_dict, edge_relationships) return G class DataModelGraphExporer(): diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 73314cba3..19ed66d5b 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -206,7 +206,6 @@ def generate_jsonld_object(self): ''' # Get properties. 
properties = self.DME.find_properties() - #classes = self.DME.find_classes() # Get JSONLD Template self.json_ld_object = self.base_jsonld_template() diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index 506fdfb67..5e269ae3e 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -5,17 +5,20 @@ DataModelRelationships ) -from schematic.utils.schema_util import get_property_label_from_display_name, get_class_label_from_display_name, get_display_name_from_label, convert_bool +from schematic.utils.schema_util import get_label_from_display_name, get_display_name_from_label, convert_bool from schematic.utils.validate_rules_utils import validate_schema_rules from schematic.schemas.curie import uri2curie, curie2uri class DataModelNodes(): - def __init__(self): + def __init__(self, attribute_relationships_dict): self.namespaces = dict(rdf=Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#")) self.data_model_relationships = DataModelRelationships() self.value_relationships = self.data_model_relationships.define_value_relationships() self.edge_relationships_dictionary = self.data_model_relationships.define_edge_relationships() + self.ar_dict = attribute_relationships_dict + # Identify all properties + self.properties = self.get_data_model_properties(ar_dict=self.ar_dict) return @@ -73,23 +76,39 @@ def get_rel_default_info(self, relationship): rel_default = v['node_attr_dict'] return rel_key, rel_default - def run_rel_functions(self, rel_func, node_display_name='', key='', attr_relationships='', csv_header=''): + def get_data_model_properties(self, ar_dict): + properties=[] + for attribute, relationships in ar_dict.items(): + if 'Properties' in relationships['Relationships'].keys(): + properties.extend(relationships['Relationships']['Properties']) + properties = list(set(properties)) + return properties + + def get_entry_type(self, node_display_name): + if node_display_name in 
self.properties: + entry_type = 'property' + else: + entry_type = 'class' + return entry_type + + def run_rel_functions(self, rel_func, node_display_name='', key='', attr_relationships='', csv_header='', entry_type=''): ''' This function exists to centralzie handling of functions for filling out node information. TODO: and an ending else statement to alert to no func being caught. - Implement using a factory pattern. - - ''' - func_output = '' - if rel_func == get_display_name_from_label: - func_output = get_display_name_from_label(node_display_name, attr_relationships) - elif key == 'id' and rel_func == get_class_label_from_display_name: - func_output = 'bts:' + get_class_label_from_display_name(node_display_name) elif key == 'id' and rel_func == get_property_label_from_display_name: func_output = 'bts:' + get_property_label_from_display_name(node_display_name) + elif rel_func == get_class_label_from_display_name: func_output = get_class_label_from_display_name(node_display_name) - elif rel_func == get_property_label_from_display_name: - func_output = get_property_label_from_display_name(node_display_name) + ''' + + func_output = '' + if rel_func == get_display_name_from_label: + func_output = get_display_name_from_label(node_display_name, attr_relationships) + elif key == 'id' and rel_func == get_label_from_display_name: + func_output = 'bts:' + get_label_from_display_name(display_name =node_display_name, entry_type=entry_type) + elif rel_func == get_label_from_display_name: + func_output = get_label_from_display_name(display_name =node_display_name, entry_type=entry_type) elif rel_func == convert_bool: func_output = 'sms:' + convert_bool(attr_relationships[csv_header]).lower() else: @@ -113,6 +132,9 @@ def generate_node_dict(self, node_display_name, data_model): # Strip whitespace from node display name node_display_name = node_display_name.strip() + + # Determine if property or class + entry_type = self.get_entry_type(node_display_name=node_display_name) # If 
the node is an attribute, find its relationships. attr_relationships = {} @@ -133,7 +155,7 @@ def generate_node_dict(self, node_display_name, data_model): # Check if the default specifies calling a function. if 'standard' in rel_default.keys() and isfunction(rel_default['standard']): # Add to node_dict The value comes from the standard function call. - node_dict.update({rel_key: self.run_rel_functions(rel_default['standard'], node_display_name=node_display_name, key=key, attr_relationships=attr_relationships, csv_header=csv_header)}) + node_dict.update({rel_key: self.run_rel_functions(rel_default['standard'], node_display_name=node_display_name, key=key, attr_relationships=attr_relationships, csv_header=csv_header, entry_type=entry_type)}) else: # For standard entries, get information from attr_relationship dictionary node_dict.update({rel_key: attr_relationships[csv_header]}) @@ -141,7 +163,7 @@ def generate_node_dict(self, node_display_name, data_model): else: # Check if the default specifies calling a function. if 'default' in rel_default.keys() and isfunction(rel_default['default']): - node_dict.update({rel_key: self.run_rel_functions(rel_default['default'], node_display_name=node_display_name, key=key, attr_relationships=attr_relationships, csv_header=csv_header)}) + node_dict.update({rel_key: self.run_rel_functions(rel_default['default'], node_display_name=node_display_name, key=key, attr_relationships=attr_relationships, csv_header=csv_header, entry_type=entry_type)}) else: # Set value to defaults. 
node_dict.update({rel_key: rel_default['default']}) diff --git a/schematic/utils/schema_util.py b/schematic/utils/schema_util.py index c0a5732d7..b294094be 100644 --- a/schematic/utils/schema_util.py +++ b/schematic/utils/schema_util.py @@ -52,7 +52,16 @@ def get_display_name_from_label(node_name, attr_relationships): display_name = attr_relationships['Attribute'] else: display_name = node_name - return display_name + return display_name + +def get_label_from_display_name(display_name, entry_type, strict_camel_case = False): + + if entry_type.lower()=='class': + label = get_class_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case) + + elif entry_type.lower()=='property': + label=get_property_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case) + return label def convert_bool(provided_bool): return str(provided_bool) From e4c98115cd0d3e7b4373bfaf2c637246134c4fcf Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 27 Jul 2023 12:23:08 -0700 Subject: [PATCH 030/239] merge and add relationships class --- schematic/schemas/data_model_relationships.py | 227 ++++++++++++++++++ 1 file changed, 227 insertions(+) create mode 100644 schematic/schemas/data_model_relationships.py diff --git a/schematic/schemas/data_model_relationships.py b/schematic/schemas/data_model_relationships.py new file mode 100644 index 000000000..db231f0ef --- /dev/null +++ b/schematic/schemas/data_model_relationships.py @@ -0,0 +1,227 @@ +from typing import Dict +from schematic.utils.schema_util import get_label_from_display_name, get_display_name_from_label, convert_bool +from schematic.schemas.curie import uri2curie, curie2uri + +class DataModelRelationships(): + def __init__(self) -> None: + self.relationships_dictionary = self.define_data_model_relationships() + #self.delimiters = ['@', ':'] + return + + def define_data_model_relationships(self) -> Dict: + """ Define the relationships in the model so they can be accessed 
in a central location. + If adding anew relationship make sure to follow the conventions closely. + key:{ + jsonld_key:, + + csv_header: + jsonld_default: if at the end of processing there is no value present, this is the value we want to fill. + can also fill with type to ensure the key does not get deleted. + edge_rel: True, if this relationship defines an edge + False, if is a value relationship + required_header: True, if relationship header is required for the csv + node_dict: set default values for this relationship + key is the node relationship name, value is the default value. + If want to set default as a function create a nested dictionary. + {'default': default_function, + 'standard': alternative function to call if relationship is present for a node} + } If adding new functions to node_dict will + need to modify data_model_nodes.generate_node_dict in + } + TODO: + Key: + jsonld_key: get_json_key_from_context + csv_header: + jsonld_default: if at the end of processing there is no + edge_rel: + required_header: + node_label: + node_attr_dict: + + TODO: + - Functionally implement jsonld_edge key + - Add JSONLD Directionality: + Default Forward: + Reverse Domain Includes + - Add edge directionality: + Default in. + Out domainIncludes. + TODO: + - Use class inheritance to set up relationships. 
+ """ + map_data_model_relationships = { + + 'displayName': { + 'jsonld_key': 'sms:displayName', + 'csv_header': 'Attribute', + 'node_label': 'displayName', + 'type': str, + 'edge_rel': False, + 'required_header': True, + 'node_attr_dict':{'default': get_display_name_from_label, + 'standard': get_display_name_from_label, + }, + }, + 'label':{ + 'jsonld_key': 'rdfs:label', + 'csv_header': None, + 'node_label': 'label', + 'type': str, + 'edge_rel': False, + 'required_header': False, + 'node_attr_dict':{'default': get_label_from_display_name, + 'standard': get_label_from_display_name, + }, + }, + 'comment': { + 'jsonld_key': 'rdfs:comment', + 'csv_header': 'Description', + 'node_label': 'comment', + 'type': str, + 'edge_rel': False, + 'required_header': True, + 'node_attr_dict':{'default': 'TBD'}, + }, + 'rangeIncludes': { + 'jsonld_key': 'schema:rangeIncludes', + 'csv_header': 'Valid Values', + 'edge_key': 'rangeValue', + 'jsonld_direction': 'in', + 'edge_dir': 'out', + 'type': list, + 'edge_rel': True, + 'required_header': True, + }, + 'requiresDependency': { + 'jsonld_key': 'sms:requiresDependency', + 'csv_header': 'DependsOn', + 'edge_key': 'requiresDependency', + 'jsonld_direction': 'in', + 'edge_dir': 'in', + 'type': list, + 'edge_rel': True, + 'required_header': True, + }, + 'requiresComponent': { + 'jsonld_key': 'sms:requiresComponent', + 'csv_header': 'DependsOn Component', + 'edge_key': 'requiresComponent', + 'jsonld_direction': 'in', + 'edge_dir': 'in', + 'type': list, + 'edge_rel': True, + 'required_header': True, + }, + 'required': { + 'jsonld_key': 'sms:required', + 'csv_header': 'Required', + 'node_label': 'required', + 'type': str, + 'edge_rel': False, + 'required_header': True, + 'node_attr_dict':{'default': 'sms:false', + 'standard': convert_bool, + }, + }, + 'subClassOf': { + 'jsonld_key': 'rdfs:subClassOf', + 'csv_header': 'Parent', + 'edge_key': 'parentOf', + 'jsonld_direction': 'in', + 'edge_dir': 'in', + 'jsonld_default': [{"@id": 
"schema:Thing"}], + 'type': list, + 'edge_rel': True, + 'required_header': True, + }, + 'validationRules': { + 'jsonld_key': 'sms:validationRules', + 'csv_header': 'Validation Rules', + 'node_label': 'validationRules', + 'jsonld_direction': 'in', + 'edge_dir': 'in', + 'jsonld_default': [], + 'type': list, + 'edge_rel': False, + 'required_header': True, + 'node_attr_dict':{'default': [], + }, + }, + 'domainIncludes': { + 'jsonld_key': 'schema:domainIncludes', + 'csv_header': 'Properties', + 'edge_key': 'domainValue', + 'jsonld_direction': 'out', + 'edge_dir': 'out', + 'type': list, + 'edge_rel': True, + 'required_header': True, + }, + 'isPartOf': { + 'jsonld_key': 'schema:isPartOf', + 'csv_header': None, + 'node_label': 'isPartOf', + 'type': dict, + 'edge_rel': False, + 'required_header': False, + 'node_attr_dict':{'default': {"@id": "http://schema.biothings.io"}, + }, + }, + 'id': { + 'jsonld_key': '@id', + 'csv_header': 'Source', + 'node_label': 'uri', + 'type': str, + 'edge_rel': False, + 'required_header': True, + 'node_attr_dict':{'default': get_label_from_display_name, + 'standard': get_label_from_display_name, + }, + }, + } + + return map_data_model_relationships + + def define_required_csv_headers(self): + required_headers = [] + for k, v in self.relationships_dictionary.items(): + try: + if v['required_header']: + required_headers.append(v['csv_header']) + except KeyError: + print(f"Did not provide a 'required_header' key, value pair for the nested dictionary {k} : {key}") + + return required_headers + + def define_edge_relationships(self): + edge_relationships = {} + for k, v in self.relationships_dictionary.items(): + try: + if v['edge_rel']: + edge_relationships.update({k:v['csv_header']}) + except KeyError: + print(f"Did not provide a 'edge_rel' key, value pair for the nested dictionary {k} : {key}") + + return edge_relationships + + def define_value_relationships(self): + """ + Think about changing outputs. 
+ """ + value_relationships = {} + for k, v in self.relationships_dictionary.items(): + try: + if not v['edge_rel']: + value_relationships.update({k:v['csv_header']}) + ''' + if ':' in v['jsonld_key']: + value_relationships.update({k:v['jsonld_key'].split(':')[1]}) + elif '@' in v['jsonld_key']: + value_relationships.update({k:v['jsonld_key'].split('@')[1]}) + ''' + except KeyError: + print(f"Did not provide a 'edge_rel' for key {k}") + + return value_relationships + + From 6a9590f62901c043c0b72ac8ccf794a49a55dfe6 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Sat, 29 Jul 2023 11:27:02 -0700 Subject: [PATCH 031/239] update schemas to change node order to match develop, add edge_key --- schematic/schemas/data_model_edges.py | 15 ++++-- schematic/schemas/data_model_graph.py | 30 +++++++----- schematic/schemas/data_model_jsonld.py | 49 +++++++++++++++++-- schematic/schemas/data_model_relationships.py | 26 +++++----- 4 files changed, 87 insertions(+), 33 deletions(-) diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py index b21695db5..f1ab341a5 100644 --- a/schematic/schemas/data_model_edges.py +++ b/schematic/schemas/data_model_edges.py @@ -1,6 +1,11 @@ +from schematic.schemas.data_model_relationships import ( + DataModelRelationships + ) + class DataModelEdges(): def __init__(self): - return + self.dmr = DataModelRelationships() + self.data_model_relationships = self.dmr.relationships_dictionary def generate_edge(self, G, node, all_node_dict, data_model, edge_relationships): """ @@ -32,11 +37,15 @@ def generate_edge(self, G, node, all_node_dict, data_model, edge_relationships): else: weight = 0 # Here the first added node to the edge is the value that would be the valid value to the second node which is the attribute. 
- G.add_edge(all_node_dict[node]['label'], all_node_dict[attribute_display_name]['label'], key=key, weight=weight) + edge_key = self.data_model_relationships[key]['edge_key'] + if key in ['subClassOf', 'domainIncludes']: + G.add_edge(all_node_dict[node]['label'], all_node_dict[attribute_display_name]['label'], key=edge_key, weight=weight) + else: + G.add_edge(all_node_dict[attribute_display_name]['label'], all_node_dict[node]['label'], key=edge_key, weight=weight) # Add additional valid value edges if key == 'rangeIncludes': # Add this relationships for classes. - G.add_edge(all_node_dict[attribute_display_name]['label'], all_node_dict[node]['label'], key='subClassOf', weight=weight) + G.add_edge(all_node_dict[attribute_display_name]['label'], all_node_dict[node]['label'], key=edge_key, weight=weight) return G diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index ef6f0bd67..18ef74501 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -81,6 +81,7 @@ def __init__(self, attribute_relationships_dict): raise ValueError( "Something has gone wrong, a data model was not loaded into the DataModelGraph Class. Please check that your paths are correct" ) + self.graph = self.generate_data_model_graph() def generate_data_model_graph(self): @@ -121,16 +122,17 @@ def __init__(self, def find_properties(self): """ - TODO: handle 'domainIncludes' with relationship edge parameters. + TODO: handle 'domainValue' with relationship edge parameters. """ properties=[] for node_1, node_2, rel in self.graph.edges: - if rel == 'domainIncludes': + if rel == 'domainValue': properties.append(node_1) properties = set(properties) return properties def find_classes(self): + #checked nodes = self.graph.nodes properties = self.find_properties() classes = nodes - properties @@ -147,6 +149,7 @@ def get_adjacent_nodes_by_relationship(self, Returns: List of nodes that are adjacent to the given node. 
+ #checked """ nodes = set() @@ -195,7 +198,7 @@ def get_component_requirements_graph(self, # get the subgraph induced on required component nodes req_components_graph = self.get_subgraph_by_edge_type( - self.mm_graph, requires_component_relationship + self.graph, requires_component_relationship ).subgraph(req_components) return req_components_graph @@ -221,11 +224,12 @@ def get_descendants_by_edge_type(self, List of nodes that are descendants from a particular node (sorted / unsorted) """ - root_descendants = nx.descendants(self.mm_graph, source_node) + root_descendants = nx.descendants(self.graph, source_node) + breakpoint() subgraph_nodes = list(root_descendants) subgraph_nodes.append(source_node) - descendants_subgraph = self.mm_graph.subgraph(subgraph_nodes) + descendants_subgraph = self.graph.subgraph(subgraph_nodes) # prune the descendants subgraph so as to include only those edges that match the relationship type rel_edges = [] @@ -268,7 +272,7 @@ def get_descendants_by_edge_type(self, def get_digraph_by_edge_type(self): digraph = nx.DiGraph() - for (u, v, key, c) in self.mm_graph.edges(data=True, keys=True): + for (u, v, key, c) in self.graph.edges(data=True, keys=True): if key == edge_type: digraph.add_edge(u, v) @@ -292,7 +296,7 @@ def get_edges_by_relationship(self, """ edges = [] - for (u, v, key, c) in self.mm_graph.out_edges(node, data=True, keys=True): + for (u, v, key, c) in self.graph.out_edges(node, data=True, keys=True): if key == relationship: edges.append((u, v)) @@ -312,7 +316,7 @@ def get_node_definition(self, node_display_name: str) -> str: if not node_label: return "" - node_definition = self.mm_graph.nodes[node_label]["comment"] + node_definition = self.graph.nodes[node_label]["comment"] return node_definition @@ -349,7 +353,7 @@ def get_node_dependencies(self, dependencies_display_names = [] for req in required_dependencies: - dependencies_display_names.append(self.mm_graph.nodes[req]["displayName"]) + 
dependencies_display_names.append(self.graph.nodes[req]["displayName"]) return dependencies_display_names @@ -373,9 +377,9 @@ def get_node_label(self, node_display_name: str) -> str: node_display_name ) - if node_class_label in self.mm_graph.nodes: + if node_class_label in self.graph.nodes: node_label = node_class_label - elif node_property_label in self.mm_graph.nodes: + elif node_property_label in self.graph.nodes: node_label = node_property_label else: node_label = "" @@ -400,7 +404,7 @@ def find_class_specific_properties(self, schema_class): # Needs to be refactored no longer be JSONLD specific breakpoint() - schema_uri = self.mm_graph.nodes[schema_class]["uri"] + schema_uri = self.graph.nodes[schema_class]["uri"] properties = [] for record in self.schema["@graph"]: if record["@type"] == "rdf:Property": @@ -442,7 +446,7 @@ def is_node_required(self, node_display_name: str) -> bool: """ node_label = self.get_node_label(node_display_name) - node_required = self.mm_graph.nodes[node_label]["required"] + node_required = self.graph.nodes[node_label]["required"] return node_required diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 19ed66d5b..ce3c432bf 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -36,6 +36,7 @@ def base_jsonld_template(self): def create_object(self, template, node): data_model_relationships = self.dmr.relationships_dictionary + #edge_to_jsonld_keys = {rel_vals['edge_key']: rel_vals['jsonld_key'] for rel, rel_vals in data_model_relationships.items() if rel_vals['edge_rel']} # For each field in template fill out with information from the graph for rel, rel_vals in data_model_relationships.items(): @@ -50,13 +51,51 @@ def create_object(self, template, node): for node_1, node_2, weight in node_edges: # Get 'AtlasView'('relationship':{weight:value}) of edge + # need to convert this relationship back to the JSONLD key_rel node_edge_relationships = 
self.graph[node_1][node_2] + edge_rel = rel_vals['edge_key'] + + #node_edge_key_rels = [for rel in node_edge_relationships.keys] # Check if key_rel is even one of the relationships for this node pair. - if key_rel in node_edge_relationships: + #if key_rel in node_edge_relationships: + if edge_rel in node_edge_relationships: + for relationship, weight_dict in node_edge_relationships.items(): - if relationship == key_rel: + #if relationship == key_rel: + if relationship == edge_rel: + #if edge_rel == 'parentOf': + # breakpoint() + #if key_rel in ['domainIncludes']: + if edge_rel in ['domainIncludes', 'parentOf']: + #breakpoint() + if node_2 == node: + # Make sure the key is in the template (differs between properties and classes) + if rel_vals['jsonld_key'] in template.keys(): + node_1_id = {'@id': 'bts:'+node_1} + # TODO Move this to a helper function to clear up. + if (isinstance(template[rel_vals['jsonld_key']], list) and + node_1_id not in template[rel_vals['jsonld_key']]): + template[rel_vals['jsonld_key']].append(node_1_id) + else: + template[rel_vals['jsonld_key']] == node_1 + else: + if node_1 == node: + # Make sure the key is in the template (differs between properties and classes) + if rel_vals['jsonld_key'] in template.keys(): + node_2_id = {'@id': 'bts:'+node_2} + # TODO Move this to a helper function to clear up. 
+ if (isinstance(template[rel_vals['jsonld_key']], list) and + node_2_id not in template[rel_vals['jsonld_key']]): + # could possibly keep track of weights here but that might slow things down + template[rel_vals['jsonld_key']].append(node_2_id) + else: + template[rel_vals['jsonld_key']] == node_2 + #elif node_2 == node: + # breakpoint() + ''' if key_rel == 'domainIncludes': + breakpoint() if node_1 == node: # Make sure the key is in the template (differs between properties and classes) if rel_vals['jsonld_key'] in template.keys(): @@ -68,6 +107,7 @@ def create_object(self, template, node): else: template[rel_vals['jsonld_key']] == node_2 else: + breakpoint() if node_2 == node: # Make sure the key is in the template (differs between properties and classes) if rel_vals['jsonld_key'] in template.keys(): @@ -79,14 +119,14 @@ def create_object(self, template, node): template[rel_vals['jsonld_key']].append(node_1_id) else: template[rel_vals['jsonld_key']] == node_1 - else: + ''' + else: # attribute here refers to node attibutes (come up with better name.) node_attribute_name = rel_vals['node_label'] # Get recorded info for current node, and the attribute type node_info = nx.get_node_attributes(self.graph, node_attribute_name)[node] # Add this information to the template template[rel_vals['jsonld_key']] = node_info - # Clean up template template = self.clean_template(template=template, data_model_relationships=data_model_relationships, @@ -136,6 +176,7 @@ def reorder_entries(self, template): key = [k for k, v in data_model_relationships.items() if jsonld_key == v['jsonld_key']][0] # TODO: # Get edge weights for values in the list. 
+ #breakpoint() if data_model_relationships[key]['jsonld_direction'] == 'out': #use outedges original_edge_weights_dict = {attached_node:self.graph[template_node][attached_node][key]['weight'] diff --git a/schematic/schemas/data_model_relationships.py b/schematic/schemas/data_model_relationships.py index db231f0ef..84ab212d6 100644 --- a/schematic/schemas/data_model_relationships.py +++ b/schematic/schemas/data_model_relationships.py @@ -47,7 +47,7 @@ def define_data_model_relationships(self) -> Dict: Default in. Out domainIncludes. TODO: - - Use class inheritance to set up relationships. + - Use class inheritance to set up """ map_data_model_relationships = { @@ -86,7 +86,7 @@ def define_data_model_relationships(self) -> Dict: 'jsonld_key': 'schema:rangeIncludes', 'csv_header': 'Valid Values', 'edge_key': 'rangeValue', - 'jsonld_direction': 'in', + 'jsonld_direction': 'out', 'edge_dir': 'out', 'type': list, 'edge_rel': True, @@ -96,8 +96,8 @@ def define_data_model_relationships(self) -> Dict: 'jsonld_key': 'sms:requiresDependency', 'csv_header': 'DependsOn', 'edge_key': 'requiresDependency', - 'jsonld_direction': 'in', - 'edge_dir': 'in', + 'jsonld_direction': 'out', + 'edge_dir': 'out', 'type': list, 'edge_rel': True, 'required_header': True, @@ -106,8 +106,8 @@ def define_data_model_relationships(self) -> Dict: 'jsonld_key': 'sms:requiresComponent', 'csv_header': 'DependsOn Component', 'edge_key': 'requiresComponent', - 'jsonld_direction': 'in', - 'edge_dir': 'in', + 'jsonld_direction': 'out', + 'edge_dir': 'out', 'type': list, 'edge_rel': True, 'required_header': True, @@ -127,9 +127,9 @@ def define_data_model_relationships(self) -> Dict: 'jsonld_key': 'rdfs:subClassOf', 'csv_header': 'Parent', 'edge_key': 'parentOf', - 'jsonld_direction': 'in', - 'edge_dir': 'in', - 'jsonld_default': [{"@id": "schema:Thing"}], + 'jsonld_direction': 'out', + 'edge_dir': 'out', + 'jsonld_default': [{"@id": "bts:Thing"}], 'type': list, 'edge_rel': True, 'required_header': True, 
@@ -138,8 +138,8 @@ def define_data_model_relationships(self) -> Dict: 'jsonld_key': 'sms:validationRules', 'csv_header': 'Validation Rules', 'node_label': 'validationRules', - 'jsonld_direction': 'in', - 'edge_dir': 'in', + 'jsonld_direction': 'out', + 'edge_dir': 'out', 'jsonld_default': [], 'type': list, 'edge_rel': False, @@ -151,8 +151,8 @@ def define_data_model_relationships(self) -> Dict: 'jsonld_key': 'schema:domainIncludes', 'csv_header': 'Properties', 'edge_key': 'domainValue', - 'jsonld_direction': 'out', - 'edge_dir': 'out', + 'jsonld_direction': 'in', + 'edge_dir': 'in', 'type': list, 'edge_rel': True, 'required_header': True, From d1fbeb0fa46f9a3468b42ff055a078f0a695b9df Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 31 Jul 2023 18:03:59 -0700 Subject: [PATCH 032/239] finish fixing jsonld to work with flipped edges that match dev --- schematic/schemas/data_model_edges.py | 2 +- schematic/schemas/data_model_graph.py | 1 - schematic/schemas/data_model_jsonld.py | 24 +++++++++---------- schematic/schemas/data_model_relationships.py | 3 ++- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py index f1ab341a5..351d2316a 100644 --- a/schematic/schemas/data_model_edges.py +++ b/schematic/schemas/data_model_edges.py @@ -45,7 +45,7 @@ def generate_edge(self, G, node, all_node_dict, data_model, edge_relationships): # Add additional valid value edges if key == 'rangeIncludes': # Add this relationships for classes. 
- G.add_edge(all_node_dict[attribute_display_name]['label'], all_node_dict[node]['label'], key=edge_key, weight=weight) + G.add_edge(all_node_dict[attribute_display_name]['label'], all_node_dict[node]['label'], key='parentOf', weight=weight) return G diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index 18ef74501..9dd7a2b52 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -225,7 +225,6 @@ def get_descendants_by_edge_type(self, """ root_descendants = nx.descendants(self.graph, source_node) - breakpoint() subgraph_nodes = list(root_descendants) subgraph_nodes.append(source_node) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index ce3c432bf..1feb09f4c 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -54,6 +54,8 @@ def create_object(self, template, node): # need to convert this relationship back to the JSONLD key_rel node_edge_relationships = self.graph[node_1][node_2] edge_rel = rel_vals['edge_key'] + + #node_edge_key_rels = [for rel in node_edge_relationships.keys] @@ -64,9 +66,7 @@ def create_object(self, template, node): for relationship, weight_dict in node_edge_relationships.items(): #if relationship == key_rel: if relationship == edge_rel: - #if edge_rel == 'parentOf': - # breakpoint() - #if key_rel in ['domainIncludes']: + if edge_rel in ['domainIncludes', 'parentOf']: #breakpoint() if node_2 == node: @@ -172,25 +172,27 @@ def reorder_entries(self, template): for jsonld_key, entry in template.items(): #if the entry is of type list and theres more than one value in the list attempt to reorder if isinstance(entry, list) and len(entry)>1: - # Get relationship key from JSONLD Key: - key = [k for k, v in data_model_relationships.items() if jsonld_key == v['jsonld_key']][0] + # Get edge key from data_model_relationships using the jsonld_key: + key, edge_key = [(k, v['edge_key']) 
for k, v in data_model_relationships.items() if jsonld_key == v['jsonld_key']][0] # TODO: # Get edge weights for values in the list. - #breakpoint() + if data_model_relationships[key]['jsonld_direction'] == 'out': #use outedges - original_edge_weights_dict = {attached_node:self.graph[template_node][attached_node][key]['weight'] + + original_edge_weights_dict = {attached_node:self.graph[template_node][attached_node][edge_key]['weight'] for template_node, attached_node in self.graph.out_edges(template_id) - if key in self.graph[template_node][attached_node] + if edge_key in self.graph[template_node][attached_node] } else: #use inedges - original_edge_weights_dict = {attached_node:self.graph[attached_node][template_node][key]['weight'] + original_edge_weights_dict = {attached_node:self.graph[attached_node][template_node][edge_key]['weight'] for attached_node, template_node in self.graph.in_edges(template_id) - if key in self.graph[attached_node][template_node] + if edge_key in self.graph[attached_node][template_node] } # TODO: MOVE TO HELPER + # would topological sort work here? 
sorted_edges = list(dict(sorted(original_edge_weights_dict.items(), key=lambda item: item[1])).keys()) edge_weights_dict={edge:i for i, edge in enumerate(sorted_edges)} ordered_edges = [0]*len(edge_weights_dict.keys()) @@ -254,10 +256,8 @@ def generate_jsonld_object(self): for node in self.graph.nodes: if node in properties: obj = self.create_object(template = self.property_template(), node = node) - #obj = self.create_object_optimized(template=self.property_template(), node=node) else: obj = self.create_object(template = self.class_template(), node = node) - #obj = self.create_object_optimized(template=self.class_template(), node=node) self.json_ld_object['@graph'].append(obj) return self.json_ld_object diff --git a/schematic/schemas/data_model_relationships.py b/schematic/schemas/data_model_relationships.py index 84ab212d6..e2cb9bba6 100644 --- a/schematic/schemas/data_model_relationships.py +++ b/schematic/schemas/data_model_relationships.py @@ -48,6 +48,7 @@ def define_data_model_relationships(self) -> Dict: Out domainIncludes. 
TODO: - Use class inheritance to set up + - Check 'subClassOf' edge_dir """ map_data_model_relationships = { @@ -127,7 +128,7 @@ def define_data_model_relationships(self) -> Dict: 'jsonld_key': 'rdfs:subClassOf', 'csv_header': 'Parent', 'edge_key': 'parentOf', - 'jsonld_direction': 'out', + 'jsonld_direction': 'in', 'edge_dir': 'out', 'jsonld_default': [{"@id": "bts:Thing"}], 'type': list, From ab7cab1387c34e595509a766f2cfce1669dcf451 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 31 Jul 2023 18:04:37 -0700 Subject: [PATCH 033/239] add json validation code --- schematic/schemas/data_model_json_schema.py | 274 +++++++++++++++++++- 1 file changed, 272 insertions(+), 2 deletions(-) diff --git a/schematic/schemas/data_model_json_schema.py b/schematic/schemas/data_model_json_schema.py index 6afdfe9b4..98c6a2748 100644 --- a/schematic/schemas/data_model_json_schema.py +++ b/schematic/schemas/data_model_json_schema.py @@ -1,8 +1,278 @@ class DataModelJSONSchema: def __init__(): - def get_json_validation_schema(): + def get_json_validation_schema(self, source_node: str, schema_name: str, graph: Nx.MultiDiGraph) -> Dict: ''' A refactor of get_json_schema_requirements() from the schema generator. - ''' \ No newline at end of file + Consolidated method that aims to gather dependencies and value constraints across terms / nodes in a schema.org schema and store them in a jsonschema /JSON Schema schema. + + It does so for any given node in the schema.org schema (recursively) using the given node as starting point in the following manner: + 1) Find all the nodes / terms this node depends on (which are required as "additional metadata" given this node is "required"). + 2) Find all the allowable metadata values / nodes that can be assigned to a particular node (if such a constraint is specified on the schema). + + Args: + source_node: Node from which we can start recursive dependancy traversal (as mentioned above). 
+ schema_name: Name assigned to JSON-LD schema (to uniquely identify it via URI when it is hosted on the Internet). + + Returns: + JSON Schema as a dictionary. + ''' + json_schema = { + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "http://example.com/" + schema_name, + "title": schema_name, + "type": "object", + "properties": {}, + "required": [], + "allOf": [], + } + + # get graph corresponding to data model schema + #mm_graph = self.se.get_nx_schema() + + nodes_to_process = ( + [] + ) # list of nodes to be checked for dependencies, starting with the source node + processed_nodes = ( + [] + ) # keep of track of nodes whose dependencies have been processed + reverse_dependencies = ( + {} + ) # maintain a map between conditional nodes and their dependencies (reversed) -- {dependency : conditional_node} + range_domain_map = ( + {} + ) # maintain a map between range nodes and their domain nodes {range_value : domain_value} + # the domain node is very likely the parentof ("parentOf" relationship) of the range node + + root_dependencies = self.get_adjacent_nodes_by_relationship( + source_node, self.requires_dependency_relationship + ) + + # if root_dependencies is empty it means that a class with name 'source_node' exists + # in the schema, but it is not a valid component + if not root_dependencies: + raise ValueError(f"'{source_node}' is not a valid component in the schema.") + + nodes_to_process += root_dependencies + + process_node = nodes_to_process.pop(0) + + while process_node: + + if not process_node in processed_nodes: + # node is being processed + node_is_processed = True + + node_range = self.get_adjacent_nodes_by_relationship( + process_node, self.range_value_relationship + ) + + # get node range display name + node_range_d = self.get_nodes_display_names(node_range, graph) + + node_dependencies = self.get_adjacent_nodes_by_relationship( + process_node, self.requires_dependency_relationship + ) + + # get process node display name + 
node_display_name = graph.nodes[process_node]["displayName"] + + # updating map between node and node's valid values + for n in node_range_d: + if not n in range_domain_map: + range_domain_map[n] = [] + range_domain_map[n].append(node_display_name) + + # can this node be map to the empty set (if required no; if not required yes) + # TODO: change "required" to different term, required may be a bit misleading (i.e. is the node required in the schema) + node_required = self.is_node_required(process_node, graph) + + # get any additional validation rules associated with this node (e.g. can this node be mapped to a list of other nodes) + node_validation_rules = self.get_node_validation_rules( + node_display_name + ) + + if node_display_name in reverse_dependencies: + # if node has conditionals set schema properties and conditional dependencies + # set schema properties + if node_range: + # if process node has valid value range set it in schema properties + schema_valid_vals = self.get_range_schema( + node_range_d, node_display_name, blank=True + ) + + if node_validation_rules: + # if this node has extra validation rules process them + # TODO: abstract this into its own validation rule constructor/generator module/class + if rule_in_rule_list("list", node_validation_rules): + # if this node can be mapped to a list of nodes + # set its schema accordingly + schema_valid_vals = self.get_array_schema( + node_range_d, node_display_name, blank=True + ) + + else: + # otherwise, by default allow any values + schema_valid_vals = {node_display_name: {}} + + json_schema["properties"].update(schema_valid_vals) + + # set schema conditional dependencies + for node in reverse_dependencies[node_display_name]: + # set all of the conditional nodes that require this process node + + # get node domain if any + # ow this node is a conditional requirement + if node in range_domain_map: + domain_nodes = range_domain_map[node] + conditional_properties = {} + + for domain_node in domain_nodes: + 
+ # set range of conditional node schema + conditional_properties.update( + { + "properties": {domain_node: {"enum": [node]}}, + "required": [domain_node], + } + ) + + # given node conditional are satisfied, this process node (which is dependent on these conditionals) has to be set or not depending on whether it is required + if node_range: + dependency_properties = self.get_range_schema( + node_range_d, + node_display_name, + blank=not node_required, + ) + + if node_validation_rules: + if rule_in_rule_list("list", node_validation_rules): + # TODO: get_range_schema and get_range_schema have similar behavior - combine in one module + dependency_properties = self.get_array_schema( + node_range_d, + node_display_name, + blank=not node_required, + ) + + else: + if node_required: + dependency_properties = self.get_non_blank_schema( + node_display_name + ) + else: + dependency_properties = {node_display_name: {}} + schema_conditional_dependencies = { + "if": conditional_properties, + "then": { + "properties": dependency_properties, + "required": [node_display_name], + }, + } + + # update conditional-dependency rules in json schema + json_schema["allOf"].append( + schema_conditional_dependencies + ) + + else: + # node doesn't have conditionals + if node_required: + if node_range: + schema_valid_vals = self.get_range_schema( + node_range_d, node_display_name, blank=False + ) + + if node_validation_rules: + # If there are valid values AND they are expected to be a list, + # reformat the Valid Values. 
+ if rule_in_rule_list("list", node_validation_rules): + schema_valid_vals = self.get_array_schema( + node_range_d, node_display_name, blank=False + ) + else: + schema_valid_vals = self.get_non_blank_schema( + node_display_name + ) + + json_schema["properties"].update(schema_valid_vals) + # add node to required fields + json_schema["required"] += [node_display_name] + + elif process_node in root_dependencies: + # node doesn't have conditionals and is not required; it belongs in the schema only if it is in root's dependencies + + if node_range: + schema_valid_vals = self.get_range_schema( + node_range_d, node_display_name, blank=True + ) + + if node_validation_rules: + if rule_in_rule_list("list", node_validation_rules): + schema_valid_vals = self.get_array_schema( + node_range_d, node_display_name, blank=True + ) + + else: + schema_valid_vals = {node_display_name: {}} + + json_schema["properties"].update(schema_valid_vals) + + else: + # node doesn't have conditionals and it is not required and it is not a root dependency + # the node doesn't belong in the schema + # do not add to processed nodes since its conditional may be traversed at a later iteration (though unlikely for most schemas we consider) + node_is_processed = False + + # add process node as a conditional to its dependencies + node_dependencies_d = self.get_nodes_display_names( + node_dependencies, graph + ) + + for dep in node_dependencies_d: + if not dep in reverse_dependencies: + reverse_dependencies[dep] = [] + + reverse_dependencies[dep].append(node_display_name) + + # add nodes found as dependencies and range of this processed node + # to the list of nodes to be processed + nodes_to_process += node_range + nodes_to_process += node_dependencies + + # if the node is processed add it to the processed nodes set + if node_is_processed: + processed_nodes.append(process_node) + + # if the list of nodes to process is not empty + # set the process node the next remaining node to process + if 
nodes_to_process: + process_node = nodes_to_process.pop(0) + else: + # no more nodes to process + # exit the loop + break + + logger.info("JSON schema successfully generated from schema.org schema!") + + # if no conditional dependencies were added we can't have an empty 'AllOf' block in the schema, so remove it + if not json_schema["allOf"]: + del json_schema["allOf"] + + # If no config value and SchemaGenerator was initialized with + # a JSON-LD path, construct + if self.jsonld_path is not None: + prefix = self.jsonld_path_root + prefix_root, prefix_ext = os.path.splitext(prefix) + if prefix_ext == ".model": + prefix = prefix_root + json_schema_log_file = f"{prefix}.{source_node}.schema.json" + + logger.info( + "The JSON schema file can be inspected by setting the following " + "nested key in the configuration: (model > input > log_location)." + ) + + logger.info(f"JSON schema file log stored as {json_schema_log_file}") + + return json_schema \ No newline at end of file From c8da3524d1bb1136d0b4e568c81681dea858de7e Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 2 Aug 2023 15:38:16 -0700 Subject: [PATCH 034/239] changes to allow json schema validation and manifest generation --- schematic/manifest/generator.py | 42 +- schematic/schemas/data_model_graph.py | 165 +++-- schematic/schemas/data_model_json_schema.py | 637 ++++++++++-------- schematic/schemas/data_model_jsonld.py | 88 +-- schematic/schemas/data_model_nodes.py | 12 +- schematic/schemas/data_model_parser.py | 76 ++- schematic/schemas/data_model_relationships.py | 4 +- 7 files changed, 613 insertions(+), 411 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index eb68be3fb..eb113ba17 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1,6 +1,7 @@ from collections import OrderedDict import json import logging +import networkx as nx from openpyxl.styles import Font, Alignment, PatternFill from openpyxl import load_workbook 
from openpyxl.utils.dataframe import dataframe_to_rows @@ -11,7 +12,8 @@ from tempfile import NamedTemporaryFile from typing import Dict, List, Optional, Tuple, Union -from schematic.schemas.generator import SchemaGenerator +from schematic.schemas.data_model_graph import DataModelGraphExporer +from schematic.schemas.data_model_json_schema import DataModelJSONSchema from schematic.utils.google_api_utils import ( execute_google_api_requests, build_service_account_creds, @@ -35,6 +37,7 @@ class ManifestGenerator(object): def __init__( self, path_to_json_ld: str, # JSON-LD file to be used for generating the manifest + graph: nx.MultiDiGraph, alphabetize_valid_values: str = 'ascending', title: str = None, # manifest sheet title root: str = None, @@ -53,6 +56,11 @@ def __init__( # google service credentials object self.creds = services_creds["creds"] + # Path to jsonld + self.jsonld_path = path_to_json_ld + # Graph + self.graph = graph + # schema root self.root = root @@ -74,8 +82,8 @@ def __init__( "when there is no manifest file for the dataset in question." ) - # SchemaGenerator() object - self.sg = SchemaGenerator(path_to_json_ld) + # Data Model Explorer object + self.DME = DataModelGraphExporer(self.graph) # additional metadata to add to manifest self.additional_metadata = additional_metadata @@ -83,7 +91,7 @@ def __init__( # Determine whether current data type is file-based is_file_based = False if self.root: - is_file_based = "Filename" in self.sg.get_node_dependencies(self.root) + is_file_based = "Filename" in self.DME.get_node_dependencies(self.root) self.is_file_based = is_file_based def _attribute_to_letter(self, attribute, manifest_fields): @@ -352,13 +360,16 @@ def _get_json_schema(self, json_schema_filepath: str) -> Dict: json_schema_filepath(str): path to json schema file Returns: Dictionary, containing portions of the json schema + TODO: Do we even allow people to provide a json_schema_filepath anyore? 
""" if not json_schema_filepath: # if no json schema is provided; there must be # schema explorer defined for schema.org schema # o.w. this will throw an error # TODO: catch error - json_schema = self.sg.get_json_schema_requirements(self.root, self.title) + data_model_js = DataModelJSONSchema(jsonld_path=self.jsonld_path, graph=self.graph) + json_schema = data_model_js.get_json_validation_schema(source_node=self.root, schema_name=self.title) + breakpoint() else: with open(json_schema_filepath) as jsonfile: json_schema = json.load(jsonfile) @@ -802,9 +813,9 @@ def _request_row_format(self, i, req): notes_body["requests"] (dict): with information on note to add to the column header. This notes body will be added to a request. """ - if self.sg.se: + if self.DME: # get node definition - note = self.sg.get_node_definition(req) + note = self.DME.get_node_comment(node_display_name = req) notes_body = { "requests": [ @@ -1003,8 +1014,7 @@ def _dependency_formatting( dependency_formatting_body = {"requests": []} for j, val_dep in enumerate(val_dependencies): is_required = False - - if self.sg.is_node_required(val_dep): + if self.DME.get_node_required(node_display_name=val_dep): is_required = True else: is_required = False @@ -1047,13 +1057,13 @@ def _request_dependency_formatting( for req_val in req_vals: # get this required/valid value's node label in schema, based on display name (i.e. 
shown to the user in a dropdown to fill in) req_val = req_val["userEnteredValue"] - req_val_node_label = self.sg.get_node_label(req_val) + req_val_node_label = self.DME.get_node_label(req_val) if not req_val_node_label: # if this node is not in the graph # continue - there are no dependencies for it continue # check if this required/valid value has additional dependency attributes - val_dependencies = self.sg.get_node_dependencies( + val_dependencies = self.DME.get_node_dependencies( req_val_node_label, schema_ordered=False ) @@ -1106,7 +1116,7 @@ def _create_requests_body( requests_body["requests"] = [] for i, req in enumerate(ordered_metadata_fields[0]): # Gather validation rules and valid values for attribute. - validation_rules = self.sg.get_node_validation_rules(req) + validation_rules = self.DME.get_node_validation_rules(node_display_name=req) # Add regex match validaiton rule to Google Sheets. if validation_rules and sheet_url: @@ -1353,7 +1363,7 @@ def map_annotation_names_to_display_names( pd.DataFrame: Annotations table with updated column headers. 
""" # Get list of attribute nodes from data model - model_nodes = self.sg.se.get_nx_schema().nodes + model_nodes = self.graph.nodes # Subset annotations to those appearing as a label in the model labels = filter(lambda x: x in model_nodes, annotations.columns) @@ -1524,7 +1534,7 @@ def get_manifest( # Get manifest file associated with given dataset (if applicable) # populate manifest with set of new files (if applicable) - manifest_record = store.updateDatasetManifestFiles(self.sg, datasetId = dataset_id, store = False) + manifest_record = store.updateDatasetManifestFiles(self.DME, datasetId = dataset_id, store = False) # get URL of an empty manifest file created based on schema component empty_manifest_url = self.get_empty_manifest(strict=strict, sheet_url=sheet_url) @@ -1752,9 +1762,9 @@ def sort_manifest_fields(self, manifest_fields, order="schema"): # order manifest fields based on data-model schema if order == "schema": - if self.sg and self.root: + if self.DME and self.root: # get display names of dependencies - dependencies_display_names = self.sg.get_node_dependencies(self.root) + dependencies_display_names = self.DME.get_node_dependencies(self.root) # reorder manifest fields so that root dependencies are first and follow schema order manifest_fields = sorted( diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index 9dd7a2b52..6c42d24ce 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -75,7 +75,7 @@ def __init__(self, attribute_relationships_dict): self.attribute_relationships_dict = attribute_relationships_dict self.dmn = DataModelNodes(self.attribute_relationships_dict) self.dme = DataModelEdges() - self.data_model_relationships = DataModelRelationships() + self.dmr = DataModelRelationships() if not self.attribute_relationships_dict: raise ValueError( @@ -89,7 +89,7 @@ def generate_data_model_graph(self): ''' # Get all relationships with edges - edge_relationships = 
self.data_model_relationships.define_edge_relationships() + edge_relationships = self.dmr.define_edge_relationships() # Instantiate NetworkX MultiDigraph G = nx.MultiDiGraph() @@ -119,14 +119,30 @@ def __init__(self, Load data model graph as a singleton. ''' self.graph = G + self.dmr = DataModelRelationships() + self.rel_dict = self.dmr.relationships_dictionary + + # TODO: Clean up to create variables within a loop. + # Creating variables here so its cleaner to know all the references at the top of the class + # Get node labels and edge keys for all referenced relationships + # Edge Keys + self.domainIncludes_ek = self.rel_dict['domainIncludes']['edge_key'] + self.reqComp_ek = self.rel_dict['requiresComponent']['edge_key'] + self.reqDep_ek = self.rel_dict['requiresDependency']['edge_key'] + self.subClassOf_ek = self.rel_dict['subClassOf']['edge_key'] + + # Node Labels + self.displayName_nl = self.rel_dict['displayName']['node_label'] + self.comment_nl = self.rel_dict['comment']['node_label'] + self.validationRules_nl = self.rel_dict['validationRules']['node_label'] def find_properties(self): """ - TODO: handle 'domainValue' with relationship edge parameters. """ + properties=[] for node_1, node_2, rel in self.graph.edges: - if rel == 'domainValue': + if rel == self.domainIncludes_ek: properties.append(node_1) properties = set(properties) return properties @@ -161,7 +177,7 @@ def get_adjacent_nodes_by_relationship(self, def get_component_requirements(self, source_component: str, - requires_component_relationship: str = "requiresComponent") -> List[str]: + ) -> List[str]: """Get all components that are associated with a given source component and are required by it. Args: @@ -169,12 +185,13 @@ def get_component_requirements(self, Returns: List of nodes that are descendants from the source component are are related to the source through a specific component relationship. 
+ # Tested """ req_components = list( reversed( self.get_descendants_by_edge_type( - source_component, requires_component_relationship, ordered=True + source_component, self.reqComp_ek, ordered=True ) ) ) @@ -183,7 +200,7 @@ def get_component_requirements(self, def get_component_requirements_graph(self, source_component: str, - requires_component_relationship: str = "requiresComponent") -> nx.DiGraph: + ) -> nx.DiGraph: """Get all components that are associated with a given source component and are required by it; return the components as a dependency graph (i.e. a DAG). Args: @@ -198,7 +215,7 @@ def get_component_requirements_graph(self, # get the subgraph induced on required component nodes req_components_graph = self.get_subgraph_by_edge_type( - self.graph, requires_component_relationship + self.graph, self.reqComp_ek, ).subgraph(req_components) return req_components_graph @@ -222,6 +239,7 @@ def get_descendants_by_edge_type(self, Returns: List of nodes that are descendants from a particular node (sorted / unsorted) + # Tested """ root_descendants = nx.descendants(self.graph, source_node) @@ -301,7 +319,34 @@ def get_edges_by_relationship(self, return edges - def get_node_definition(self, node_display_name: str) -> str: + + + def get_ordered_entry(self, key: str, source_node_label:str): + + # Check if node is in the graph, if not throw an error. 
+ edge_key = self.rel_dict[key]['edge_key'] + if self.rel_dict[key]['jsonld_direction'] == 'out': + #use outedges + + original_edge_weights_dict = {attached_node:self.graph[source_node][attached_node][edge_key]['weight'] + for source_node, attached_node in self.graph.out_edges(source_node_label) + if edge_key in self.graph[source_node][attached_node] + } + else: + #use inedges + original_edge_weights_dict = {attached_node:self.graph[attached_node][source_node][edge_key]['weight'] + for attached_node, source_node in self.graph.in_edges(source_node_label) + if edge_key in self.graph[attached_node][source_node] + } + + sorted_edges = list(dict(sorted(original_edge_weights_dict.items(), key=lambda item: item[1])).keys()) + + return sorted_edges + + # Get values associated with a node + # TODO: make sure all these gets follow the same pattern for clarity + + def get_node_comment(self, node_display_name: str = None, node_label: str= None) -> str: """Get the node definition, i.e., the "comment" associated with a given node display name. Args: @@ -309,21 +354,22 @@ def get_node_definition(self, node_display_name: str) -> str: Returns: Comment associated with node, as a string. + TODO: add to args """ - node_label = self.get_node_label(node_display_name) + if not node_label: + node_label = self.get_node_label(node_display_name) if not node_label: return "" - node_definition = self.graph.nodes[node_label]["comment"] + node_definition = self.graph.nodes[node_label][self.comment_nl] return node_definition def get_node_dependencies(self, source_node: str, display_names: bool = True, - schema_ordered: bool = True, - requires_dependency_relationship: str = "requiresDependency", + schema_ordered: bool = True, ) -> List[str]: """Get the immediate dependencies that are related to a given source node. @@ -338,26 +384,41 @@ def get_node_dependencies(self, List of nodes that are dependent on the source node. 
""" - # NOTE might not be necessary to move through explore_class in this refactored version. if schema_ordered: # get dependencies in the same order in which they are defined in the schema - required_dependencies = self.explore_class(source_node)["dependencies"] + required_dependencies = self.get_ordered_entry(key=self.reqDep_ek, source_node_label=source_node) else: required_dependencies = self.get_adjacent_nodes_by_relationship( - source_node, self.requires_dependency_relationship - ) + node = source_node, relationship = self.reqDep_ek) if display_names: # get display names of dependencies dependencies_display_names = [] for req in required_dependencies: - dependencies_display_names.append(self.graph.nodes[req]["displayName"]) + dependencies_display_names.append(self.graph.nodes[req][self.displayName_nl]) return dependencies_display_names return required_dependencies + def get_nodes_display_names( + self, node_list: List[str], + ) -> List[str]: + """Get display names associated with the given list of nodes. + + Args: + node_list: List of nodes whose display names we need to retrieve. + + Returns: + List of display names. + """ + node_list_display_names = [ + self.graph.nodes[node][self.displayName_nl] for node in node_list + ] + + return node_list_display_names + def get_node_label(self, node_display_name: str) -> str: """Get the node label for a given display name. @@ -371,9 +432,9 @@ def get_node_label(self, node_display_name: str) -> str: KeyError: If the node cannot be found in the graph. 
""" - node_class_label = SchemaUtils.get_class_label_from_display_name(node_display_name) - node_property_label = SchemaUtils.get_property_label_from_display_name( - node_display_name + node_class_label = get_class_label_from_display_name(display_name = node_display_name) + node_property_label = get_property_label_from_display_name( + display_name = node_display_name ) if node_class_label in self.graph.nodes: @@ -385,9 +446,47 @@ def get_node_label(self, node_display_name: str) -> str: return node_label - def find_adjacent_child_classes(self, schema_class): + def get_node_required(self, node_display_name: str = None, node_label:str = None) -> bool: + """Check if a given node is required or not. + + Note: The possible options that a node can be associated with -- "required" / "optional". + + Args: + node_display_name: Display name of the node which you want to get the label for. - return self.get_adjacent_nodes_by_relationship(schema_class, "parentOf") + Returns: + True: If the given node is a "required" node. + False: If the given node is not a "required" (i.e., an "optional") node. + """ + if not node_label: + node_label = self.get_node_label(node_display_name) + + rel_node_label = self.rel_dict["required"]["node_label"] + node_required = self.graph.nodes[node_label][rel_node_label] + return node_required + + def get_node_validation_rules(self, node_display_name: str = None, node_label: str = None) -> str: + """Get validation rules associated with a node, + + Args: + node_display_name: Display name of the node which you want to get the label for. + + Returns: + A set of validation rules associated with node, as a list. 
+ """ + if not node_label: + node_label = self.get_node_label(node_display_name) + + if not node_label: + return [] + + node_validation_rules = self.graph.nodes[node_label]["validationRules"] + + return node_validation_rules + + + def find_adjacent_child_classes(self, schema_class): + return self.get_adjacent_nodes_by_relationship(node = schema_class, relationship = self.subClassOf_ek) def find_all_class_properties(self): """ @@ -431,25 +530,9 @@ def find_class_usages(self): """ return - def is_node_required(self, node_display_name: str) -> bool: - """Check if a given node is required or not. - - Note: The possible options that a node can be associated with -- "required" / "optional". - - Args: - node_display_name: Display name of the node which you want to get the label for. - - Returns: - True: If the given node is a "required" node. - False: If the given node is not a "required" (i.e., an "optional") node. - """ - node_label = self.get_node_label(node_display_name) - - node_required = self.graph.nodes[node_label]["required"] - - return node_required + - def explore_class(self): + def explore_class(self, source_node): """ nx specific version of this? This might not be necessary since each nx node should already contain all required information. Put this here for now as a dummy function so this can be explored more. 
diff --git a/schematic/schemas/data_model_json_schema.py b/schematic/schemas/data_model_json_schema.py index 98c6a2748..638da02ba 100644 --- a/schematic/schemas/data_model_json_schema.py +++ b/schematic/schemas/data_model_json_schema.py @@ -1,278 +1,371 @@ -class DataModelJSONSchema: - def __init__(): +import logging +import networkx as nx +import os +from typing import Any, Dict, Optional, Text, List + +from schematic.schemas.data_model_graph import DataModelGraphExporer +from schematic.schemas.data_model_relationships import DataModelRelationships - def get_json_validation_schema(self, source_node: str, schema_name: str, graph: Nx.MultiDiGraph) -> Dict: +from schematic.utils.validate_utils import rule_in_rule_list +logger = logging.getLogger(__name__) + +class DataModelJSONSchema: + def __init__(self, jsonld_path: str, graph:nx.MultiDiGraph, + ): + self.jsonld_path = jsonld_path + self.graph = graph + self.DME = DataModelGraphExporer(self.graph) + self.dmr = DataModelRelationships() + self.rel_dict = self.dmr.relationships_dictionary + + self.reqDep_ek = self.rel_dict['requiresDependency']['edge_key'] + self.rangeIncludes_ek = self.rel_dict['rangeIncludes']['edge_key'] + self.reqComp_ek = self.rel_dict['requiresComponent']['edge_key'] + + # Node Labels + self.displayName_nl = self.rel_dict['displayName']['node_label'] + + def get_array_schema( + self, node_range: List[str], node_name: str, blank=False + ) -> Dict[str, Dict[str, List[str]]]: + """Add a list of nodes to the "enum" key in a given JSON schema object. + Allow a node to be mapped to any subset of the list + + Args: + node_name: Name of the "main" / "head" key in the JSON schema / object. + node_range: List of nodes to be added to the JSON object. + blank: If True, add empty node to end of node list. + If False, do not add empty node to end of node list. + + Returns: + JSON object with array validation rule. + TODO: used? 
+ """ + + schema_node_range_array = { + node_name: { + "type": "array", + "items": {"enum": node_range + [""] if blank else node_range}, + "maxItems": len(node_range), + } + } + + return schema_node_range_array + + def get_non_blank_schema( + self, node_name: str + ) -> Dict: # can't define heterogenous Dict generic types + """Get a schema rule that does not allow null or empty values. + + Args: + node_name: Name of the node on which the schema rule is to be applied. + + Returns: + Schema rule as a JSON object. + TODO: + Used? + """ + non_blank_schema = {node_name: {"not": {"type": "null"}, "minLength": 1}} + + return non_blank_schema + + def get_range_schema( + self, node_range: List[str], node_name: str, blank=False + ) -> Dict[str, Dict[str, List[str]]]: + """Add a list of nodes to the "enum" key in a given JSON schema object. + + Args: + node_name: Name of the "main" / "head" key in the JSON schema / object. + node_range: List of nodes to be added to the JSON object. + blank: If True, add empty node to end of node list. + If False, do not add empty node to end of node list. + + Returns: + JSON object with nodes. + TODO: + Used? + """ + if blank: + schema_node_range = {node_name: {"enum": node_range + [""]}} + else: + schema_node_range = {node_name: {"enum": node_range}} + + return schema_node_range + + def get_json_validation_schema(self, source_node: str, schema_name: str) -> Dict: ''' A refactor of get_json_schema_requirements() from the schema generator. Consolidated method that aims to gather dependencies and value constraints across terms / nodes in a schema.org schema and store them in a jsonschema /JSON Schema schema. - It does so for any given node in the schema.org schema (recursively) using the given node as starting point in the following manner: - 1) Find all the nodes / terms this node depends on (which are required as "additional metadata" given this node is "required"). 
- 2) Find all the allowable metadata values / nodes that can be assigned to a particular node (if such a constraint is specified on the schema). - - Args: - source_node: Node from which we can start recursive dependancy traversal (as mentioned above). - schema_name: Name assigned to JSON-LD schema (to uniquely identify it via URI when it is hosted on the Internet). - - Returns: - JSON Schema as a dictionary. - ''' - json_schema = { - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "http://example.com/" + schema_name, - "title": schema_name, - "type": "object", - "properties": {}, - "required": [], - "allOf": [], - } - - # get graph corresponding to data model schema - #mm_graph = self.se.get_nx_schema() - - nodes_to_process = ( - [] - ) # list of nodes to be checked for dependencies, starting with the source node - processed_nodes = ( - [] - ) # keep of track of nodes whose dependencies have been processed - reverse_dependencies = ( - {} - ) # maintain a map between conditional nodes and their dependencies (reversed) -- {dependency : conditional_node} - range_domain_map = ( - {} - ) # maintain a map between range nodes and their domain nodes {range_value : domain_value} - # the domain node is very likely the parentof ("parentOf" relationship) of the range node - - root_dependencies = self.get_adjacent_nodes_by_relationship( - source_node, self.requires_dependency_relationship - ) - - # if root_dependencies is empty it means that a class with name 'source_node' exists - # in the schema, but it is not a valid component - if not root_dependencies: - raise ValueError(f"'{source_node}' is not a valid component in the schema.") - - nodes_to_process += root_dependencies - - process_node = nodes_to_process.pop(0) - - while process_node: - - if not process_node in processed_nodes: - # node is being processed - node_is_processed = True - - node_range = self.get_adjacent_nodes_by_relationship( - process_node, self.range_value_relationship - ) - - # get node 
range display name - node_range_d = self.get_nodes_display_names(node_range, graph) - - node_dependencies = self.get_adjacent_nodes_by_relationship( - process_node, self.requires_dependency_relationship - ) - - # get process node display name - node_display_name = graph.nodes[process_node]["displayName"] - - # updating map between node and node's valid values - for n in node_range_d: - if not n in range_domain_map: - range_domain_map[n] = [] - range_domain_map[n].append(node_display_name) - - # can this node be map to the empty set (if required no; if not required yes) - # TODO: change "required" to different term, required may be a bit misleading (i.e. is the node required in the schema) - node_required = self.is_node_required(process_node, graph) - - # get any additional validation rules associated with this node (e.g. can this node be mapped to a list of other nodes) - node_validation_rules = self.get_node_validation_rules( - node_display_name - ) - - if node_display_name in reverse_dependencies: - # if node has conditionals set schema properties and conditional dependencies - # set schema properties - if node_range: - # if process node has valid value range set it in schema properties - schema_valid_vals = self.get_range_schema( - node_range_d, node_display_name, blank=True - ) - - if node_validation_rules: - # if this node has extra validation rules process them - # TODO: abstract this into its own validation rule constructor/generator module/class - if rule_in_rule_list("list", node_validation_rules): - # if this node can be mapped to a list of nodes - # set its schema accordingly - schema_valid_vals = self.get_array_schema( - node_range_d, node_display_name, blank=True - ) - - else: - # otherwise, by default allow any values - schema_valid_vals = {node_display_name: {}} - - json_schema["properties"].update(schema_valid_vals) - - # set schema conditional dependencies - for node in reverse_dependencies[node_display_name]: - # set all of the conditional nodes 
that require this process node - - # get node domain if any - # ow this node is a conditional requirement - if node in range_domain_map: - domain_nodes = range_domain_map[node] - conditional_properties = {} - - for domain_node in domain_nodes: - - # set range of conditional node schema - conditional_properties.update( - { - "properties": {domain_node: {"enum": [node]}}, - "required": [domain_node], - } - ) - - # given node conditional are satisfied, this process node (which is dependent on these conditionals) has to be set or not depending on whether it is required - if node_range: - dependency_properties = self.get_range_schema( - node_range_d, - node_display_name, - blank=not node_required, - ) - - if node_validation_rules: - if rule_in_rule_list("list", node_validation_rules): - # TODO: get_range_schema and get_range_schema have similar behavior - combine in one module - dependency_properties = self.get_array_schema( - node_range_d, - node_display_name, - blank=not node_required, - ) - - else: - if node_required: - dependency_properties = self.get_non_blank_schema( - node_display_name - ) - else: - dependency_properties = {node_display_name: {}} - schema_conditional_dependencies = { - "if": conditional_properties, - "then": { - "properties": dependency_properties, - "required": [node_display_name], - }, - } - - # update conditional-dependency rules in json schema - json_schema["allOf"].append( - schema_conditional_dependencies - ) - - else: - # node doesn't have conditionals - if node_required: - if node_range: - schema_valid_vals = self.get_range_schema( - node_range_d, node_display_name, blank=False - ) - - if node_validation_rules: - # If there are valid values AND they are expected to be a list, - # reformat the Valid Values. 
- if rule_in_rule_list("list", node_validation_rules): - schema_valid_vals = self.get_array_schema( - node_range_d, node_display_name, blank=False - ) - else: - schema_valid_vals = self.get_non_blank_schema( - node_display_name - ) - - json_schema["properties"].update(schema_valid_vals) - # add node to required fields - json_schema["required"] += [node_display_name] - - elif process_node in root_dependencies: - # node doesn't have conditionals and is not required; it belongs in the schema only if it is in root's dependencies - - if node_range: - schema_valid_vals = self.get_range_schema( - node_range_d, node_display_name, blank=True - ) - - if node_validation_rules: - if rule_in_rule_list("list", node_validation_rules): - schema_valid_vals = self.get_array_schema( - node_range_d, node_display_name, blank=True - ) - - else: - schema_valid_vals = {node_display_name: {}} - - json_schema["properties"].update(schema_valid_vals) - - else: - # node doesn't have conditionals and it is not required and it is not a root dependency - # the node doesn't belong in the schema - # do not add to processed nodes since its conditional may be traversed at a later iteration (though unlikely for most schemas we consider) - node_is_processed = False - - # add process node as a conditional to its dependencies - node_dependencies_d = self.get_nodes_display_names( - node_dependencies, graph - ) - - for dep in node_dependencies_d: - if not dep in reverse_dependencies: - reverse_dependencies[dep] = [] - - reverse_dependencies[dep].append(node_display_name) - - # add nodes found as dependencies and range of this processed node - # to the list of nodes to be processed - nodes_to_process += node_range - nodes_to_process += node_dependencies - - # if the node is processed add it to the processed nodes set - if node_is_processed: - processed_nodes.append(process_node) - - # if the list of nodes to process is not empty - # set the process node the next remaining node to process - if 
nodes_to_process: - process_node = nodes_to_process.pop(0) - else: - # no more nodes to process - # exit the loop - break - - logger.info("JSON schema successfully generated from schema.org schema!") - - # if no conditional dependencies were added we can't have an empty 'AllOf' block in the schema, so remove it - if not json_schema["allOf"]: - del json_schema["allOf"] - - # If no config value and SchemaGenerator was initialized with - # a JSON-LD path, construct - if self.jsonld_path is not None: - prefix = self.jsonld_path_root - prefix_root, prefix_ext = os.path.splitext(prefix) - if prefix_ext == ".model": - prefix = prefix_root - json_schema_log_file = f"{prefix}.{source_node}.schema.json" - - logger.info( - "The JSON schema file can be inspected by setting the following " - "nested key in the configuration: (model > input > log_location)." - ) - - logger.info(f"JSON schema file log stored as {json_schema_log_file}") - - return json_schema \ No newline at end of file + It does so for any given node in the schema.org schema (recursively) using the given node as starting point in the following manner: + 1) Find all the nodes / terms this node depends on (which are required as "additional metadata" given this node is "required"). + 2) Find all the allowable metadata values / nodes that can be assigned to a particular node (if such a constraint is specified on the schema). + + Args: + source_node: Node from which we can start recursive dependancy traversal (as mentioned above). + schema_name: Name assigned to JSON-LD schema (to uniquely identify it via URI when it is hosted on the Internet). + + Returns: + JSON Schema as a dictionary. 
+ ''' + json_schema = { + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "http://example.com/" + schema_name, + "title": schema_name, + "type": "object", + "properties": {}, + "required": [], + "allOf": [], + } + + # get graph corresponding to data model schema + #mm_graph = self.se.get_nx_schema() + + nodes_to_process = ( + [] + ) # list of nodes to be checked for dependencies, starting with the source node + processed_nodes = ( + [] + ) # keep of track of nodes whose dependencies have been processed + reverse_dependencies = ( + {} + ) # maintain a map between conditional nodes and their dependencies (reversed) -- {dependency : conditional_node} + range_domain_map = ( + {} + ) # maintain a map between range nodes and their domain nodes {range_value : domain_value} + # the domain node is very likely the parentof ("parentOf" relationship) of the range node + + root_dependencies = self.DME.get_adjacent_nodes_by_relationship( + node = source_node, relationship=self.reqDep_ek, + ) + + # if root_dependencies is empty it means that a class with name 'source_node' exists + # in the schema, but it is not a valid component + if not root_dependencies: + raise ValueError(f"'{source_node}' is not a valid component in the schema.") + + nodes_to_process += root_dependencies + + process_node = nodes_to_process.pop(0) + + while process_node: + + if not process_node in processed_nodes: + # node is being processed + node_is_processed = True + + node_range = self.DME.get_adjacent_nodes_by_relationship( + node=process_node, relationship=self.rangeIncludes_ek, + ) + + + # get node range display name + node_range_d = self.DME.get_nodes_display_names(node_list=node_range) + + node_dependencies = self.DME.get_adjacent_nodes_by_relationship( + node=process_node, relationship=self.reqDep_ek, + ) + + # get process node display name + node_display_name = self.graph.nodes[process_node][self.displayName_nl] + + # updating map between node and node's valid values + for n in 
node_range_d: + if not n in range_domain_map: + range_domain_map[n] = [] + range_domain_map[n].append(node_display_name) + + # can this node be map to the empty set (if required no; if not required yes) + # TODO: change "required" to different term, required may be a bit misleading (i.e. is the node required in the schema) + node_required = self.DME.get_node_required(node_label=process_node) + + # get any additional validation rules associated with this node (e.g. can this node be mapped to a list of other nodes) + node_validation_rules = self.DME.get_node_validation_rules( + node_display_name = node_display_name + ) + + if node_display_name in reverse_dependencies: + # if node has conditionals set schema properties and conditional dependencies + # set schema properties + if node_range: + # if process node has valid value range set it in schema properties + schema_valid_vals = self.get_range_schema( + node_range = node_range_d, node_name=node_display_name, blank=True + ) + + if node_validation_rules: + # if this node has extra validation rules process them + # TODO: abstract this into its own validation rule constructor/generator module/class + if rule_in_rule_list("list", node_validation_rules): + # if this node can be mapped to a list of nodes + # set its schema accordingly + schema_valid_vals = self.get_array_schema( + node_range=node_range_d, node_name=node_display_name, blank=True + ) + + else: + # otherwise, by default allow any values + schema_valid_vals = {node_display_name: {}} + + json_schema["properties"].update(schema_valid_vals) + + # set schema conditional dependencies + for node in reverse_dependencies[node_display_name]: + # set all of the conditional nodes that require this process node + + # get node domain if any + # ow this node is a conditional requirement + if node in range_domain_map: + domain_nodes = range_domain_map[node] + conditional_properties = {} + + for domain_node in domain_nodes: + + # set range of conditional node schema + 
conditional_properties.update( + { + "properties": {domain_node: {"enum": [node]}}, + "required": [domain_node], + } + ) + + # given node conditional are satisfied, this process node (which is dependent on these conditionals) has to be set or not depending on whether it is required + if node_range: + dependency_properties = self.get_range_schema( + node_range=node_range_d, + node_name=node_display_name, + blank=not node_required, + ) + + if node_validation_rules: + if rule_in_rule_list("list", node_validation_rules): + # TODO: get_range_schema and get_range_schema have similar behavior - combine in one module + dependency_properties = self.get_array_schema( + node_range=node_range_d, + node_name=node_display_name, + blank=not node_required, + ) + + else: + if node_required: + dependency_properties = self.get_non_blank_schema( + node_name=node_display_name + ) + else: + dependency_properties = {node_display_name: {}} + schema_conditional_dependencies = { + "if": conditional_properties, + "then": { + "properties": dependency_properties, + "required": [node_display_name], + }, + } + + # update conditional-dependency rules in json schema + json_schema["allOf"].append( + schema_conditional_dependencies + ) + + else: + # node doesn't have conditionals + if node_required: + if node_range: + schema_valid_vals = self.get_range_schema( + node_range=node_range_d, node_name=node_display_name, blank=False + ) + + if node_validation_rules: + # If there are valid values AND they are expected to be a list, + # reformat the Valid Values. 
+ if rule_in_rule_list("list", node_validation_rules): + schema_valid_vals = self.get_array_schema( + node_range=node_range_d, node_name=node_display_name, blank=False + ) + else: + schema_valid_vals = self.get_non_blank_schema( + node_name=node_display_name + ) + + json_schema["properties"].update(schema_valid_vals) + # add node to required fields + json_schema["required"] += [node_display_name] + + elif process_node in root_dependencies: + # node doesn't have conditionals and is not required; it belongs in the schema only if it is in root's dependencies + + if node_range: + schema_valid_vals = self.get_range_schema( + node_range=node_range_d, node_name=node_display_name, blank=True + ) + + if node_validation_rules: + if rule_in_rule_list("list", node_validation_rules): + schema_valid_vals = self.get_array_schema( + node_range=node_range_d, node_name=node_display_name, blank=True + ) + + else: + schema_valid_vals = {node_display_name: {}} + + json_schema["properties"].update(schema_valid_vals) + + else: + # node doesn't have conditionals and it is not required and it is not a root dependency + # the node doesn't belong in the schema + # do not add to processed nodes since its conditional may be traversed at a later iteration (though unlikely for most schemas we consider) + node_is_processed = False + + # add process node as a conditional to its dependencies + node_dependencies_d = self.DME.get_nodes_display_names( + node_list=node_dependencies + ) + + for dep in node_dependencies_d: + if not dep in reverse_dependencies: + reverse_dependencies[dep] = [] + + reverse_dependencies[dep].append(node_display_name) + + # add nodes found as dependencies and range of this processed node + # to the list of nodes to be processed + nodes_to_process += node_range + nodes_to_process += node_dependencies + + # if the node is processed add it to the processed nodes set + if node_is_processed: + processed_nodes.append(process_node) + + # if the list of nodes to process is not empty 
+ # set the process node the next remaining node to process + if nodes_to_process: + process_node = nodes_to_process.pop(0) + else: + # no more nodes to process + # exit the loop + break + + logger.info("JSON schema successfully generated from schema.org schema!") + + # if no conditional dependencies were added we can't have an empty 'AllOf' block in the schema, so remove it + if not json_schema["allOf"]: + del json_schema["allOf"] + + # If no config value and SchemaGenerator was initialized with + # a JSON-LD path, construct + if self.jsonld_path is not None: + self.jsonld_path_root, jsonld_ext = os.path.splitext(self.jsonld_path) + prefix = self.jsonld_path_root + prefix_root, prefix_ext = os.path.splitext(prefix) + if prefix_ext == ".model": + prefix = prefix_root + json_schema_log_file = f"{prefix}.{source_node}.schema.json" + + logger.info( + "The JSON schema file can be inspected by setting the following " + "nested key in the configuration: (model > input > log_location)." + ) + + logger.info(f"JSON schema file log stored as {json_schema_log_file}") + + return json_schema \ No newline at end of file diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 1feb09f4c..1c12c91d0 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -4,6 +4,7 @@ from schematic.schemas.data_model_graph import DataModelGraphExporer from schematic.schemas.data_model_relationships import DataModelRelationships +from schematic.utils.schema_util import get_label_from_display_name, get_display_name_from_label, convert_bool class DataModelJsonLD(object): @@ -15,6 +16,7 @@ def __init__(self, Graph: nx.MultiDiGraph): # Setup self.graph = Graph self.dmr = DataModelRelationships() + self.rel_dict = self.dmr.relationships_dictionary ''' self.jsonld_object = JSONLD_object(DataModelJsonLD) self.jsonld_class = JSONLD_class(self.jsonld_object) @@ -91,35 +93,6 @@ def create_object(self, template, node): 
template[rel_vals['jsonld_key']].append(node_2_id) else: template[rel_vals['jsonld_key']] == node_2 - #elif node_2 == node: - # breakpoint() - ''' - if key_rel == 'domainIncludes': - breakpoint() - if node_1 == node: - # Make sure the key is in the template (differs between properties and classes) - if rel_vals['jsonld_key'] in template.keys(): - node_2_id = {'@id': 'bts:'+node_2} - # TODO Move this to a helper function to clear up. - if (isinstance(template[rel_vals['jsonld_key']], list) and - node_2_id not in template[rel_vals['jsonld_key']]): - template[rel_vals['jsonld_key']].append(node_2_id) - else: - template[rel_vals['jsonld_key']] == node_2 - else: - breakpoint() - if node_2 == node: - # Make sure the key is in the template (differs between properties and classes) - if rel_vals['jsonld_key'] in template.keys(): - node_1_id = {'@id': 'bts:'+node_1} - # TODO Move this to a helper function to clear up. - if (isinstance(template[rel_vals['jsonld_key']], list) and - node_1_id not in template[rel_vals['jsonld_key']]): - # could possibly keep track of weights here but that might slow things down - template[rel_vals['jsonld_key']].append(node_1_id) - else: - template[rel_vals['jsonld_key']] == node_1 - ''' else: # attribute here refers to node attibutes (come up with better name.) 
node_attribute_name = rel_vals['node_label'] @@ -132,8 +105,30 @@ def create_object(self, template, node): data_model_relationships=data_model_relationships, ) # Reorder lists based on weights: - template = self.reorder_entries(template=template,) + template = self.reorder_template_entries(template=template,) + + # Add contexts back + template = self.add_contexts_to_entries(template=template,) + + return template + def add_contexts_to_entries(self, template): + for jsonld_key, entry in template.items(): + try: + key= [k for k, v in self.rel_dict.items() if jsonld_key == v['jsonld_key']][0] + except: + continue + if 'node_attr_dict' in self.rel_dict[key].keys(): + # Changes to data_model_relationships may mean this part will need to be updated. + try: + rel_func = self.rel_dict[key]['node_attr_dict']['standard'] + except: + rel_func = self.rel_dict[key]['node_attr_dict']['default'] + if key == 'id' and rel_func == get_label_from_display_name: + template[jsonld_key] = 'bts:' + template[jsonld_key] + elif key == 'required' and rel_func == convert_bool: + #clean up use of convert bool here. + template[jsonld_key] = 'sms:' + str(template[jsonld_key]).lower() return template def clean_template(self, template, data_model_relationships): @@ -155,7 +150,7 @@ def strip_context(self, context_value): context, v = context_value.split('@') return context, v - def reorder_entries(self, template): + def reorder_template_entries(self, template): '''In JSONLD some classes or property keys have list values. We want to make sure these lists are ordered according to the order supplied by the user. This will look specically in lists and reorder those. Args: @@ -164,46 +159,27 @@ def reorder_entries(self, template): template (dict): list entries re-ordered to match user supplied order. 
''' - data_model_relationships = self.dmr.relationships_dictionary + # user order only matters for nodes that are also attributes - template_id = template['rdfs:label'] + template_label = template['rdfs:label'] for jsonld_key, entry in template.items(): #if the entry is of type list and theres more than one value in the list attempt to reorder if isinstance(entry, list) and len(entry)>1: # Get edge key from data_model_relationships using the jsonld_key: - key, edge_key = [(k, v['edge_key']) for k, v in data_model_relationships.items() if jsonld_key == v['jsonld_key']][0] - # TODO: - # Get edge weights for values in the list. - - if data_model_relationships[key]['jsonld_direction'] == 'out': - #use outedges - - original_edge_weights_dict = {attached_node:self.graph[template_node][attached_node][edge_key]['weight'] - for template_node, attached_node in self.graph.out_edges(template_id) - if edge_key in self.graph[template_node][attached_node] - } - else: - #use inedges - original_edge_weights_dict = {attached_node:self.graph[attached_node][template_node][edge_key]['weight'] - for attached_node, template_node in self.graph.in_edges(template_id) - if edge_key in self.graph[attached_node][template_node] - } - - # TODO: MOVE TO HELPER - # would topological sort work here? - sorted_edges = list(dict(sorted(original_edge_weights_dict.items(), key=lambda item: item[1])).keys()) + key, edge_key = [(k, v['edge_key']) for k, v in self.rel_dict.items() if jsonld_key == v['jsonld_key']][0] + + # Order edges + sorted_edges = self.DME.get_ordered_entry(key=key, source_node_label=template_label) edge_weights_dict={edge:i for i, edge in enumerate(sorted_edges)} ordered_edges = [0]*len(edge_weights_dict.keys()) - for k,v in edge_weights_dict.items(): ordered_edges[v] = {'@id': 'bts:' + k} # TODO: Throw an error if ordered_edges does not get fully filled as expected. 
if 0 in ordered_edges: breakpoint() - template[jsonld_key] = ordered_edges return template diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index 5e269ae3e..bf9c87743 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -106,11 +106,19 @@ def run_rel_functions(self, rel_func, node_display_name='', key='', attr_relatio if rel_func == get_display_name_from_label: func_output = get_display_name_from_label(node_display_name, attr_relationships) elif key == 'id' and rel_func == get_label_from_display_name: - func_output = 'bts:' + get_label_from_display_name(display_name =node_display_name, entry_type=entry_type) + #func_output = 'bts:' + get_label_from_display_name(display_name =node_display_name, entry_type=entry_type) + func_output = get_label_from_display_name(display_name =node_display_name, entry_type=entry_type) elif rel_func == get_label_from_display_name: func_output = get_label_from_display_name(display_name =node_display_name, entry_type=entry_type) elif rel_func == convert_bool: - func_output = 'sms:' + convert_bool(attr_relationships[csv_header]).lower() + #func_output = 'sms:' + convert_bool(attr_relationships[csv_header]).lower() + if type(attr_relationships[csv_header]) == str: + if attr_relationships[csv_header].lower() == 'true': + func_output = True + elif attr_relationships[csv_header].lower() == 'false': + func_output = False + elif type(attr_relationships[csv_header]) == bool: + func_output = attr_relationships[csv_header] else: # raise error here to catch non valid function. 
breakpoint() diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 6d9723641..f18185b10 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -33,7 +33,7 @@ class DataModelParser(): def __init__( self, path_to_data_model: str, - base_schema_path: None, + base_schema_path: str = None, ) -> None: self.path_to_data_model = path_to_data_model @@ -73,7 +73,7 @@ def parse_base_model(self): ''' - if self.base_schema_path == 'No base model': + if not self.base_schema_path: return else: # determine base schema path @@ -106,10 +106,15 @@ class DataModelCSVParser(): def __init__( self ): + # Instantiate DataModelRelationships self.dmr = DataModelRelationships() + # Load relationships dictionary. + self.rel_dict = self.dmr.define_data_model_relationships() + # Load required csv headers self.required_headers = self.dmr.define_required_csv_headers() + def check_schema_definition(self, model_df: pd.DataFrame) -> bool: """Checks if a schema definition data frame contains the right required headers. @@ -151,8 +156,6 @@ def gather_csv_attributes_relationships(self, model_df): # Check csv schema follows expectations. self.check_schema_definition(model_df) - # Load relationships dictionary. - self.rel_dict = self.dmr.define_data_model_relationships() # Get the type for each value that needs to be submitted. # using csv_headers as keys to match required_headers/relationship_types @@ -203,7 +206,6 @@ def gather_csv_attributes_relationships(self, model_df): #rels = attr[relationship].strip() attr_rel_dictionary[attr['Attribute']]['Relationships'].update({relationship:rels}) position += 1 - return attr_rel_dictionary @@ -236,7 +238,10 @@ def __init__( ''' - self.data_model_relationships = DataModelRelationships() + # Instantiate DataModelRelationships + self.dmr = DataModelRelationships() + # Load relationships dictionary. 
+ self.rel_dict = self.dmr.define_data_model_relationships() def gather_jsonld_attributes_relationships( self, @@ -248,41 +253,68 @@ def gather_jsonld_attributes_relationships( Make sure we can take in list of types. ''' - model_ids = [v['rdfs:label'] for v in model_jsonld] + #label_jsonld_key = self.rel_dict['label']['jsonld_key'] + #subclassof_jsonld_key = self.rel_dict['subClassOf']['jsonld_key'] + + jsonld_keys_to_extract = ['label', 'subClassOf', 'id'] + label_jsonld_key, subclassof_jsonld_key, id_jsonld_key = [self.rel_dict[key]['jsonld_key'] + for key in jsonld_keys_to_extract ] + + model_ids = [v[label_jsonld_key] for v in model_jsonld] attr_rel_dictionary = {} # For each entry in the jsonld model for entry in model_jsonld: # Check to see if it has been assigned as a subclass as an attribute or parent. - if 'rdfs:subClassOf' in entry.keys(): + if subclassof_jsonld_key in entry.keys(): # Checking if subclass type is list, actually gets rid of Biothings. - # TODO: Allow biothings in future. - if type(entry['rdfs:subClassOf']) == list: + # TODO: Allow biothings in future (would need to handle as a dictionary) + if type(entry[subclassof_jsonld_key]) == list and entry[subclassof_jsonld_key]: # Determine if the id the entry has been assigned as a sublcass of is also recoreded # as a model id. If it is, then the entry is not an attribute itself, but a valid value. - subclass_id = entry['rdfs:subClassOf'][0]['rdfs:label'] + subclass_id = entry[subclassof_jsonld_key][0][id_jsonld_key] + if not subclass_id in model_ids: # Get the label of the entry - ## To allow for contexts split by the delimiter - entry_id = entry['rdfs:label'].split(':')[1] + entry_id = entry[label_jsonld_key] # If the entry is an attribute that has not already been added to the dictionary, add it. 
if entry_id not in attr_rel_dictionary.keys(): attr_rel_dictionary.update({entry_id: {'Relationships': {}}}) - for relationship in self.data_model_relationships.keys(): - if relationship in entry.keys(): - if entry[relationship] != []: - if type(entry[relationship][0]) == dict: - rels = [r['rdfs:label'].split(':')[1] for r in entry[relationship]] - else: - rels = entry[relationship] + # Add relationships for each attribute + # Right now, here we are stripping contexts, will need to track them in the future. + for key, val in self.rel_dict.items(): + if val['jsonld_key'] in entry.keys() and 'csv_header' in val.keys(): + rel_entry = entry[val['jsonld_key']] + if rel_entry != []: + try: + # add dictionary entry by itself. + if type(rel_entry) == dict: + rels = entry.get(val['jsonld_key'])['@id'] + # parse list of dictionaries to make a list of entries with context stripped (will update this section when contexts added.) + elif type(rel_entry[0]) == dict: + rels = [r[id_jsonld_key].split(':')[1] for r in rel_entry] + elif type(rel_entry) == str: + if ':' in rel_entry and 'http:' not in rel_entry: + rels = rel_entry.split(':')[1] + # Convert true/false strings to boolean + if rels.lower() =='true': + rels = True + elif rels.lower == 'false': + rels == False + else: + rels = rel_entry + else: + rels = rel_entry + except: + breakpoint() + attr_rel_dictionary[ entry_id]['Relationships'].update( - {k: rels for k in self.data_model_relationships[relationship].keys()}) - + {self.rel_dict[key]['csv_header']: rels}) return attr_rel_dictionary def parse_jsonld_model( diff --git a/schematic/schemas/data_model_relationships.py b/schematic/schemas/data_model_relationships.py index e2cb9bba6..4ce4f4df8 100644 --- a/schematic/schemas/data_model_relationships.py +++ b/schematic/schemas/data_model_relationships.py @@ -117,10 +117,10 @@ def define_data_model_relationships(self) -> Dict: 'jsonld_key': 'sms:required', 'csv_header': 'Required', 'node_label': 'required', - 'type': str, + 
'type': bool, 'edge_rel': False, 'required_header': True, - 'node_attr_dict':{'default': 'sms:false', + 'node_attr_dict':{'default': False, 'standard': convert_bool, }, }, From d5a95c1e0ccb680bfc7884901cbcfe55c8802693 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 3 Aug 2023 13:44:30 -0700 Subject: [PATCH 035/239] remove breakpoint --- schematic/manifest/generator.py | 1 - 1 file changed, 1 deletion(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index eb113ba17..f8bf236b1 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -369,7 +369,6 @@ def _get_json_schema(self, json_schema_filepath: str) -> Dict: # TODO: catch error data_model_js = DataModelJSONSchema(jsonld_path=self.jsonld_path, graph=self.graph) json_schema = data_model_js.get_json_validation_schema(source_node=self.root, schema_name=self.title) - breakpoint() else: with open(json_schema_filepath) as jsonfile: json_schema = json.load(jsonfile) From 0b2f0520058b82e57ff576b95f1775078d4388b3 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 3 Aug 2023 13:44:57 -0700 Subject: [PATCH 036/239] remove unused functions --- schematic/schemas/data_model_graph.py | 32 --------------------------- 1 file changed, 32 deletions(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index 6c42d24ce..9aafaf956 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -319,8 +319,6 @@ def get_edges_by_relationship(self, return edges - - def get_ordered_entry(self, key: str, source_node_label:str): # Check if node is in the graph, if not throw an error. 
@@ -488,13 +486,6 @@ def get_node_validation_rules(self, node_display_name: str = None, node_label: s def find_adjacent_child_classes(self, schema_class): return self.get_adjacent_nodes_by_relationship(node = schema_class, relationship = self.subClassOf_ek) - def find_all_class_properties(self): - """ - does not seem used. do not transfer now. - """ - breakpoint() - return - def find_class_specific_properties(self, schema_class): """Find properties specifically associated with a given class""" @@ -522,26 +513,3 @@ def find_class_specific_properties(self, schema_class): ): properties.append(record["rdfs:label"]) return properties - return - - def find_class_usages(self): - """ - Does not look used, do not transfer for now. - """ - return - - - - def explore_class(self, source_node): - """ - nx specific version of this? This might not be necessary since each nx node should already contain all required information. - Put this here for now as a dummy function so this can be explored more. - """ - breakpoint() - return - - def explore_property(self): - breakpoint() - return - - \ No newline at end of file From c33020cff37933167425d17ef1cb00a88749718b Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 3 Aug 2023 13:45:26 -0700 Subject: [PATCH 037/239] add inline comment --- schematic/schemas/data_model_json_schema.py | 1 + 1 file changed, 1 insertion(+) diff --git a/schematic/schemas/data_model_json_schema.py b/schematic/schemas/data_model_json_schema.py index 638da02ba..5817b84ba 100644 --- a/schematic/schemas/data_model_json_schema.py +++ b/schematic/schemas/data_model_json_schema.py @@ -18,6 +18,7 @@ def __init__(self, jsonld_path: str, graph:nx.MultiDiGraph, self.dmr = DataModelRelationships() self.rel_dict = self.dmr.relationships_dictionary + # Edge Keys self.reqDep_ek = self.rel_dict['requiresDependency']['edge_key'] self.rangeIncludes_ek = self.rel_dict['rangeIncludes']['edge_key'] self.reqComp_ek = self.rel_dict['requiresComponent']['edge_key'] From 
397e326960690940ed9ea1200944ba07f2a2bb9a Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 16 Aug 2023 09:48:43 -0700 Subject: [PATCH 038/239] update data model validator, add unit tests and test models --- schematic/schemas/data_model_validator.py | 132 +++++++++++++++------- tests/data/validator_dag_test.model.csv | 44 ++++++++ tests/data/validator_test.model.csv | 45 ++++++++ tests/test_validator.py | 105 +++++++++++++++++ 4 files changed, 285 insertions(+), 41 deletions(-) create mode 100644 tests/data/validator_dag_test.model.csv create mode 100644 tests/data/validator_test.model.csv create mode 100644 tests/test_validator.py diff --git a/schematic/schemas/data_model_validator.py b/schematic/schemas/data_model_validator.py index 1fddde117..a864c8814 100644 --- a/schematic/schemas/data_model_validator.py +++ b/schematic/schemas/data_model_validator.py @@ -1,37 +1,59 @@ import networkx as nx +from schematic.schemas.data_model_relationships import ( + DataModelRelationships + ) + class DataModelValidator(): ''' Check for consistency within data model. ''' def __init__( self, - data_model, + graph, ): - self.data_model = data_model + ''' + TODO: put blacklisted chars and reserved_names in some global space where they can be accessed centrally + ''' + self.graph = graph + self.DMR = DataModelRelationships() + # Removed check for spaces in display name since we get rid of those. 
+ self.blacklisted_chars = ['(', ')', '.', '-'] + self.reserved_names = {'entityId'} def run_checks(self): - checks = [ - self.check_has_name(), + error_checks = [ + self.check_graph_has_required_node_fields(), self.check_is_dag(), - self.check_namespace_overlap(), - self.check_for_orphan_attributes(), - self.check_namespace_similarity(), + self.check_reserved_names() + ] + warning_checks = [ + self.check_blacklisted_characters(), ] - errors = [error for error in checks if error] + errors = [error for error in error_checks if error] + warnings = [warning for warning in warning_checks if warning] return errors - def check_has_name(self): - '''Checks that each node is assigned a label. + def check_graph_has_required_node_fields(self): ''' - error = [] + Checks that each node is assigned a label. + ''' + # Get all the fields that should be recorded per node + rel_dict = self.DMR.relationships_dictionary + node_fields = [] + for k, v in rel_dict.items(): + if 'node_label' in v.keys(): + node_fields.append(v['node_label']) + error = [] + missing_fields = [] # Check that nodes have labels - node_labels = nx.get_node_attributes(self.data_model, "label") - for k, v in node_labels.items(): - if not v: - error.append(f'Node {k} does not have a label attached.') - breakpoint() + for node, node_dict in self.graph.nodes(data=True): + missing_fields.extend([(node, f) for f in node_fields if f not in node_dict.keys()]) + + if missing_fields: + for nf in missing_fields: + error.append(f'For entry: {nf[0]}, the required field {nf[1]} is missing in the data model graph, please double check your model and generate the graph again.') return error def check_is_dag(self): @@ -41,42 +63,70 @@ def check_is_dag(self): - Try wit topological sort as well. Benchmark against current approach. - Add unit test to verify this works properly. 
- ''' - if nx.number_of_selfloops(self.data_model)!=0 and nx.is_directed(self.data_model) == False: + + if nx.number_of_selfloops(self.graph)!=0 and nx.is_directed(self.graph) == False: error = f'Schematic requires that data models are Directed Acyclic Graphs (DAGs). ' \ f'Model supplied is not a DAG, please check your model.' return error + ''' + error = [] + if not nx.is_directed_acyclic_graph(self.graph): + # Attempt to find any cycles: + cycles = nx.simple_cycles(self.graph) + if cycles: + for cycle in cycles: + error.append(f'Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: {cycle[0]} and {cycle[1]}, please remove this loop from your model and submit again.') + else: + error.append(f'Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we could not locate the sorce of the error, please inspect your model.') + return error + def check_blacklisted_characters(self): + """ We strip these characters in store, so not sure if it matter if we have them now, maybe add warning + """ + warning = [] + for node, node_dict in self.graph.nodes(data=True): + if any(bl_char in node_dict['displayName'] for bl_char in self.blacklisted_chars): + node_display_name = node_dict['displayName'] + blacklisted_characters_found = [bl_char for bl_char in self.blacklisted_chars if bl_char in node_dict['displayName'] ] + blacklisted_characters_str= ','.join(blacklisted_characters_found) + warning.append(f'Node: {node_display_name} contains a blacklisted character(s): {blacklisted_characters_str}, they will be striped if used in Synapse annotations.') + return warning - def check_namespace_overlap(self): + def check_reserved_names(self): ''' - Check if name is repeated. - TODO: - - Add unit test to verify this works properly. - - The way this looks, it wont find namespace overlaps, - Have to go back to loading the csv and looking before overlaps have been removed. - Look for duplicate attributes. 
- Look for valid values that overlap with attributes and flag. + # TODO: the error message is odd, what are the actual names that should be used? Not attribute or componenet... ''' error = [] - if len(self.data_model.nodes.keys()) != set(list(self.data_model.nodes.keys())): - all_node_names = list(self.data_model.nodes.keys()) - for n_name in self.data_model.nodes.keys(): - all_node_names = [i for i in all_node_names if i != n_name] - if n_name in all_node_names: - error.append(f'There appears to be a namespace overlap, {n_name} appears at least twice.') - + reserved_names_found = [(name, node) for node in self.graph.nodes + for name in self.reserved_names + if name.lower() == node.lower() + ] + if reserved_names_found: + for reserved_name, node_name in reserved_names_found: + error.append(f'Your data model entry name: {node_name} overlaps with the reserved name: {reserved_name}. Please change this name in your data model.') return error + + def check_namespace_overlap(self): + ''' + Check if name is repeated. + Implement in the future + ''' + warning = [] + return warning + def check_for_orphan_attributes(self): - error = [] - return error + ''' + Check if attribute is specified but not connected to another attribute or component. + Implement in future + ''' + warning = [] + return warning def check_namespace_similarity(self): - """ Checks to see if names are incredibly similar save for formatting. Raise warning not error. + """ + Using AI, check if submitted attributes or valid values are similar to other ones, warn users. 
+ Implement in future """ - error=[] - return error - - def check_required_filled(self): - return \ No newline at end of file + warning=[] + return warning diff --git a/tests/data/validator_dag_test.model.csv b/tests/data/validator_dag_test.model.csv new file mode 100644 index 000000000..10da28991 --- /dev/null +++ b/tests/data/validator_dag_test.model.csv @@ -0,0 +1,44 @@ +Attribute,Description,Valid Values,DependsOn,Properties,Required,Parent,DependsOn Component,Source,Validation Rules +Patient,,,"Patient ID, Sex, Year of Birth, Diagnosis, Component",,FALSE,DataType,,, +Patient ID,,,Patient,,TRUE,DataProperty,,, +Sex,,"Female, Male, Other",,,TRUE,DataProperty,,, +Year of Birth,,,,,FALSE,DataProperty,,, +Diagnosis,,"Healthy, Cancer",,,TRUE,DataProperty,,, +Cancer,,,"Cancer Type, Family History",,FALSE,ValidValue,,, +Cancer Type,,"Breast, Colorectal, Lung, Prostate, Skin",,,TRUE,DataProperty,,, +Family History,,"Breast, Colorectal, Lung, Prostate, Skin",Cancer Type,,TRUE,DataProperty,,,list strict +Biospecimen,,,"Sample ID, Patient ID, Tissue Status, Component",,FALSE,DataType,Patient,, +Sample ID,,,,,TRUE,DataProperty,,, +Tissue Status,,"Healthy, Malignant",,,TRUE,DataProperty,,, +Bulk RNA-seq Assay,,,"Filename, Sample ID, File Format, Component",,FALSE,DataType,Biospecimen,, +Filename,,,,,TRUE,DataProperty,,, +File Format,,"FASTQ, BAM, CRAM, CSV/TSV",,,TRUE,DataProperty,,, +BAM,,,Genome Build,,FALSE,ValidValue,,, +CRAM,,,"Genome Build, Genome FASTA",,FALSE,ValidValue,,, +CSV/TSV,,,Genome Build,,FALSE,ValidValue,,, +Genome Build,,"GRCh37, GRCh38, GRCm38, GRCm39",,,TRUE,DataProperty,,, +Genome FASTA,,,,,TRUE,DataProperty,,, +MockComponent,,,"Component, Check List, Check Regex List, Check Regex Single, Check Regex Format, Check Regex Integer, Check Num, Check Float, Check Int, Check String, Check URL,Check Match at Least, Check Match at Least values, Check Match Exactly, Check Match Exactly values, Check Recommended, Check Ages, Check Unique, Check Range, Check 
Date, Check NA",,FALSE,DataType,,, +Check List,,"ab, cd, ef, gh",,,TRUE,DataProperty,,,list strict +Check Regex List,,,,,TRUE,DataProperty,,,list strict::regex match [a-f] +Check Regex Single,,,,,TRUE,DataProperty,,,regex search [a-f] +Check Regex Format,,,,,TRUE,DataProperty,,,regex match [a-f] +Check Regex Integer,,,,,TRUE,DataProperty,,,regex search ^\d+$ +Check Num,,,,,TRUE,DataProperty,,,num +Check Float,,,,,TRUE,DataProperty,,,float +Check Int,,,,,TRUE,DataProperty,,,int +Check String,,,,,TRUE,DataProperty,,,str +Check URL,,,,,TRUE,DataProperty,,,url +Check Match at Least,,,,,TRUE,DataProperty,,,matchAtLeastOne Patient.PatientID set +Check Match Exactly,,,,,TRUE,DataProperty,,,matchExactlyOne MockComponent.checkMatchExactly set +Check Match at Least values,,,,,TRUE,DataProperty,,,matchAtLeastOne MockComponent.checkMatchatLeastvalues value +Check Match Exactly values,,,,,TRUE,DataProperty,,,matchExactlyOne MockComponent.checkMatchExactlyvalues value +Check Recommended,,,,,FALSE,DataProperty,,,recommended +Check Ages,,,,,TRUE,DataProperty,,,protectAges +Check Unique,,,,,TRUE,DataProperty,,,unique error +Check Range,,,,,TRUE,DataProperty,,,inRange 50 100 error +Check Date,,,,,TRUE,DataProperty,,,date +Check NA,,,,,TRUE,DataProperty,,,int::IsNA +MockRDB,,,"Component, MockRDB_id, SourceManifest",,FALSE,DataType,,, +MockRDB_id,,,,,TRUE,DataProperty,,,int +SourceManifest,,,,,TRUE,DataProperty,,, \ No newline at end of file diff --git a/tests/data/validator_test.model.csv b/tests/data/validator_test.model.csv new file mode 100644 index 000000000..b5b84760f --- /dev/null +++ b/tests/data/validator_test.model.csv @@ -0,0 +1,45 @@ +Attribute,Description,Valid Values,DependsOn,Properties,Required,Parent,DependsOn Component,Source,Validation Rules +Patient),,,"Patient ID., Sex-, Year of Birth(, Diagnosis, Component",,FALSE,DataType,,, +Patient ID.,,,,,TRUE,DataProperty,,, +Sex-,,"Female, Male, Other",,,TRUE,DataProperty,,, +Year of Birth(,,,,,FALSE,DataProperty,,, 
+Diagnosis,,"Healthy, Cancer",,,TRUE,DataProperty,,, +Cancer,,,"Cancer Type, Family History",,FALSE,ValidValue,,, +Cancer Type,,"Breast, Colorectal, Lung, Prostate, Skin",,,TRUE,DataProperty,,, +Family History,,"Breast, Colorectal, Lung, Prostate, Skin",,,TRUE,DataProperty,,,list strict +Biospecimen,,,"Sample ID, Patient ID., Tissue Status, Component",,FALSE,DataType,Patient,, +Sample ID,,,,,TRUE,DataProperty,,, +Tissue Status,,"Healthy, Malignant",,,TRUE,DataProperty,,, +Bulk RNA-seq Assay,,,"Filename, Sample ID, File Format, Component",,FALSE,DataType,Biospecimen,, +Filename,,,,,TRUE,DataProperty,,, +File Format,,"FASTQ, BAM, CRAM, CSV/TSV",,,TRUE,DataProperty,,, +BAM,,,Genome Build,,FALSE,ValidValue,,, +CRAM,,,"Genome Build, Genome FASTA",,FALSE,ValidValue,,, +CSV/TSV,,,Genome Build,,FALSE,ValidValue,,, +Genome Build,,"GRCh37, GRCh38, GRCm38, GRCm39",,,TRUE,DataProperty,,, +Genome FASTA,,,,,TRUE,DataProperty,,, +MockComponent,,,"Component, Check List, Check Regex List, Check Regex Single, Check Regex Format, Check Regex Integer, Check Num, Check Float, Check Int, Check String, Check URL,Check Match at Least, Check Match at Least values, Check Match Exactly, Check Match Exactly values, Check Recommended, Check Ages, Check Unique, Check Range, Check Date, Check NA",,FALSE,DataType,,, +Check List,,"ab, cd, ef, gh",,,TRUE,DataProperty,,,list strict +Check Regex List,,,,,TRUE,DataProperty,,,list strict::regex match [a-f] +Check Regex Single,,,,,TRUE,DataProperty,,,regex search [a-f] +Check Regex Format,,,,,TRUE,DataProperty,,,regex match [a-f] +Check Regex Integer,,,,,TRUE,DataProperty,,,regex search ^\d+$ +Check Num,,,,,TRUE,DataProperty,,,num +Check Float,,,,,TRUE,DataProperty,,,float +Check Int,,,,,TRUE,DataProperty,,,int +Check String,,,,,TRUE,DataProperty,,,str +Check URL,,,,,TRUE,DataProperty,,,url +Check Match at Least,,,,,TRUE,DataProperty,,,matchAtLeastOne Patient.PatientID set +Check Match Exactly,,,,,TRUE,DataProperty,,,matchExactlyOne 
MockComponent.checkMatchExactly set +Check Match at Least values,,,,,TRUE,DataProperty,,,matchAtLeastOne MockComponent.checkMatchatLeastvalues value +Check Match Exactly values,,,,,TRUE,DataProperty,,,matchExactlyOne MockComponent.checkMatchExactlyvalues value +Check Recommended,,,,,FALSE,DataProperty,,,recommended +Check Ages,,,,,TRUE,DataProperty,,,protectAges +Check Unique,,,,,TRUE,DataProperty,,,unique error +Check Range,,,,,TRUE,DataProperty,,,inRange 50 100 error +Check Date,,,,,TRUE,DataProperty,,,date +Check NA,,,,,TRUE,DataProperty,,,int::IsNA +MockRDB,,,"Component, MockRDB_id, SourceManifest",,FALSE,DataType,,, +MockRDB_id,,,,,TRUE,DataProperty,,,int +SourceManifest,,,,,TRUE,DataProperty,,, +entityId,,,,,TRUE,DataProperty,,, \ No newline at end of file diff --git a/tests/test_validator.py b/tests/test_validator.py new file mode 100644 index 000000000..5f01bbd34 --- /dev/null +++ b/tests/test_validator.py @@ -0,0 +1,105 @@ +from io import StringIO +import json +import networkx as nx +import os +import pandas as pd +import pytest +import logging + + +from schematic.schemas.data_model_parser import DataModelParser +from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExporer +from schematic.schemas.data_model_validator import DataModelValidator +from schematic.schemas.data_model_jsonld import DataModelJsonLD, convert_graph_to_jsonld + + + +logging.basicConfig(level=logging.DEBUG) +logger = logging.getLogger(__name__) + +def graph_data_model_func(helpers, data_model_name): + path_to_data_model = helpers.get_data_path(data_model_name) + + # Instantiate Parser + data_model_parser = DataModelParser(path_to_data_model=path_to_data_model) + + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Convert parsed model to graph + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + return 
graph_data_model + + + +class TestDataModelValidator: + def test_check_blacklisted_characters(self, helpers): + # Get graph data model + graph_data_model = graph_data_model_func(helpers, data_model_name='validator_test.model.csv') + + # Instantiate Data Model Validator + DMV = DataModelValidator(graph_data_model) + + # Run validation + validator_errors = DMV.check_blacklisted_characters() + + # Expected Error + expected_error = ['Node: Patient) contains a blacklisted character(s): ), they will be striped if used in Synapse annotations.', + 'Node: Patient ID. contains a blacklisted character(s): ., they will be striped if used in Synapse annotations.', + 'Node: Sex- contains a blacklisted character(s): -, they will be striped if used in Synapse annotations.', + 'Node: Year of Birth( contains a blacklisted character(s): (, they will be striped if used in Synapse annotations.', + 'Node: Bulk RNA-seq Assay contains a blacklisted character(s): -, they will be striped if used in Synapse annotations.', + ] + + assert expected_error == validator_errors + + def test_check_reserved_names(self, helpers): + # Get graph data model + graph_data_model = graph_data_model_func(helpers, data_model_name='validator_test.model.csv') + + # Instantiate Data Model Validator + DMV = DataModelValidator(graph_data_model) + + # Run validation + validator_errors = DMV.check_reserved_names() + + # Expected Error + expected_error = ['Your data model entry name: EntityId overlaps with the reserved name: entityId. 
Please change this name in your data model.'] + assert expected_error == validator_errors + + def test_check_graph_has_required_node_fields(self, helpers): + # Get graph data model + graph_data_model = graph_data_model_func(helpers, data_model_name='validator_test.model.csv') + + # Remove a field from an entry graph + del graph_data_model.nodes['Cancer']['label'] + + # Instantiate Data Model Validator + DMV = DataModelValidator(graph_data_model) + + # Run validation + validator_errors = DMV.check_graph_has_required_node_fields() + + # Expected Error + expected_error = ['For entry: Cancer, the required field label is missing in the data model graph, please double check your model and generate the graph again.'] + assert expected_error == validator_errors + + def test_dag(self, helpers): + # Get graph data model + graph_data_model = graph_data_model_func(helpers, data_model_name='validator_dag_test.model.csv') + + # Instantiate Data Model Validator + DMV = DataModelValidator(graph_data_model) + + # Run validation + validator_errors = DMV.check_is_dag() + + expected_error = ['Schematic requires models be a directed acyclic graph (DAG). 
Your graph is not a DAG, we found a loop between: Patient and PatientID, please remove this loop from your model and submit again.'] + + assert expected_error == validator_errors + From 1ba804991f0d60e5b44d075fc0c7a9e8f5347fa3 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 16 Aug 2023 09:49:05 -0700 Subject: [PATCH 039/239] add additional docstrings to JSONLD parser --- schematic/schemas/data_model_parser.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index f18185b10..f6c26ff06 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -97,7 +97,7 @@ def parse_model(self): base_model = self.parse_base_model() return model_dict - + class DataModelCSVParser(): ''' @@ -110,6 +110,7 @@ def __init__( self.dmr = DataModelRelationships() # Load relationships dictionary. self.rel_dict = self.dmr.define_data_model_relationships() + self.edge_relationships_dictionary = self.dmr.define_edge_relationships() # Load required csv headers self.required_headers = self.dmr.define_required_csv_headers() @@ -250,12 +251,17 @@ def gather_jsonld_attributes_relationships( Note: unlike a CSV the JSONLD might already have the biothings schema attached to it. So the output may not initially look identical. TODO Check relationship attribute types like in CSV + + It is also just about impossible to extract attributes explicitly. Using a dictionary should avoid duplications. + + This is a close approximation to finding attributes and relationships but will not be convertable between csv and jsonld + since jsonld does not have the concept of attributes. - Make sure we can take in list of types. + TODO: Simplify or change this dictionary capture. 
''' - #label_jsonld_key = self.rel_dict['label']['jsonld_key'] - #subclassof_jsonld_key = self.rel_dict['subClassOf']['jsonld_key'] + + # TODO: define this within the relationships class jsonld_keys_to_extract = ['label', 'subClassOf', 'id'] label_jsonld_key, subclassof_jsonld_key, id_jsonld_key = [self.rel_dict[key]['jsonld_key'] for key in jsonld_keys_to_extract ] @@ -332,5 +338,3 @@ def parse_jsonld_model( json_load = load_json(path_to_data_model) model_dict = self.gather_jsonld_attributes_relationships(json_load['@graph']) return model_dict - - From 3daac3acde5a3580d4e995788375993d6fc38265 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 16 Aug 2023 09:49:41 -0700 Subject: [PATCH 040/239] add additional docstrings to data_model_relationships --- schematic/schemas/data_model_relationships.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/schematic/schemas/data_model_relationships.py b/schematic/schemas/data_model_relationships.py index 4ce4f4df8..a4d995f33 100644 --- a/schematic/schemas/data_model_relationships.py +++ b/schematic/schemas/data_model_relationships.py @@ -208,6 +208,8 @@ def define_edge_relationships(self): def define_value_relationships(self): """ Think about changing outputs. + Change to node_relationships. + Use node_label to pull info. Save node_label instead? 
""" value_relationships = {} for k, v in self.relationships_dictionary.items(): From c15dda4d619799cb22bcc46e254d1267e06bd3bb Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 16 Aug 2023 15:52:57 -0700 Subject: [PATCH 041/239] add get_subgraph_by_edge_type --- schematic/schemas/data_model_graph.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index 9aafaf956..6fbe13c85 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -482,6 +482,31 @@ def get_node_validation_rules(self, node_display_name: str = None, node_label: s return node_validation_rules + def get_subgraph_by_edge_type( + self, relationship: str + ) -> nx.DiGraph: + """Get a subgraph containing all edges of a given type (aka relationship). + + Args: + graph: input multi digraph (aka hypergraph) + relationship: edge / link relationship type with possible values same as in above docs. 
+ + Returns: + Directed graph on edges of a particular type (aka relationship) + """ + + # prune the metadata model graph so as to include only those edges that match the relationship type + rel_edges = [] + for (u, v, key, c) in self.graph.out_edges(data=True, keys=True): + if key == relationship: + rel_edges.append((u, v)) + + relationship_subgraph = nx.DiGraph() + relationship_subgraph.add_edges_from(rel_edges) + + return relationship_subgraph + + def find_adjacent_child_classes(self, schema_class): return self.get_adjacent_nodes_by_relationship(node = schema_class, relationship = self.subClassOf_ek) From b8e4da9932da9e59b3a323e5d0bfb421db2b0cf5 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 25 Aug 2023 16:12:13 -0700 Subject: [PATCH 042/239] update all tests to work with new schemas --- tests/conftest.py | 29 +++++-- tests/test_api.py | 39 ++++++---- tests/test_manifest.py | 64 +++++++++++++-- tests/test_schemas.py | 72 +++-------------- tests/test_store.py | 79 ++++++++++++++++--- tests/test_utils.py | 163 ++++++++++++++++++++++++++++++++++----- tests/test_validation.py | 160 ++++++++++++++++++++++---------------- tests/test_validator.py | 9 ++- 8 files changed, 424 insertions(+), 191 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index f19b4a9dc..5d403185a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -8,7 +8,8 @@ import pandas as pd from dotenv import load_dotenv, find_dotenv -from schematic.schemas.explorer import SchemaExplorer +from schematic.schemas.data_model_parser import DataModelParser +from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer from schematic.configuration.configuration import CONFIG from schematic.utils.df_utils import load_df @@ -51,16 +52,32 @@ def get_data_frame(path, *paths, **kwargs): fullpath = os.path.join(DATA_DIR, path, *paths) return load_df(fullpath, **kwargs) + @staticmethod - def get_schema_explorer(path=None, *paths): + def 
get_data_model_explorer(path=None, *paths): + #commenting this now bc we dont want to have multiple instances if path is None: - return SchemaExplorer() + return fullpath = Helpers.get_data_path(path, *paths) - se = SchemaExplorer() - se.load_schema(fullpath) - return se + # Instantiate DataModelParser + data_model_parser = DataModelParser(path_to_data_model = fullpath) + + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + #Instantiate DataModelGraphExplorer + DME = DataModelGraphExplorer(graph_data_model) + + return DME + @staticmethod def get_python_version(self): diff --git a/tests/test_api.py b/tests/test_api.py index ea597a9ce..f1e3e0ae8 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -13,8 +13,10 @@ import pytest from schematic.configuration.configuration import Configuration -from schematic.schemas.generator import \ - SchemaGenerator # Local application/library specific imports. 
+from schematic.schemas.data_model_parser import DataModelParser +from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer +from schematic.schemas.data_model_relationships import DataModelRelationships + from schematic_api.api import create_app @@ -74,8 +76,20 @@ def get_MockComponent_attribute(): Yield all of the mock conponent attributes one at a time TODO: pull in jsonld from fixture """ - sg = SchemaGenerator("https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld") - attributes=sg.get_node_dependencies('MockComponent') + schema_url = "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld" + data_model_parser = DataModelParser(path_to_data_model = schema_url) + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + DME = DataModelGraphExplorer(graph_data_model) + #sg = SchemaGenerator("https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld") + attributes=DME.get_node_dependencies('MockComponent') attributes.remove('Component') for MockComponent_attribute in attributes: @@ -242,16 +256,15 @@ def test_component_requirement(self, client, data_model_jsonld, as_graph): @pytest.mark.schematic_api -class TestSchemaExplorerOperation: +class TestUtilsOperation: @pytest.mark.parametrize("strict_camel_case", [True, False]) - def test_get_property_label_from_display_name(self, client, data_model_jsonld, strict_camel_case): + def test_get_property_label_from_display_name(self, client, strict_camel_case): params = { - "schema_url": data_model_jsonld, "display_name": "mocular entity", "strict_camel_case": strict_camel_case } - response = 
client.get("http://localhost:3001/v1/explorer/get_property_label_from_display_name", query_string = params) + response = client.get("http://localhost:3001/v1/utils/get_property_label_from_display_name", query_string = params) assert response.status_code == 200 response_dt = json.loads(response.data) @@ -261,6 +274,9 @@ def test_get_property_label_from_display_name(self, client, data_model_jsonld, s else: assert response_dt == "mocularentity" + +@pytest.mark.schematic_api +class TestDataModelGraphExplorerOperation: def test_get_schema(self, client, data_model_jsonld): params = { "schema_url": data_model_jsonld @@ -306,9 +322,6 @@ def test_get_nodes_display_names(test, client, data_model_jsonld): assert response.status_code == 200 assert "Family History" and "Biospecimen" in response_dta - -@pytest.mark.schematic_api -class TestSchemaGeneratorOperation: @pytest.mark.parametrize("relationship", ["parentOf", "requiresDependency", "rangeValue", "domainValue"]) def test_get_subgraph_by_edge(self, client, data_model_jsonld, relationship): params = { @@ -329,7 +342,7 @@ def test_get_node_range(self, client, data_model_jsonld, return_display_names, n "node_label": node_label } - response = client.get('http://localhost:3001/v1/explorer/get_node_range', query_string=params) + response = client.get('http://localhost:3001/v1/schemas/get_node_range', query_string=params) response_dt = json.loads(response.data) assert response.status_code == 200 @@ -356,7 +369,7 @@ def test_node_dependencies(self, client, data_model_jsonld, source_node, return_ "return_schema_ordered": return_schema_ordered } - response = client.get('http://localhost:3001/v1/explorer/get_node_dependencies', query_string=params) + response = client.get('http://localhost:3001/v1/schemas/get_node_dependencies', query_string=params) response_dt = json.loads(response.data) assert response.status_code == 200 diff --git a/tests/test_manifest.py b/tests/test_manifest.py index a145f6426..a5c291d40 100644 --- 
a/tests/test_manifest.py +++ b/tests/test_manifest.py @@ -7,7 +7,9 @@ from unittest.mock import patch from unittest.mock import MagicMock from schematic.manifest.generator import ManifestGenerator -from schematic.schemas.generator import SchemaGenerator +from schematic.schemas.data_model_parser import DataModelParser +from schematic.schemas.data_model_graph import DataModelGraph,DataModelGraphExplorer +from schematic.schemas.data_model_json_schema import DataModelJSONSchema from schematic.configuration.configuration import Configuration from schematic.utils.google_api_utils import execute_google_api_requests @@ -17,6 +19,25 @@ logger = logging.getLogger(__name__) +def generate_graph_data_model(helpers, path_to_data_model): + """ + Simple helper function to generate a networkx graph data model from a CSV or JSONLD data model + """ + + # Instantiate Parser + data_model_parser = DataModelParser(path_to_data_model=path_to_data_model) + + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Convert parsed model to graph + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + return graph_data_model @pytest.fixture( params=[ @@ -32,13 +53,21 @@ "skip_annotations-BulkRNAseqAssay", ], ) + def manifest_generator(helpers, request): # Rename request param for readability use_annotations, data_type = request.param + path_to_data_model = helpers.get_data_path("example.model.jsonld") + + # Get graph data model + graph_data_model = generate_graph_data_model(helpers, path_to_data_model=path_to_data_model) + + manifest_generator = ManifestGenerator( - path_to_json_ld=helpers.get_data_path("example.model.jsonld"), + path_to_json_ld=path_to_data_model, + graph=graph_data_model, root=data_type, use_annotations=use_annotations, ) @@ -83,16 +112,22 @@ def manifest(dataset_id, manifest_generator, request): class TestManifestGenerator: def 
test_init(self, helpers): + path_to_data_model = helpers.get_data_path("example.model.jsonld") + + # Get graph data model + graph_data_model = generate_graph_data_model(helpers, path_to_data_model=path_to_data_model) + generator = ManifestGenerator( + graph=graph_data_model, title="mock_title", - path_to_json_ld=helpers.get_data_path("example.model.jsonld"), + path_to_json_ld=path_to_data_model, ) assert type(generator.title) is str # assert generator.sheet_service == mock_creds["sheet_service"] assert generator.root is None - assert type(generator.sg) is SchemaGenerator + assert type(generator.DME) is DataModelGraphExplorer @pytest.mark.google_credentials_needed def test_get_manifest_first_time(self, manifest): @@ -159,13 +194,19 @@ def test_get_manifest_excel(self, helpers, sheet_url, output_format, dataset_id) data_type = "Patient" + path_to_data_model = helpers.get_data_path("example.model.jsonld") + + # Get graph data model + graph_data_model = generate_graph_data_model(helpers, path_to_data_model=path_to_data_model) + + generator = ManifestGenerator( - path_to_json_ld=helpers.get_data_path("example.model.jsonld"), + path_to_json_ld=path_to_data_model, + graph=graph_data_model, root=data_type, use_annotations=False, ) - manifest= generator.get_manifest(dataset_id=dataset_id, sheet_url = sheet_url, output_format = output_format) # if dataset id exists, it could return pandas dataframe, google spreadsheet, or an excel spreadsheet @@ -222,7 +263,7 @@ def test_get_json_schema(self, simple_manifest_generator, helpers, schema_path_p else: mock_json_schema = Mock() mock_json_schema.return_value = "mock json ld" - with patch.object(SchemaGenerator, "get_json_schema_requirements",mock_json_schema): + with patch.object(DataModelJSONSchema, "get_json_validation_schema",mock_json_schema): json_schema = generator._get_json_schema(json_schema_filepath=None) assert json_schema == "mock json ld" @@ -280,8 +321,15 @@ def test_update_dataframe_with_existing_df(self, helpers, 
existing_manifest): data_type = "Patient" sheet_url = True + path_to_data_model = helpers.get_data_path("example.model.jsonld") + + # Get graph data model + graph_data_model = generate_graph_data_model(helpers, path_to_data_model=path_to_data_model) + + # Instantiate the Manifest Generator. - generator = ManifestGenerator(path_to_json_ld=helpers.get_data_path("example.model.jsonld"), + generator = ManifestGenerator(path_to_json_ld=path_to_data_model, + graph=graph_data_model, root=data_type, use_annotations=False, ) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index bf807789e..3b1ffef6b 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -4,13 +4,20 @@ import pandas as pd import pytest -from schematic.schemas import df_parser +#from schematic.schemas import df_parser from schematic.utils.df_utils import load_df logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) +class TestDataModelEdges(): + def test_generate_edge(self,helpers): + return +class TestDataModelGraph(): + + +''' @pytest.fixture def extended_schema_path(helpers, tmp_path): data_model_csv_path = helpers.get_data_path("example.model.csv") @@ -207,65 +214,4 @@ def test_convert_csv_to_data_model(self, helpers, extended_schema_path): attribute_present = df_parser.attribute_exists(extended_csv_model_se, "Assay") assert attribute_present - - def test_get_property_label_from_display_name(self, helpers): - se_obj = helpers.get_schema_explorer("example.model.jsonld") - - # tests where strict_camel_case is the same - assert(se_obj.get_property_label_from_display_name("howToAcquire") == "howToAcquire") - assert(se_obj.get_property_label_from_display_name("howToAcquire", strict_camel_case = True) == "howToAcquire") - assert(se_obj.get_property_label_from_display_name("how_to_acquire") == "howToAcquire") - assert(se_obj.get_property_label_from_display_name("how_to_acquire", strict_camel_case = True) == "howToAcquire") - 
assert(se_obj.get_property_label_from_display_name("howtoAcquire") == "howtoAcquire") - assert(se_obj.get_property_label_from_display_name("howtoAcquire", strict_camel_case = True) == "howtoAcquire") - assert(se_obj.get_property_label_from_display_name("How To Acquire") == "howToAcquire") - assert(se_obj.get_property_label_from_display_name("How To Acquire", strict_camel_case = True) == "howToAcquire") - assert(se_obj.get_property_label_from_display_name("Model Of Manifestation") == "modelOfManifestation") - assert(se_obj.get_property_label_from_display_name("Model Of Manifestation", strict_camel_case = True) == "modelOfManifestation") - assert(se_obj.get_property_label_from_display_name("ModelOfManifestation") == "modelOfManifestation") - assert(se_obj.get_property_label_from_display_name("ModelOfManifestation", strict_camel_case = True) == "modelOfManifestation") - assert(se_obj.get_property_label_from_display_name("model Of Manifestation") == "modelOfManifestation") - assert(se_obj.get_property_label_from_display_name("model Of Manifestation", strict_camel_case = True) == "modelOfManifestation") - - # tests where strict_camel_case changes the result - assert(se_obj.get_property_label_from_display_name("how to Acquire") == "howtoAcquire") - assert(se_obj.get_property_label_from_display_name("how to Acquire", strict_camel_case = True) == "howToAcquire") - assert(se_obj.get_property_label_from_display_name("How to Acquire") == "howtoAcquire") - assert(se_obj.get_property_label_from_display_name("How to Acquire", strict_camel_case = True) == "howToAcquire") - assert(se_obj.get_property_label_from_display_name("how to acquire") == "howtoacquire") - assert(se_obj.get_property_label_from_display_name("how to acquire", strict_camel_case = True) == "howToAcquire") - assert(se_obj.get_property_label_from_display_name("model of manifestation") == "modelofmanifestation") - assert(se_obj.get_property_label_from_display_name("model of manifestation", strict_camel_case = True) 
== "modelOfManifestation") - assert(se_obj.get_property_label_from_display_name("model of manifestation") == "modelofmanifestation") - assert(se_obj.get_property_label_from_display_name("model of manifestation", strict_camel_case = True) == "modelOfManifestation") - - def test_get_class_label_from_display_name(self, helpers): - se_obj = helpers.get_schema_explorer("example.model.jsonld") - - # tests where strict_camel_case is the same - assert(se_obj.get_class_label_from_display_name("howToAcquire") == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("howToAcquire", strict_camel_case = True) == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("how_to_acquire") == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("how_to_acquire", strict_camel_case = True) == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("howtoAcquire") == "HowtoAcquire") - assert(se_obj.get_class_label_from_display_name("howtoAcquire", strict_camel_case = True) == "HowtoAcquire") - assert(se_obj.get_class_label_from_display_name("How To Acquire") == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("How To Acquire", strict_camel_case = True) == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("Model Of Manifestation") == "ModelOfManifestation") - assert(se_obj.get_class_label_from_display_name("Model Of Manifestation", strict_camel_case = True) == "ModelOfManifestation") - assert(se_obj.get_class_label_from_display_name("ModelOfManifestation") == "ModelOfManifestation") - assert(se_obj.get_class_label_from_display_name("ModelOfManifestation", strict_camel_case = True) == "ModelOfManifestation") - assert(se_obj.get_class_label_from_display_name("model Of Manifestation") == "ModelOfManifestation") - assert(se_obj.get_class_label_from_display_name("model Of Manifestation", strict_camel_case = True) == "ModelOfManifestation") - - # tests where strict_camel_case changes the result - 
assert(se_obj.get_class_label_from_display_name("how to Acquire") == "HowtoAcquire") - assert(se_obj.get_class_label_from_display_name("how to Acquire", strict_camel_case = True) == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("How to Acquire") == "HowtoAcquire") - assert(se_obj.get_class_label_from_display_name("How to Acquire", strict_camel_case = True) == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("how to acquire") == "Howtoacquire") - assert(se_obj.get_class_label_from_display_name("how to acquire", strict_camel_case = True) == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("model of manifestation") == "Modelofmanifestation") - assert(se_obj.get_class_label_from_display_name("model of manifestation", strict_camel_case = True) == "ModelOfManifestation") - assert(se_obj.get_class_label_from_display_name("model of manifestation") == "Modelofmanifestation") - assert(se_obj.get_class_label_from_display_name("model of manifestation", strict_camel_case = True) == "ModelOfManifestation") \ No newline at end of file +''' diff --git a/tests/test_store.py b/tests/test_store.py index 4005069b2..32de8ac99 100644 --- a/tests/test_store.py +++ b/tests/test_store.py @@ -12,7 +12,11 @@ from schematic.models.metadata import MetadataModel from schematic.store.base import BaseStorage from schematic.store.synapse import SynapseStorage, DatasetFileView, ManifestDownload -from schematic.schemas.generator import SchemaGenerator +from schematic.schemas.data_model_parser import DataModelParser +from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer +from schematic.schemas.data_model_relationships import DataModelRelationships + + from synapseclient.core.exceptions import SynapseHTTPError from synapseclient.entity import File from schematic.configuration.configuration import Configuration @@ -146,7 +150,21 @@ def test_get_file_entityIds(self, helpers, synapse_store, only_new_files): 
'file-based']) def test_annotation_submission(self, synapse_store, helpers, manifest_path, test_annotations, datasetId, manifest_record_type, config: Configuration): # Upload dataset annotations - sg = SchemaGenerator(config.model_location) + + # Instantiate DataModelParser + data_model_parser = DataModelParser(path_to_data_model = config.model_location) + + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + # Instantiate DataModelGraphExplorer + DME = DataModelGraphExplorer(graph_data_model) try: for attempt in Retrying( @@ -156,7 +174,7 @@ def test_annotation_submission(self, synapse_store, helpers, manifest_path, test ): with attempt: manifest_id = synapse_store.associateMetadataWithFiles( - schemaGenerator = sg, + DME = DME, metadataManifestPath = helpers.get_data_path(manifest_path), datasetId = datasetId, manifest_record_type = manifest_record_type, @@ -337,11 +355,25 @@ def test_createTable(self, helpers, synapse_store, config: Configuration, projec # associate metadata with files manifest_path = "mock_manifests/table_manifest.csv" inputModelLocaiton = helpers.get_data_path(os.path.basename(config.model_location)) - sg = SchemaGenerator(inputModelLocaiton) + #sg = SchemaGenerator(inputModelLocaiton) + + # Instantiate DataModelParser + data_model_parser = DataModelParser(path_to_data_model = inputModelLocaiton) + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + # Instantiate DataModelGraphExplorer + DME = DataModelGraphExplorer(graph_data_model) # updating file view on synapse takes a long time manifestId = synapse_store.associateMetadataWithFiles( - schemaGenerator 
= sg, + DME = DME, metadataManifestPath = helpers.get_data_path(manifest_path), datasetId = datasetId, manifest_record_type = 'table_and_file', @@ -376,11 +408,24 @@ def test_replaceTable(self, helpers, synapse_store, config: Configuration, proje # associate org FollowUp metadata with files inputModelLocaiton = helpers.get_data_path(os.path.basename(config.model_location)) - sg = SchemaGenerator(inputModelLocaiton) + #sg = SchemaGenerator(inputModelLocaiton) + + data_model_parser = DataModelParser(path_to_data_model = inputModelLocaiton) + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + # Instantiate DataModelGraphExplorer + DME = DataModelGraphExplorer(graph_data_model) # updating file view on synapse takes a long time manifestId = synapse_store.associateMetadataWithFiles( - schemaGenerator = sg, + DME = DME, metadataManifestPath = helpers.get_data_path(manifest_path), datasetId = datasetId, manifest_record_type = 'table_and_file', @@ -402,7 +447,7 @@ def test_replaceTable(self, helpers, synapse_store, config: Configuration, proje # Associate replacement manifest with files manifestId = synapse_store.associateMetadataWithFiles( - schemaGenerator = sg, + DME = DME, metadataManifestPath = helpers.get_data_path(replacement_manifest_path), datasetId = datasetId, manifest_record_type = 'table_and_file', @@ -443,11 +488,23 @@ def test_upsertTable(self, helpers, synapse_store, config:Configuration, project # associate org FollowUp metadata with files inputModelLocaiton = helpers.get_data_path(os.path.basename(config.model_location)) - sg = SchemaGenerator(inputModelLocaiton) + + data_model_parser = DataModelParser(path_to_data_model = inputModelLocaiton) + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + 
data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + # Instantiate DataModelGraphExplorer + DME = DataModelGraphExplorer(graph_data_model) # updating file view on synapse takes a long time manifestId = synapse_store.associateMetadataWithFiles( - schemaGenerator = sg, + DME = DME, metadataManifestPath = helpers.get_data_path(manifest_path), datasetId = datasetId, manifest_record_type = 'table_and_file', @@ -473,7 +530,7 @@ def test_upsertTable(self, helpers, synapse_store, config:Configuration, project # Associate new manifest with files manifestId = synapse_store.associateMetadataWithFiles( - schemaGenerator = sg, + DME = DME, metadataManifestPath = helpers.get_data_path(replacement_manifest_path), datasetId = datasetId, manifest_record_type = 'table_and_file', diff --git a/tests/test_utils.py b/tests/test_utils.py index 98fa3b63a..f3ddb18ee 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -11,13 +11,19 @@ from pandas.testing import assert_frame_equal from synapseclient.core.exceptions import SynapseHTTPError -from schematic.schemas.explorer import SchemaExplorer -from schematic.schemas import df_parser +from schematic.schemas.data_model_parser import DataModelParser +from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer +from schematic.schemas.data_model_jsonld import DataModelJsonLD +from schematic.schemas.data_model_json_schema import DataModelJSONSchema + +from schematic.schemas.data_model_relationships import DataModelRelationships +from schematic.schemas.data_model_jsonld import DataModelJsonLD, convert_graph_to_jsonld from schematic.utils import general from schematic.utils import cli_utils from schematic.utils import io_utils from schematic.utils import df_utils from schematic.utils import validate_utils +from schematic.utils.schema_utils import export_schema, get_property_label_from_display_name, 
get_class_label_from_display_name from schematic.exceptions import ( MissingConfigValueError, MissingConfigAndArgumentValueError, @@ -262,46 +268,153 @@ def test_populate_column(self): output_df = df_utils.populate_df_col_with_another_col(input_df,'column1','column2') assert (output_df["column2"].values == ["col1Val","col1Val"]).all() +class TestSchemaUtils: + def test_get_property_label_from_display_name(self, helpers): + + # tests where strict_camel_case is the same + assert(get_property_label_from_display_name("howToAcquire") == "howToAcquire") + assert(get_property_label_from_display_name("howToAcquire", strict_camel_case = True) == "howToAcquire") + assert(get_property_label_from_display_name("how_to_acquire") == "howToAcquire") + assert(get_property_label_from_display_name("how_to_acquire", strict_camel_case = True) == "howToAcquire") + assert(get_property_label_from_display_name("howtoAcquire") == "howtoAcquire") + assert(get_property_label_from_display_name("howtoAcquire", strict_camel_case = True) == "howtoAcquire") + assert(get_property_label_from_display_name("How To Acquire") == "howToAcquire") + assert(get_property_label_from_display_name("How To Acquire", strict_camel_case = True) == "howToAcquire") + assert(get_property_label_from_display_name("Model Of Manifestation") == "modelOfManifestation") + assert(get_property_label_from_display_name("Model Of Manifestation", strict_camel_case = True) == "modelOfManifestation") + assert(get_property_label_from_display_name("ModelOfManifestation") == "modelOfManifestation") + assert(get_property_label_from_display_name("ModelOfManifestation", strict_camel_case = True) == "modelOfManifestation") + assert(get_property_label_from_display_name("model Of Manifestation") == "modelOfManifestation") + assert(get_property_label_from_display_name("model Of Manifestation", strict_camel_case = True) == "modelOfManifestation") + + # tests where strict_camel_case changes the result + 
assert(get_property_label_from_display_name("how to Acquire") == "howtoAcquire") + assert(get_property_label_from_display_name("how to Acquire", strict_camel_case = True) == "howToAcquire") + assert(get_property_label_from_display_name("How to Acquire") == "howtoAcquire") + assert(get_property_label_from_display_name("How to Acquire", strict_camel_case = True) == "howToAcquire") + assert(get_property_label_from_display_name("how to acquire") == "howtoacquire") + assert(get_property_label_from_display_name("how to acquire", strict_camel_case = True) == "howToAcquire") + assert(get_property_label_from_display_name("model of manifestation") == "modelofmanifestation") + assert(get_property_label_from_display_name("model of manifestation", strict_camel_case = True) == "modelOfManifestation") + assert(get_property_label_from_display_name("model of manifestation") == "modelofmanifestation") + assert(get_property_label_from_display_name("model of manifestation", strict_camel_case = True) == "modelOfManifestation") + + def test_get_class_label_from_display_name(self, helpers): + + # tests where strict_camel_case is the same + assert(get_class_label_from_display_name("howToAcquire") == "HowToAcquire") + assert(get_class_label_from_display_name("howToAcquire", strict_camel_case = True) == "HowToAcquire") + assert(get_class_label_from_display_name("how_to_acquire") == "HowToAcquire") + assert(get_class_label_from_display_name("how_to_acquire", strict_camel_case = True) == "HowToAcquire") + assert(get_class_label_from_display_name("howtoAcquire") == "HowtoAcquire") + assert(get_class_label_from_display_name("howtoAcquire", strict_camel_case = True) == "HowtoAcquire") + assert(get_class_label_from_display_name("How To Acquire") == "HowToAcquire") + assert(get_class_label_from_display_name("How To Acquire", strict_camel_case = True) == "HowToAcquire") + assert(get_class_label_from_display_name("Model Of Manifestation") == "ModelOfManifestation") + 
assert(get_class_label_from_display_name("Model Of Manifestation", strict_camel_case = True) == "ModelOfManifestation") + assert(get_class_label_from_display_name("ModelOfManifestation") == "ModelOfManifestation") + assert(get_class_label_from_display_name("ModelOfManifestation", strict_camel_case = True) == "ModelOfManifestation") + assert(get_class_label_from_display_name("model Of Manifestation") == "ModelOfManifestation") + assert(get_class_label_from_display_name("model Of Manifestation", strict_camel_case = True) == "ModelOfManifestation") + + # tests where strict_camel_case changes the result + assert(get_class_label_from_display_name("how to Acquire") == "HowtoAcquire") + assert(get_class_label_from_display_name("how to Acquire", strict_camel_case = True) == "HowToAcquire") + assert(get_class_label_from_display_name("How to Acquire") == "HowtoAcquire") + assert(get_class_label_from_display_name("How to Acquire", strict_camel_case = True) == "HowToAcquire") + assert(get_class_label_from_display_name("how to acquire") == "Howtoacquire") + assert(get_class_label_from_display_name("how to acquire", strict_camel_case = True) == "HowToAcquire") + assert(get_class_label_from_display_name("model of manifestation") == "Modelofmanifestation") + assert(get_class_label_from_display_name("model of manifestation", strict_camel_case = True) == "ModelOfManifestation") + assert(get_class_label_from_display_name("model of manifestation") == "Modelofmanifestation") + assert(get_class_label_from_display_name("model of manifestation", strict_camel_case = True) == "ModelOfManifestation") + class TestValidateUtils: def test_validate_schema(self, helpers): - + ''' + Previously did: se_obj = helpers.get_schema_explorer("example.model.jsonld") - actual = validate_utils.validate_schema(se_obj.schema) + schema is defined as: self.schema = load_json(schema) + + TODO: Validate this is doing what its supposed to. 
+ ''' + # Get data model path + data_model_path = helpers.get_data_path("example.model.jsonld") + schema = io_utils.load_json(data_model_path) + #need to pass the jsonschema + actual = validate_utils.validate_schema(schema) + assert actual is None + def test_validate_class_schema(self, helpers): + """ + Get a class template, fill it out with mock data, and validate against a JSON Schema - se_obj = helpers.get_schema_explorer("example.model.jsonld") + """ + + # Get data model path + data_model_path = helpers.get_data_path("example.model.jsonld") + + # Instantiate DataModelParser + data_model_parser = DataModelParser(path_to_data_model = data_model_path) + + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) - mock_class = se_obj.generate_class_template() + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + dm_jsonld = DataModelJsonLD(graph_data_model) + + mock_class = dm_jsonld.class_template() mock_class["@id"] = "bts:MockClass" mock_class["@type"] = "rdfs:Class" mock_class["@rdfs:comment"] = "This is a mock class" mock_class["@rdfs:label"] = "MockClass" - mock_class["rdfs:subClassOf"]["@id"] = "bts:Patient" + mock_class["rdfs:subClassOf"].append({"@id":"bts:Patient"}) - actual = validate_utils.validate_class_schema(mock_class) + error = validate_utils.validate_class_schema(mock_class) - assert actual is None + assert error is None + def test_validate_property_schema(self, helpers): - se_obj = helpers.get_schema_explorer("example.model.jsonld") + # Get data model path + data_model_path = helpers.get_data_path("example.model.jsonld") + + # Instantiate DataModelParser + data_model_parser = DataModelParser(path_to_data_model = data_model_path) + + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + 
graph_data_model = data_model_grapher.generate_data_model_graph() - mock_class = se_obj.generate_property_template() + dm_jsonld = DataModelJsonLD(graph_data_model) + + mock_class = dm_jsonld.property_template() mock_class["@id"] = "bts:MockProperty" mock_class["@type"] = "rdf:Property" mock_class["@rdfs:comment"] = "This is a mock Patient class" - mock_class["@rdfs:label"] = "MockProperty" - mock_class["schema:domainIncludes"]["@id"] = "bts:Patient" - - actual = validate_utils.validate_property_schema(mock_class) + mock_class["@rdfs:label"] = "MockProperty" + mock_class["schema:domainIncludes"].append({"@id":"bts:Patient"}) - assert actual is None + error = validate_utils.validate_property_schema(mock_class) + assert error is None + class TestCsvUtils: def test_csv_to_schemaorg(self, helpers, tmp_path): @@ -312,11 +425,25 @@ def test_csv_to_schemaorg(self, helpers, tmp_path): """ csv_path = helpers.get_data_path("example.model.csv") - base_se = df_parser._convert_csv_to_data_model(csv_path) + # Instantiate DataModelParser + data_model_parser = DataModelParser(path_to_data_model = csv_path) + + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + # Convert graph to JSONLD + jsonld_data_model = convert_graph_to_jsonld(Graph=graph_data_model) # saving updated schema.org schema actual_jsonld_path = tmp_path / "example.from_csv.model.jsonld" - base_se.export_schema(actual_jsonld_path) + #base_se.export_schema(actual_jsonld_path) + export_schema(jsonld_data_model, actual_jsonld_path) # Compare both JSON-LD files expected_jsonld_path = helpers.get_data_path("example.model.jsonld") diff --git a/tests/test_validation.py b/tests/test_validation.py index 22b64199b..923669f84 100644 --- a/tests/test_validation.py +++ b/tests/test_validation.py @@ -1,6 +1,7 @@ import os import 
logging import re +import networkx as nx import jsonschema import pytest from pathlib import Path @@ -10,18 +11,34 @@ from schematic.models.validate_manifest import ValidateManifest from schematic.models.metadata import MetadataModel from schematic.store.synapse import SynapseStorage -from schematic.schemas.generator import SchemaGenerator + +from schematic.schemas.data_model_parser import DataModelParser +from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer +from schematic.schemas.data_model_json_schema import DataModelJSONSchema + from schematic.utils.validate_rules_utils import validation_rule_info logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) @pytest.fixture -def sg(helpers): +def DME(helpers): inputModelLocation = helpers.get_data_path('example.model.jsonld') - sg = SchemaGenerator(inputModelLocation) + #sg = SchemaGenerator(inputModelLocation) + data_model_parser = DataModelParser(path_to_data_model = inputModelLocation) + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() - yield sg + # Instantiate DataModelGraphExplorer + DME = DataModelGraphExplorer(graph_data_model) + + yield DME @pytest.fixture def metadataModel(helpers): @@ -56,7 +73,7 @@ def test_valid_manifest(self,helpers,metadataModel): assert warnings == [] - def test_invalid_manifest(self,helpers,sg,metadataModel): + def test_invalid_manifest(self,helpers, DME,metadataModel): manifestPath = helpers.get_data_path("mock_manifests/Invalid_Test_Manifest.csv") rootNode = 'MockComponent' @@ -71,7 +88,7 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): row_num = '3', attribute_name = 'Check Num', invalid_entry = 'c', - sg = sg, + DME = DME, )[0] in errors assert GenerateError.generate_type_error( @@ -79,7 +96,7 @@ def 
test_invalid_manifest(self,helpers,sg,metadataModel): row_num = '3', attribute_name = 'Check Int', invalid_entry = '5.63', - sg = sg, + DME = DME, )[0] in errors assert GenerateError.generate_type_error( @@ -87,7 +104,7 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): row_num = '3', attribute_name = 'Check String', invalid_entry = '94', - sg = sg, + DME = DME, )[0] in errors assert GenerateError.generate_list_error( @@ -97,7 +114,7 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): attribute_name = 'Check List', list_error = "not_comma_delimited", invalid_entry = 'invalid list values', - sg = sg, + DME = DME, )[0] in errors assert GenerateError.generate_list_error( @@ -107,7 +124,7 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): attribute_name = 'Check Regex List', list_error = "not_comma_delimited", invalid_entry = 'ab cd ef', - sg = sg, + DME = DME, )[0] in errors assert GenerateError.generate_regex_error( @@ -117,7 +134,7 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): attribute_name = 'Check Regex Format', module_to_call = 'match', invalid_entry = 'm', - sg = sg, + DME = DME, )[0] in errors assert GenerateError.generate_regex_error( @@ -127,7 +144,7 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): attribute_name = 'Check Regex Single', module_to_call = 'search', invalid_entry = 'q', - sg = sg, + DME = DME, )[0] in errors assert GenerateError.generate_regex_error( @@ -137,7 +154,7 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): attribute_name = 'Check Regex Integer', module_to_call = 'search', invalid_entry = '5.4', - sg = sg, + DME = DME, )[0] in errors assert GenerateError.generate_url_error( @@ -148,14 +165,14 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): attribute_name = 'Check URL', argument = None, invalid_entry = 'http://googlef.com/', - sg = sg, + DME = DME, )[0] in errors date_err = GenerateError.generate_content_error( val_rule = 'date', attribute_name = 'Check 
Date', - sg = sg, + DME = DME, row_num = ['2','3','4'], error_val = ['84-43-094', '32-984', 'notADate'], )[0] @@ -165,7 +182,7 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): assert GenerateError.generate_content_error( val_rule = 'unique error', attribute_name = 'Check Unique', - sg = sg, + DME = DME, row_num = ['2','3','4'], error_val = ['str1'], )[0] in errors @@ -173,7 +190,7 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): assert GenerateError.generate_content_error( val_rule = 'inRange 50 100 error', attribute_name = 'Check Range', - sg = sg, + DME = DME, row_num = ['3'], error_val = ['30'], )[0] in errors @@ -182,13 +199,13 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): assert GenerateError.generate_content_error( val_rule = 'recommended', attribute_name = 'Check Recommended', - sg = sg, + DME = DME, )[1] in warnings assert GenerateError.generate_content_error( val_rule = 'protectAges', attribute_name = 'Check Ages', - sg = sg, + DME = DME, row_num = ['2','3'], error_val = ['6549','32851'], )[1] in warnings @@ -199,7 +216,7 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): attribute_name='Check Match at Least', invalid_entry = ['7163'], missing_manifest_ID = ['syn27600110', 'syn29381803'], - sg = sg, + DME = DME, )[1] in warnings assert GenerateError.generate_cross_warning( @@ -207,7 +224,7 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): row_num = ['3'], attribute_name = 'Check Match at Least values', invalid_entry = ['51100'], - sg = sg, + DME = DME, )[1] in warnings assert \ @@ -215,14 +232,14 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): val_rule = 'matchExactlyOne', attribute_name='Check Match Exactly', matching_manifests = ['syn29862078', 'syn27648165'], - sg = sg, + DME = DME, )[1] in warnings \ or \ GenerateError.generate_cross_warning( val_rule = 'matchExactlyOne', attribute_name='Check Match Exactly', matching_manifests = ['syn29862066', 'syn27648165'], - sg = sg, + DME = 
DME, )[1] in warnings @@ -231,7 +248,7 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): row_num = ['2', '3', '4'], attribute_name='Check Match Exactly values', invalid_entry = ['71738', '98085', '210065'], - sg = sg, + DME = DME, )[1] warning_in_list = [cross_warning[1] in warning for warning in warnings] assert any(warning_in_list) @@ -239,7 +256,7 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): - def test_in_house_validation(self,helpers,sg,metadataModel): + def test_in_house_validation(self,helpers,DME,metadataModel): manifestPath = helpers.get_data_path("mock_manifests/Invalid_Test_Manifest.csv") rootNode = 'MockComponent' @@ -255,7 +272,7 @@ def test_in_house_validation(self,helpers,sg,metadataModel): row_num = '3', attribute_name = 'Check Num', invalid_entry = 'c', - sg = sg, + DME = DME, )[0] in errors assert GenerateError.generate_type_error( @@ -263,7 +280,7 @@ def test_in_house_validation(self,helpers,sg,metadataModel): row_num = '3', attribute_name = 'Check Int', invalid_entry = '5.63', - sg = sg, + DME = DME, )[0] in errors assert GenerateError.generate_type_error( @@ -271,7 +288,7 @@ def test_in_house_validation(self,helpers,sg,metadataModel): row_num = '3', attribute_name = 'Check String', invalid_entry = '94', - sg = sg, + DME = DME, )[0] in errors assert GenerateError.generate_type_error( @@ -279,7 +296,7 @@ def test_in_house_validation(self,helpers,sg,metadataModel): row_num = '3', attribute_name = 'Check NA', invalid_entry = '9.5', - sg = sg, + DME = DME, )[0] in errors assert GenerateError.generate_list_error( @@ -289,7 +306,7 @@ def test_in_house_validation(self,helpers,sg,metadataModel): attribute_name = 'Check List', list_error = "not_comma_delimited", invalid_entry = 'invalid list values', - sg = sg, + DME = DME, )[0] in errors assert GenerateError.generate_list_error( @@ -299,7 +316,7 @@ def test_in_house_validation(self,helpers,sg,metadataModel): attribute_name = 'Check Regex List', list_error = 
"not_comma_delimited", invalid_entry = 'ab cd ef', - sg = sg, + DME = DME, )[0] in errors assert GenerateError.generate_regex_error( @@ -309,7 +326,7 @@ def test_in_house_validation(self,helpers,sg,metadataModel): attribute_name = 'Check Regex Single', module_to_call = 'search', invalid_entry = 'q', - sg = sg, + DME = DME, )[0] in errors assert GenerateError.generate_regex_error( @@ -319,7 +336,7 @@ def test_in_house_validation(self,helpers,sg,metadataModel): attribute_name = 'Check Regex Format', module_to_call = 'match', invalid_entry = 'm', - sg = sg, + DME = DME, )[0] in errors assert GenerateError.generate_url_error( @@ -330,7 +347,7 @@ def test_in_house_validation(self,helpers,sg,metadataModel): attribute_name = 'Check URL', argument = None, invalid_entry = 'http://googlef.com/', - sg = sg, + DME = DME, )[0] in errors @@ -341,7 +358,7 @@ def test_in_house_validation(self,helpers,sg,metadataModel): attribute_name='Check Match at Least', invalid_entry = ['7163'], missing_manifest_ID = ['syn27600110', 'syn29381803'], - sg = sg, + DME = DME, )[1] in warnings assert GenerateError.generate_cross_warning( @@ -349,7 +366,7 @@ def test_in_house_validation(self,helpers,sg,metadataModel): row_num = ['3'], attribute_name = 'Check Match at Least values', invalid_entry = ['51100'], - sg = sg, + DME = DME, )[1] in warnings assert \ @@ -357,14 +374,14 @@ def test_in_house_validation(self,helpers,sg,metadataModel): val_rule = 'matchExactlyOne', attribute_name='Check Match Exactly', matching_manifests = ['syn29862078', 'syn27648165'], - sg = sg, + DME = DME, )[1] in warnings \ or \ GenerateError.generate_cross_warning( val_rule = 'matchExactlyOne', attribute_name='Check Match Exactly', matching_manifests = ['syn29862066', 'syn27648165'], - sg = sg, + DME = DME, )[1] in warnings assert GenerateError.generate_cross_warning( @@ -372,65 +389,72 @@ def test_in_house_validation(self,helpers,sg,metadataModel): row_num = ['2', '3', '4'], attribute_name='Check Match Exactly values', 
invalid_entry = ['71738', '98085', '210065'], - sg = sg, + DME = DME, )[1] in warnings @pytest.mark.rule_combos(reason = 'This introduces a great number of tests covering every possible rule combination that are only necessary on occasion.') @pytest.mark.parametrize("base_rule, second_rule", get_rule_combinations()) - def test_rule_combinations(self, helpers, sg, base_rule, second_rule, metadataModel): - #print(base_rule,second_rule) + def test_rule_combinations(self, helpers, DME, base_rule, second_rule, metadataModel): + """ + TODO: Describe what this test is doing. + Updating the data model graph to allow testing of allowable rule combinations. + Works one rule combo at a time using (get_rule_combinations.) + """ rule_regex = re.compile(base_rule+'.*') + rootNode = 'MockComponent' manifestPath = helpers.get_data_path("mock_manifests/Rule_Combo_Manifest.csv") manifest = helpers.get_data_frame(manifestPath) - - # adjust rules and arguments as necessary for testing combinations - for attribute in sg.se.schema['@graph']: #Doing it in a loop becasue of sg.se.edit_class design - if 'sms:validationRules' in attribute and attribute['sms:validationRules']: - # remove default combination for attribute's reules - if attribute['sms:displayName'] == 'Check NA': - attribute['sms:validationRules'].remove('int') - - # update class - sg.se.edit_class(attribute) + + # Get a view of the node data + all_node_data = DME.graph.nodes.data() + + # Update select validation rules in the data model graph for columns in the manifest + for attribute in manifest.columns: + # Get the node label + node_label = DME.get_node_label(attribute) + + # Get a view of the recorded info for current node + node_info = all_node_data[node_label] + if node_info['validationRules']: + + if node_info['displayName'] == 'Check NA': + # Edit the node info -in place- + node_info['validationRules'].remove('int') break - - # Add rule args if necessary - if base_rule in attribute['sms:validationRules'] or 
re.match(rule_regex, attribute['sms:validationRules'][0]): + + if base_rule in node_info['validationRules'] or re.match(rule_regex, node_info['validationRules'][0]): if second_rule.startswith('matchAtLeastOne') or second_rule.startswith('matchExactlyOne'): - rule_args = f" MockComponent.{attribute['rdfs:label']} Patient.PatientID" + rule_args = f" MockComponent.{node_label} Patient.PatientID" elif second_rule.startswith('inRange'): rule_args = ' 1 1000 warning' elif second_rule.startswith('regex'): rule_args = ' search [a-f]' else: rule_args = '' - - attribute['sms:validationRules'].append(second_rule + rule_args) - - # update class - sg.se.edit_class(attribute) + # Edit the node info -in place- + node_info['validationRules'].append(second_rule + rule_args) break - target_column=attribute['sms:displayName'] - for col in manifest.columns: - if col not in ('Component', target_column): - manifest.drop(columns=col, inplace=True) + # Update the manifest to only contain the Component and attribute column where the rule was changed. 
+ manifest = manifest[['Component', attribute]] + + data_model_js = DataModelJSONSchema(jsonld_path=helpers.get_data_path('example.model.jsonld'), graph=DME.graph) + json_schema = data_model_js.get_json_validation_schema(source_node=rootNode, schema_name=rootNode + "_validation") - rootNode = 'MockComponent' validateManifest = ValidateManifest( errors = [], manifest = manifest, manifestPath = manifestPath, - sg = sg, - jsonSchema = sg.get_json_schema_requirements(rootNode, rootNode + "_validation") + DME = DME, + jsonSchema = json_schema ) #perform validation with no exceptions raised _, errors, warnings = validateManifest.validate_manifest_rules( manifest = manifest, - sg = sg, + DME = DME, restrict_rules = False, project_scope = None, ) diff --git a/tests/test_validator.py b/tests/test_validator.py index 5f01bbd34..c4e743c80 100644 --- a/tests/test_validator.py +++ b/tests/test_validator.py @@ -8,7 +8,7 @@ from schematic.schemas.data_model_parser import DataModelParser -from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExporer +from schematic.schemas.data_model_graph import DataModelGraph from schematic.schemas.data_model_validator import DataModelValidator from schematic.schemas.data_model_jsonld import DataModelJsonLD, convert_graph_to_jsonld @@ -99,7 +99,8 @@ def test_dag(self, helpers): # Run validation validator_errors = DMV.check_is_dag() - expected_error = ['Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: Patient and PatientID, please remove this loop from your model and submit again.'] - - assert expected_error == validator_errors + # nodes could be in different order so need to account for that + expected_errors = ['Schematic requires models be a directed acyclic graph (DAG). 
Your graph is not a DAG, we found a loop between: Patient and PatientID, please remove this loop from your model and submit again.', + 'Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: PatientID and Patient, please remove this loop from your model and submit again.'] + assert validator_errors[0] in expected_errors From e51e7f10d1760ca3a7f59596513c57643383ba97 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 25 Aug 2023 16:16:36 -0700 Subject: [PATCH 043/239] update all referencing so all tests CLI calls and APIs work --- schematic/manifest/commands.py | 28 +- schematic/manifest/generator.py | 6 +- schematic/models/GE_Helpers.py | 26 +- schematic/models/metadata.py | 60 +- schematic/models/validate_attribute.py | 98 +- schematic/models/validate_manifest.py | 42 +- schematic/schemas/__init__.py | 10 +- schematic/schemas/commands.py | 38 +- schematic/schemas/data_model_graph.py | 130 +- schematic/schemas/data_model_json_schema.py | 4 +- schematic/schemas/data_model_jsonld.py | 39 +- schematic/schemas/data_model_nodes.py | 4 +- schematic/schemas/data_model_relationships.py | 3 +- schematic/store/synapse.py | 140 +- schematic/utils/schema_utils.py | 303 +- .../visualization/attributes_explorer.py | 32 +- schematic/visualization/tangled_tree.py | 43 +- schematic_api/api/openapi/api.yaml | 18 +- schematic_api/api/routes.py | 163 +- tests/data/example.model.jsonld | 3126 ++++------------- 20 files changed, 1236 insertions(+), 3077 deletions(-) diff --git a/schematic/manifest/commands.py b/schematic/manifest/commands.py index a75aa7216..1d03b38d8 100644 --- a/schematic/manifest/commands.py +++ b/schematic/manifest/commands.py @@ -6,11 +6,14 @@ import click import click_log +from schematic.schemas.data_model_parser import DataModelParser +from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer from schematic.manifest.generator import ManifestGenerator + from 
schematic.utils.cli_utils import log_value_from_config, query_dict, parse_synIDs -from schematic.help import manifest_commands -from schematic.schemas.generator import SchemaGenerator from schematic.utils.google_api_utils import export_manifest_csv +from schematic.help import manifest_commands + from schematic.store.synapse import SynapseStorage from schematic.configuration.configuration import CONFIG @@ -128,10 +131,24 @@ def get_manifest( title = CONFIG.manifest_title log_value_from_config("title", title) + data_model_parser = DataModelParser(path_to_data_model = jsonld) + + #Parse Model + logger.info("Parsing data model.") + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + logger.info("Generating data model graph.") + graph_data_model = data_model_grapher.generate_data_model_graph() + def create_single_manifest(data_type, output_csv=None, output_xlsx=None): # create object of type ManifestGenerator manifest_generator = ManifestGenerator( path_to_json_ld=jsonld, + graph = graph_data_model, title=t, root=data_type, use_annotations=use_annotations, @@ -194,9 +211,10 @@ def create_single_manifest(data_type, output_csv=None, output_xlsx=None): if type(data_type) is str: data_type = [data_type] - if data_type[0] == 'all manifests': - sg = SchemaGenerator(path_to_json_ld=jsonld) - component_digraph = sg.se.get_digraph_by_edge_type('requiresComponent') + if data_type[0] == 'all manifests': + # Feed graph into the data model graph explorer + DME = DataModelGraphExplorer(graph_data_model) + component_digraph = DME.get_digraph_by_edge_type('requiresComponent') components = component_digraph.nodes() for component in components: t = f'{title}.{component}.manifest' diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index f8bf236b1..eccc71e0e 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -12,7 
+12,7 @@ from tempfile import NamedTemporaryFile from typing import Dict, List, Optional, Tuple, Union -from schematic.schemas.data_model_graph import DataModelGraphExporer +from schematic.schemas.data_model_graph import DataModelGraphExplorer from schematic.schemas.data_model_json_schema import DataModelJSONSchema from schematic.utils.google_api_utils import ( execute_google_api_requests, @@ -83,7 +83,7 @@ def __init__( ) # Data Model Explorer object - self.DME = DataModelGraphExporer(self.graph) + self.DME = DataModelGraphExplorer(self.graph) # additional metadata to add to manifest self.additional_metadata = additional_metadata @@ -1630,7 +1630,7 @@ def _update_dataframe_with_existing_df(self, empty_manifest_url: str, existing_d # Get headers for the current schema and existing manifest df. current_schema_headers = list(self.get_dataframe_by_url(manifest_url=empty_manifest_url).columns) - existing_manfiest_headers = list(existing_df.columns) + existing_manifest_headers = list(existing_df.columns) # Find columns that exist in the current schema, but are not in the manifest being downloaded. 
new_columns = self._get_missing_columns(current_schema_headers, existing_manifest_headers) diff --git a/schematic/models/GE_Helpers.py b/schematic/models/GE_Helpers.py index 73ad20709..5cd70d05c 100644 --- a/schematic/models/GE_Helpers.py +++ b/schematic/models/GE_Helpers.py @@ -25,7 +25,8 @@ from schematic.models.validate_attribute import GenerateError -from schematic.schemas.generator import SchemaGenerator +from schematic.schemas.data_model_graph import DataModelGraphExplorer + from schematic.utils.validate_utils import rule_in_rule_list, np_array_to_str_list, iterable_to_str_list logger = logging.getLogger(__name__) @@ -39,7 +40,7 @@ class GreatExpectationsHelpers(object): 2) Parse results dict to generate appropriate errors """ def __init__(self, - sg, + DME, unimplemented_expectations, manifest, manifestPath @@ -48,8 +49,8 @@ def __init__(self, Purpose: Instantiate a great expectations helpers object Args: - sg: - schemaGenerator object + DME: + DataModelExplorer Object unimplemented_expectations: dictionary of validation rules that currently do not have expectations developed manifest: @@ -58,7 +59,7 @@ def __init__(self, path to manifest being validated """ self.unimplemented_expectations = unimplemented_expectations - self.sg = sg + self.DME = DME self.manifest = manifest self.manifestPath = manifestPath @@ -151,18 +152,21 @@ def build_expectation_suite(self,): overwrite_existing=True ) - #build expectation configurations for each expecation + #build expectation configurations for each expectation for col in self.manifest.columns: args={} meta={} # remove trailing/leading whitespaces from manifest self.manifest.applymap(lambda x: x.strip() if isinstance(x, str) else x) - validation_rules = self.sg.get_node_validation_rules(col) + validation_rules = self.DME.get_node_validation_rules(col) #check if attribute has any rules associated with it if validation_rules: #iterate through all validation rules for an attribute + #TODO: Can remove when handling updated 
so split within graph + if '::' in validation_rules[0]: + validation_rules = validation_rules[0].split("::") for rule in validation_rules: base_rule = rule.split(" ")[0] @@ -384,7 +388,7 @@ def generate_errors( validation_types: Dict, errors: List, warnings: List, - sg: SchemaGenerator, + DME: DataModelGraphExplorer, ): """ Purpose: @@ -449,7 +453,7 @@ def generate_errors( row_num = str(row+2), attribute_name = errColumn, invalid_entry = str(value), - sg = sg, + DME = DME, ) if vr_errors: errors.append(vr_errors) @@ -465,7 +469,7 @@ def generate_errors( module_to_call = 'match', attribute_name = errColumn, invalid_entry = value, - sg = sg, + DME = DME, ) if vr_errors: errors.append(vr_errors) @@ -477,7 +481,7 @@ def generate_errors( attribute_name = errColumn, row_num = np_array_to_str_list(np.array(indices)+2), error_val = iterable_to_str_list(values), - sg = self.sg + DME = self.DME ) if vr_errors: errors.append(vr_errors) diff --git a/schematic/models/metadata.py b/schematic/models/metadata.py index d10d27151..f7ddfaee3 100644 --- a/schematic/models/metadata.py +++ b/schematic/models/metadata.py @@ -12,12 +12,12 @@ # allows specifying explicit variable types from typing import Any, Dict, Optional, Text, List -# handle schema logic; to be refactored as SchemaExplorer matures into a package -# as collaboration with Biothings progresses -from schematic.schemas.explorer import SchemaExplorer from schematic.manifest.generator import ManifestGenerator -from schematic.schemas.generator import SchemaGenerator +from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer +from schematic.schemas.data_model_parser import DataModelParser +from schematic.schemas.data_model_json_schema import DataModelJSONSchema + #TODO: This module should only be aware of the store interface # we shouldn't need to expose Synapse functionality explicitly @@ -25,7 +25,7 @@ from schematic.utils.df_utils import load_df -from schematic.models.validate_attribute import 
ValidateAttribute +#from schematic.models.validate_attribute import ValidateAttribute #looks unused. from schematic.models.validate_manifest import validate_all @@ -55,11 +55,22 @@ def __init__(self, inputMModelLocation: str, inputMModelLocationType: str,) -> N # ensure that it is necessarily pointing to a '.jsonld' file if inputMModelLocation.rpartition(".")[-1] == "jsonld": logger.debug( - f"Initializing SchemaGenerator object from {inputMModelLocation} schema." + f"Initializing DataModelGraphExplorer object from {inputMModelLocation} schema." ) self.inputMModelLocation = inputMModelLocation - self.sg = SchemaGenerator(inputMModelLocation) + data_model_parser = DataModelParser(path_to_data_model = self.inputMModelLocation) + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + self.graph_data_model = data_model_grapher.generate_data_model_graph() + + self.DME = DataModelGraphExplorer(self.graph_data_model) + else: raise TypeError( f"Please make sure {inputMModelLocation} is a .jsonld file." @@ -102,7 +113,7 @@ def getOrderedModelNodes(self, rootNode: str, relationshipType: str) -> List[str Raises: ValueError: rootNode not found in metadata model. 
""" - ordered_nodes = self.sg.get_descendants_by_edge_type( + ordered_nodes = self.DME.get_descendants_by_edge_type( rootNode, relationshipType, connected=True, ordered=True ) @@ -140,6 +151,7 @@ def getModelManifest( mg = ManifestGenerator( path_to_json_ld=self.inputMModelLocation, + graph = self.graph_data_model, title=title, root=rootNode, additional_metadata=additionalMetadata, @@ -169,11 +181,11 @@ def get_component_requirements( """ # get required components for the input/source component - req_components = self.sg.get_component_requirements(source_component) + req_components = self.DME.get_component_requirements(source_component) # retreive components as graph if as_graph: - req_components_graph = self.sg.get_component_requirements_graph( + req_components_graph = self.DME.get_component_requirements_graph( source_component ) @@ -205,7 +217,11 @@ def validateModelManifest( # get validation schema for a given node in the data model, if the user has not provided input validation schema if not jsonSchema: - jsonSchema = self.sg.get_json_schema_requirements( + + # Instantiate Data Model Json Schema + self.data_model_js = DataModelJSONSchema(jsonld_path=self.inputMModelLocation, graph=self.graph_data_model) + + jsonSchema = self.data_model_js.get_json_validation_schema( rootNode, rootNode + "_validation" ) @@ -251,7 +267,15 @@ def validateModelManifest( return errors, warnings - errors, warnings, manifest = validate_all(self, errors, warnings, manifest, manifestPath, self.sg, jsonSchema, restrict_rules, project_scope) + errors, warnings, manifest = validate_all(self, + errors=errors, + warnings=warnings, + manifest=manifest, + manifestPath=manifestPath, + DME=self.DME, + jsonSchema=jsonSchema, + restrict_rules=restrict_rules, + project_scope=project_scope) return errors, warnings def populateModelManifest(self, title, manifestPath: str, rootNode: str, return_excel = False) -> str: @@ -269,7 +293,7 @@ def populateModelManifest(self, title, manifestPath: str, 
rootNode: str, return_ ValueError: rootNode not found in metadata model. """ mg = ManifestGenerator( - path_to_json_ld=self.inputMModelLocation, title=title, root=rootNode + path_to_json_ld=self.inputMModelLocation, graph = self.graph_data_model, title=title, root=rootNode ) emptyManifestURL = mg.get_manifest() @@ -316,7 +340,7 @@ def submit_metadata_manifest( try: # check if the component ("class" in schema) passed as argument is valid (present in schema) or not - self.sg.se.is_class_in_schema(validate_component) + self.DME.is_class_in_schema(validate_component) except: # a KeyError exception is raised when validate_component fails in the try-block above # here, we are suppressing the KeyError exception and replacing it with a more @@ -336,7 +360,7 @@ def submit_metadata_manifest( # upload manifest file from `manifest_path` path to entity with Syn ID `dataset_id` if exists(censored_manifest_path): censored_manifest_id = syn_store.associateMetadataWithFiles( - schemaGenerator = self.sg, + DME = self.DME, metadataManifestPath = censored_manifest_path, datasetId = dataset_id, manifest_record_type = manifest_record_type, @@ -347,7 +371,7 @@ def submit_metadata_manifest( restrict_maniest = True manifest_id = syn_store.associateMetadataWithFiles( - schemaGenerator = self.sg, + DME = self.DME, metadataManifestPath = manifest_path, datasetId = dataset_id, manifest_record_type = manifest_record_type, @@ -369,7 +393,7 @@ def submit_metadata_manifest( # no need to perform validation, just submit/associate the metadata manifest file if exists(censored_manifest_path): censored_manifest_id = syn_store.associateMetadataWithFiles( - schemaGenerator = self.sg, + DME = self.DME, metadataManifestPath=censored_manifest_path, datasetId=dataset_id, manifest_record_type=manifest_record_type, @@ -380,7 +404,7 @@ def submit_metadata_manifest( restrict_maniest = True manifest_id = syn_store.associateMetadataWithFiles( - schemaGenerator = self.sg, + DME = self.DME, 
metadataManifestPath=manifest_path, datasetId=dataset_id, manifest_record_type=manifest_record_type, diff --git a/schematic/models/validate_attribute.py b/schematic/models/validate_attribute.py index da98b20ef..9f8e6a31f 100644 --- a/schematic/models/validate_attribute.py +++ b/schematic/models/validate_attribute.py @@ -16,7 +16,8 @@ import pandas as pd from jsonschema import ValidationError -from schematic.schemas.generator import SchemaGenerator +from schematic.schemas.data_model_graph import DataModelGraphExplorer + from schematic.store.base import BaseStorage from schematic.store.synapse import SynapseStorage from schematic.utils.validate_rules_utils import validation_rule_info @@ -31,7 +32,7 @@ logger = logging.getLogger(__name__) class GenerateError: - def generate_schema_error(row_num: str, attribute_name: str, error_msg: str, invalid_entry: str, sg: SchemaGenerator,)-> List[str]: + def generate_schema_error(row_num: str, attribute_name: str, error_msg: str, invalid_entry: str, DME: DataModelGraphExplorer,)-> List[str]: ''' Purpose: Process error messages generated from schema Input: @@ -49,7 +50,7 @@ def generate_schema_error(row_num: str, attribute_name: str, error_msg: str, inv raises = GenerateError.get_message_level( val_rule = 'schema', attribute_name = attribute_name, - sg = sg, + DME = DME, ) #if a message needs to be raised, get the approrpiate function to do so @@ -77,7 +78,7 @@ def generate_schema_error(row_num: str, attribute_name: str, error_msg: str, inv def generate_list_error( list_string: str, row_num: str, attribute_name: str, list_error: str, - invalid_entry:str, sg: SchemaGenerator, val_rule: str, + invalid_entry:str, DME: DataModelGraphExplorer, val_rule: str, ) -> List[str]: """ Purpose: @@ -100,7 +101,7 @@ def generate_list_error( raises = GenerateError.get_message_level( val_rule = val_rule, attribute_name = attribute_name, - sg = sg, + DME = DME, ) #if a message needs to be raised, get the approrpiate function to do so @@ -137,7 
+138,7 @@ def generate_regex_error( module_to_call: str, attribute_name: str, invalid_entry: str, - sg: SchemaGenerator, + DME: DataModelGraphExplorer, ) -> List[str]: """ Purpose: @@ -161,7 +162,7 @@ def generate_regex_error( raises = GenerateError.get_message_level( val_rule = val_rule, attribute_name = attribute_name, - sg = sg, + DME = DME, ) #if a message needs to be raised, get the approrpiate function to do so @@ -190,7 +191,7 @@ def generate_regex_error( return error_list, warning_list def generate_type_error( - val_rule: str, row_num: str, attribute_name: str, invalid_entry:str, sg: SchemaGenerator, + val_rule: str, row_num: str, attribute_name: str, invalid_entry:str, DME: DataModelGraphExplorer, ) -> List[str]: """ Purpose: @@ -208,12 +209,12 @@ def generate_type_error( error_list = [] warning_list = [] - + #Determine which, if any, message to raise raises = GenerateError.get_message_level( - val_rule = val_rule, + DME = DME, attribute_name = attribute_name, - sg = sg, + val_rule = val_rule, ) #if a message needs to be raised, get the approrpiate function to do so @@ -231,8 +232,15 @@ def generate_type_error( error_message = type_error_str error_val = invalid_entry + #TODO: not sure if this i needed (to split) + validation_rules=DME.get_node_validation_rules(attribute_name) + + #TODO: Can remove when handling updated so split within graph + if validation_rules and '::' in validation_rules[0]: + validation_rules = validation_rules[0].split("::") + # If IsNA rule is being used to allow `Not Applicable` entries, do not log a message - if error_val.lower() == 'not applicable' and rule_in_rule_list('IsNA', sg.get_node_validation_rules(sg.get_node_label(attribute_name))): + if error_val.lower() == 'not applicable' and rule_in_rule_list('IsNA', validation_rules): pass else: logLevel(type_error_str) @@ -247,7 +255,7 @@ def generate_type_error( def generate_url_error( url: str, url_error: str, row_num: str, attribute_name: str, argument: str, - invalid_entry:str, 
sg: SchemaGenerator, val_rule: str, + invalid_entry:str, DME: DataModelGraphExplorer, val_rule: str, ) -> List[str]: """ Purpose: @@ -281,7 +289,7 @@ def generate_url_error( raises = GenerateError.get_message_level( val_rule = val_rule, attribute_name = attribute_name, - sg = sg, + DME = DME, ) #if a message needs to be raised, get the approrpiate function to do so @@ -331,7 +339,7 @@ def generate_url_error( def generate_cross_warning( val_rule: str, attribute_name: str, - sg: SchemaGenerator, + DME: DataModelGraphExplorer, matching_manifests = [], missing_manifest_ID = None, invalid_entry = None, @@ -361,7 +369,7 @@ def generate_cross_warning( raises = GenerateError.get_message_level( val_rule = val_rule, attribute_name = attribute_name, - sg = sg, + DME = DME, ) #if a message needs to be raised, get the approrpiate function to do so @@ -409,7 +417,7 @@ def generate_cross_warning( def generate_content_error( val_rule: str, attribute_name: str, - sg: SchemaGenerator, + DME: DataModelGraphExplorer, row_num = None, error_val = None, ) -> (List[str], List[str]): @@ -424,7 +432,7 @@ def generate_content_error( Input: val_rule: str, defined in the schema. attribute_name: str, attribute being validated - sg: schemaGenerator object + DME: DataModelGraphExplorer object row_num: str, row where the error was detected error_val: value duplicated @@ -443,7 +451,7 @@ def generate_content_error( raises = GenerateError.get_message_level( val_rule=val_rule, attribute_name = attribute_name, - sg = sg, + DME = DME, ) #if a message needs to be raised, get the approrpiate function to do so @@ -505,7 +513,7 @@ def generate_content_error( return error_list, warning_list def get_message_level( - sg: SchemaGenerator, + DME: DataModelGraphExplorer, attribute_name: str, val_rule: str, ) -> str: @@ -521,7 +529,7 @@ def get_message_level( Input: val_rule: str, defined in the schema. 
- sg: schemaGenerator object + DME: DataModelGraphExplorer object attribute_name: str, attribute being validated Returns: 'error', 'warning' or None @@ -535,16 +543,15 @@ def get_message_level( #set message level to default and change after if rule_parts[0] != 'schema': level = rule_info[rule_parts[0]]['default_message_level'] - # Parse rule for level, set to default if not specified if rule_parts[-1].lower() == 'error' or rule_parts[0] == 'schema': level = 'error' elif rule_parts[-1].lower() == 'warning': level = 'warning' - elif not sg.is_node_required(node_display_name=attribute_name): + elif not DME.get_node_required(node_display_name=attribute_name): # If not required raise warnings to notify level = 'warning' - elif sg.is_node_required(node_display_name=attribute_name) and 'recommended' in val_rule: + elif DME.get_node_required(node_display_name=attribute_name) and 'recommended' in val_rule: level = None return level @@ -589,7 +596,7 @@ def get_target_manifests(target_component, project_scope: List): return synStore, target_manifest_IDs, target_dataset_IDs def list_validation( - self, val_rule: str, manifest_col: pd.core.series.Series, sg: SchemaGenerator, + self, val_rule: str, manifest_col: pd.core.series.Series, DME: DataModelGraphExplorer, ) -> (List[List[str]], List[List[str]], pd.core.series.Series): """ Purpose: @@ -630,7 +637,7 @@ def list_validation( attribute_name=manifest_col.name, list_error=list_error, invalid_entry=manifest_col[i], - sg = sg, + DME = DME, val_rule = val_rule, ) if vr_errors: @@ -645,7 +652,7 @@ def list_validation( return errors, warnings, manifest_col def regex_validation( - self, val_rule: str, manifest_col: pd.core.series.Series, sg: SchemaGenerator, + self, val_rule: str, manifest_col: pd.core.series.Series, DME: DataModelGraphExplorer, ) -> (List[List[str]], List[List[str]]): """ Purpose: @@ -655,6 +662,7 @@ def regex_validation( - val_rule: str, Validation rule - manifest_col: pd.core.series.Series, column for a given 
attribute in the manifest + - DME: DataModelGraphExplorer Object Using this module requres validation rules written in the following manner: 'regex module regular expression' - regex: is an exact string specifying that the input is to be validated as a @@ -685,7 +693,10 @@ def regex_validation( errors = [] warnings = [] - validation_rules=self.sg.se.get_class_validation_rules(self.sg.se.get_class_label_from_display_name(manifest_col.name)) + + validation_rules = DME.get_node_validation_rules(manifest_col.name) + if validation_rules and '::' in validation_rules[0]: + validation_rules = validation_rules[0].split("::") # Handle case where validating re's within a list. if re.search('list',"|".join(validation_rules)): if type(manifest_col[0]) == str: @@ -705,7 +716,7 @@ def regex_validation( module_to_call=reg_exp_rules[1], attribute_name=manifest_col.name, invalid_entry=manifest_col[i], - sg = sg, + DME = DME, ) if vr_errors: errors.append(vr_errors) @@ -726,7 +737,7 @@ def regex_validation( module_to_call=reg_exp_rules[1], attribute_name=manifest_col.name, invalid_entry=manifest_col[i], - sg = sg, + DME = DME, ) if vr_errors: errors.append(vr_errors) @@ -736,7 +747,7 @@ def regex_validation( return errors, warnings def type_validation( - self, val_rule: str, manifest_col: pd.core.series.Series, sg: SchemaGenerator, + self, val_rule: str, manifest_col: pd.core.series.Series, DME: DataModelGraphExplorer, ) -> (List[List[str]], List[List[str]]): """ Purpose: @@ -747,6 +758,7 @@ def type_validation( 'float', 'int', 'num', 'str' - manifest_col: pd.core.series.Series, column for a given attribute in the manifest + - DME: DataModelGraphExplorer Object Returns: -This function will return errors when the user input value does not match schema specifications. 
@@ -774,7 +786,7 @@ def type_validation( row_num=str(i + 2), attribute_name=manifest_col.name, invalid_entry=str(manifest_col[i]), - sg = sg, + DME = DME, ) if vr_errors: errors.append(vr_errors) @@ -788,7 +800,7 @@ def type_validation( row_num=str(i + 2), attribute_name=manifest_col.name, invalid_entry=str(manifest_col[i]), - sg = sg, + DME = DME, ) if vr_errors: errors.append(vr_errors) @@ -796,7 +808,7 @@ def type_validation( warnings.append(vr_warnings) return errors, warnings - def url_validation(self, val_rule: str, manifest_col: str, sg: SchemaGenerator,) -> (List[List[str]], List[List[str]]): + def url_validation(self, val_rule: str, manifest_col: str, DME: DataModelGraphExplorer) -> (List[List[str]], List[List[str]]): """ Purpose: Validate URL's submitted for a particular attribute in a manifest. @@ -806,6 +818,7 @@ def url_validation(self, val_rule: str, manifest_col: str, sg: SchemaGenerator,) - val_rule: str, Validation rule - manifest_col: pd.core.series.Series, column for a given attribute in the manifest + - DME: DataModelGraphExplorer Object Output: This function will return errors when the user input value does not match schema specifications. 
@@ -835,7 +848,7 @@ def url_validation(self, val_rule: str, manifest_col: str, sg: SchemaGenerator,) attribute_name=manifest_col.name, argument=url_args, invalid_entry=manifest_col[i], - sg = sg, + DME = DME, val_rule = val_rule, ) if vr_errors: @@ -863,7 +876,7 @@ def url_validation(self, val_rule: str, manifest_col: str, sg: SchemaGenerator,) attribute_name=manifest_col.name, argument=url_args, invalid_entry=manifest_col[i], - sg = sg, + DME = DME, val_rule = val_rule, ) if vr_errors: @@ -883,7 +896,7 @@ def url_validation(self, val_rule: str, manifest_col: str, sg: SchemaGenerator,) attribute_name=manifest_col.name, argument=arg, invalid_entry=manifest_col[i], - sg = sg, + DME = DME, val_rule = val_rule, ) if vr_errors: @@ -893,7 +906,7 @@ def url_validation(self, val_rule: str, manifest_col: str, sg: SchemaGenerator,) return errors, warnings def cross_validation( - self, val_rule: str, manifest_col: pd.core.series.Series, project_scope: List, sg: SchemaGenerator, + self, val_rule: str, manifest_col: pd.core.series.Series, project_scope: List, DME: DataModelGraphExplorer, ) -> List[List[str]]: """ Purpose: @@ -903,6 +916,7 @@ def cross_validation( - val_rule: str, Validation rule - manifest_col: pd.core.series.Series, column for a given attribute in the manifest + - DME: DataModelGraphExplorer Object Output: This function will return errors when values in the current manifest's attribute are not fully present in the correct amount of other manifests. 
@@ -980,7 +994,7 @@ def cross_validation( row_num = missing_rows, attribute_name = source_attribute, invalid_entry = iterable_to_str_list(missing_values), - sg = sg, + DME = DME, ) if vr_errors: errors.append(vr_errors) @@ -995,7 +1009,7 @@ def cross_validation( row_num = invalid_rows, attribute_name = source_attribute, invalid_entry = iterable_to_str_list(invalid_values.squeeze()), - sg = sg, + DME = DME, ) if vr_errors: errors.append(vr_errors) @@ -1022,7 +1036,7 @@ def cross_validation( attribute_name = source_attribute, invalid_entry = missing_values, missing_manifest_ID = missing_manifest_IDs, - sg = sg, + DME = DME, ) if vr_errors: errors.append(vr_errors) @@ -1033,7 +1047,7 @@ def cross_validation( val_rule = val_rule, attribute_name = source_attribute, matching_manifests = present_manifest_log, - sg = sg, + DME = DME, ) if vr_errors: errors.append(vr_errors) diff --git a/schematic/models/validate_manifest.py b/schematic/models/validate_manifest.py index 244e75d9a..cd1a425e1 100644 --- a/schematic/models/validate_manifest.py +++ b/schematic/models/validate_manifest.py @@ -19,7 +19,8 @@ from urllib import error from schematic.models.validate_attribute import ValidateAttribute, GenerateError -from schematic.schemas.generator import SchemaGenerator +#from schematic.schemas.generator import SchemaGenerator +from schematic.schemas.data_model_graph import DataModelGraphExplorer from schematic.store.synapse import SynapseStorage from schematic.models.GE_Helpers import GreatExpectationsHelpers from schematic.utils.validate_rules_utils import validation_rule_info @@ -28,11 +29,11 @@ logger = logging.getLogger(__name__) class ValidateManifest(object): - def __init__(self, errors, manifest, manifestPath, sg, jsonSchema): + def __init__(self, errors, manifest, manifestPath, DME, jsonSchema): self.errors = errors self.manifest = manifest self.manifestPath = manifestPath - self.sg = sg + self.DME = DME self.jsonSchema = jsonSchema def get_multiple_types_error( @@ -62,7 
+63,7 @@ def get_multiple_types_error( return ["NA", error_col, error_message, error_val] def validate_manifest_rules( - self, manifest: pd.core.frame.DataFrame, sg: SchemaGenerator, restrict_rules: bool, project_scope: List, + self, manifest: pd.core.frame.DataFrame, DME: DataModelGraphExplorer, restrict_rules: bool, project_scope: List, ) -> (pd.core.frame.DataFrame, List[List[str]]): """ Purpose: @@ -72,7 +73,7 @@ def validate_manifest_rules( manifest: pd.core.frame.DataFrame imported from models/metadata.py contains metadata input from user for each attribute. - sg: SchemaGenerator + DME: DataModelGraphExplorer initialized within models/metadata.py Returns: manifest: pd.core.frame.DataFrame @@ -129,7 +130,7 @@ def validate_manifest_rules( t_GE = perf_counter() #operations necessary to set up and run ge suite validation ge_helpers=GreatExpectationsHelpers( - sg=sg, + DME=DME, unimplemented_expectations=unimplemented_expectations, manifest = manifest, manifestPath = self.manifestPath, @@ -154,8 +155,7 @@ def validate_manifest_rules( finally: ge_helpers.context.delete_checkpoint(ge_helpers.checkpoint_name) - validation_results = results.list_validation_results() - + validation_results = results.list_validation_results() #parse validation results dict and generate errors errors, warnings = ge_helpers.generate_errors( @@ -163,7 +163,7 @@ def validate_manifest_rules( warnings = warnings, validation_results = validation_results, validation_types = validation_types, - sg = sg, + DME = DME, ) logger.debug(f"GE elapsed time {perf_counter()-t_GE}") else: @@ -175,7 +175,11 @@ def validate_manifest_rules( # remove trailing/leading whitespaces from manifest manifest.applymap(lambda x: x.strip() if isinstance(x, str) else x) - validation_rules = sg.get_node_validation_rules(col) + validation_rules = DME.get_node_validation_rules(col) + + #TODO: Can remove when handling updated so split within graph + if validation_rules and '::' in validation_rules[0]: + validation_rules = 
validation_rules[0].split("::") # Check that attribute rules conform to limits: # no more than two rules for an attribute. @@ -203,16 +207,16 @@ def validate_manifest_rules( if validation_type == "list": vr_errors, vr_warnings, manifest_col = validation_method( - self, rule, manifest[col], sg, + self, rule, manifest[col], DME, ) manifest[col] = manifest_col elif validation_type.lower().startswith("match"): vr_errors, vr_warnings = validation_method( - self, rule, manifest[col], project_scope, sg, + self, rule, manifest[col], project_scope, DME, ) else: vr_errors, vr_warnings = validation_method( - self, rule, manifest[col], sg, + self, rule, manifest[col], DME, ) # Check for validation rule errors and add them to other errors. if vr_errors: @@ -223,7 +227,7 @@ def validate_manifest_rules( logger.debug(f"In House validation elapsed time {perf_counter()-t_err}") return manifest, errors, warnings - def validate_manifest_values(self, manifest, jsonSchema, sg + def validate_manifest_values(self, manifest, jsonSchema, DME, ) -> (List[List[str]], List[List[str]]): t_json_schema = perf_counter() @@ -246,7 +250,7 @@ def validate_manifest_values(self, manifest, jsonSchema, sg errorMsg = error.message[0:500] errorVal = error.instance if len(error.path) > 0 else "Wrong schema" - val_errors, val_warnings = GenerateError.generate_schema_error(row_num = errorRow, attribute_name = errorColName, error_msg = errorMsg, invalid_entry = errorVal, sg = sg) + val_errors, val_warnings = GenerateError.generate_schema_error(row_num = errorRow, attribute_name = errorColName, error_msg = errorMsg, invalid_entry = errorVal, DME = DME) if val_errors: errors.append(val_errors) @@ -256,15 +260,15 @@ def validate_manifest_values(self, manifest, jsonSchema, sg return errors, warnings -def validate_all(self, errors, warnings, manifest, manifestPath, sg, jsonSchema, restrict_rules, project_scope: List): - vm = ValidateManifest(errors, manifest, manifestPath, sg, jsonSchema) - manifest, vmr_errors, 
vmr_warnings = vm.validate_manifest_rules(manifest, sg, restrict_rules, project_scope) +def validate_all(self, errors, warnings, manifest, manifestPath, DME, jsonSchema, restrict_rules, project_scope: List): + vm = ValidateManifest(errors, manifest, manifestPath, DME, jsonSchema) + manifest, vmr_errors, vmr_warnings = vm.validate_manifest_rules(manifest, DME, restrict_rules, project_scope) if vmr_errors: errors.extend(vmr_errors) if vmr_warnings: warnings.extend(vmr_warnings) - vmv_errors, vmv_warnings = vm.validate_manifest_values(manifest, jsonSchema, sg) + vmv_errors, vmv_warnings = vm.validate_manifest_values(manifest, jsonSchema, DME) if vmv_errors: errors.extend(vmv_errors) if vmv_warnings: diff --git a/schematic/schemas/__init__.py b/schematic/schemas/__init__.py index 93df34ead..7943ef50e 100644 --- a/schematic/schemas/__init__.py +++ b/schematic/schemas/__init__.py @@ -1,3 +1,7 @@ -from schematic.schemas.explorer import SchemaExplorer -from schematic.schemas.generator import SchemaGenerator -from schematic.schemas.validator import SchemaValidator +from schematic.schemas.data_model_edges import DataModelEdges +from schematic.schemas.data_model_nodes import DataModelNodes +from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer +from schematic.schemas.data_model_json_schema import DataModelJSONSchema +from schematic.schemas.data_model_jsonld import DataModelJsonLD +from schematic.schemas.data_model_relationships import DataModelRelationships +from schematic.schemas.data_model_validator import DataModelValidator diff --git a/schematic/schemas/commands.py b/schematic/schemas/commands.py index 93b6e4cf3..a9f0198b3 100644 --- a/schematic/schemas/commands.py +++ b/schematic/schemas/commands.py @@ -4,17 +4,16 @@ import click_log import logging import sys -#TODO Remove timing after development import time import re from schematic.schemas.data_model_parser import DataModelParser -from schematic.schemas.data_model_graph import 
DataModelGraph +from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer from schematic.schemas.data_model_validator import DataModelValidator from schematic.schemas.data_model_jsonld import DataModelJsonLD, convert_graph_to_jsonld from schematic.utils.cli_utils import query_dict -from schematic.utils.schema_util import export_schema +from schematic.utils.schema_utils import export_schema from schematic.help import schema_commands logger = logging.getLogger('schematic') @@ -58,9 +57,9 @@ def convert(schema, base_schema, output_jsonld): """ Running CLI to convert data model specification in CSV format to data model in JSON-LD format. - """ - # TO DO: Throw these steps into their own function + TODO: Throw actual errors in the future rather than just logging. + """ # get the start time st = time.time() @@ -82,7 +81,7 @@ def convert(schema, base_schema, output_jsonld): # Validate generated data model. logger.info("Validating the data model internally.") - data_model_validator = DataModelValidator(data_model=graph_data_model) + data_model_validator = DataModelValidator(graph=graph_data_model) data_model_errors = data_model_validator.run_checks() # If there are errors log them. @@ -93,9 +92,7 @@ def convert(schema, base_schema, output_jsonld): elif isinstance(err, list): for e in err: logger.error(e) - # Actually raise error here with message. 
- - #data_model_jsonld_converter = DataModelJsonLD() + logger.info("Converting data model to JSON-LD") jsonld_data_model = convert_graph_to_jsonld(Graph=graph_data_model) @@ -123,26 +120,3 @@ def convert(schema, base_schema, output_jsonld): # get the execution time elapsed_time = time.strftime("%M:%S", time.gmtime(et - st)) click.echo(f"Execution time: {elapsed_time} (M:S)") - - ''' - # convert RFC to Data Model - base_se = _convert_csv_to_data_model(schema_csv, base_schema) - - # output JSON-LD file alongside CSV file by default - if output_jsonld is None: - csv_no_ext = re.sub("[.]csv$", "", schema_csv) - output_jsonld = csv_no_ext + ".jsonld" - - logger.info( - "By default, the JSON-LD output will be stored alongside the first " - f"input CSV file. In this case, it will appear here: '{output_jsonld}'. " - "You can use the `--output_jsonld` argument to specify another file path." - ) - - # saving updated schema.org schema - try: - base_se.export_schema(output_jsonld) - click.echo(f"The Data Model was created and saved to '{output_jsonld}' location.") - except: - click.echo(f"The Data Model could not be created by using '{output_jsonld}' location. 
Please check your file path again") - ''' diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index 6fbe13c85..d2486745b 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -27,7 +27,7 @@ from schematic.utils.general import find_duplicates from schematic.utils.io_utils import load_default, load_json, load_schemaorg -from schematic.utils.schema_util import get_property_label_from_display_name, get_class_label_from_display_name +from schematic.utils.schema_utils import get_property_label_from_display_name, get_class_label_from_display_name from schematic.utils.general import dict2list, unlist from schematic.utils.viz_utils import visualize from schematic.utils.validate_utils import ( @@ -112,7 +112,7 @@ def generate_data_model_graph(self): G = self.dme.generate_edge(G, node, all_node_dict, self.attribute_relationships_dict, edge_relationships) return G -class DataModelGraphExporer(): +class DataModelGraphExplorer(): def __init__(self, G,): ''' @@ -130,6 +130,7 @@ def __init__(self, self.reqComp_ek = self.rel_dict['requiresComponent']['edge_key'] self.reqDep_ek = self.rel_dict['requiresDependency']['edge_key'] self.subClassOf_ek = self.rel_dict['subClassOf']['edge_key'] + self.rangeIncludes_ek = self.rel_dict['rangeIncludes']['edge_key'] # Node Labels self.displayName_nl = self.rel_dict['displayName']['node_label'] @@ -154,6 +155,15 @@ def find_classes(self): classes = nodes - properties return classes + def find_node_range(self, attribute): + valid_values=[] + for node_1, node_2, rel in self.graph.edges: + if node_1 == attribute and rel == self.rangeIncludes_ek: + valid_values.append(node_2) + valid_values = list(set(valid_values)) + return valid_values + + def get_adjacent_nodes_by_relationship(self, node: str, relationship: str) -> List[str]: @@ -215,7 +225,7 @@ def get_component_requirements_graph(self, # get the subgraph induced on required component nodes req_components_graph = 
self.get_subgraph_by_edge_type( - self.graph, self.reqComp_ek, + self.reqComp_ek, ).subgraph(req_components) return req_components_graph @@ -286,7 +296,10 @@ def get_descendants_by_edge_type(self, return list(descendants) - def get_digraph_by_edge_type(self): + def get_digraph_by_edge_type(self, edge_type): + ''' + TODO: rename to get_digraph, since edge type parameter is not used, will take it now for legacy. + ''' digraph = nx.DiGraph() for (u, v, key, c) in self.graph.edges(data=True, keys=True): @@ -344,6 +357,16 @@ def get_ordered_entry(self, key: str, source_node_label:str): # Get values associated with a node # TODO: make sure all these gets follow the same pattern for clarity + def get_nodes_ancestors(self, graph, component): + """ + Return a list of nodes reachable from source in graph + graph: networkx graph object + component: any given node + """ + all_ancestors = list(nx.ancestors(graph, component)) + + return all_ancestors + def get_node_comment(self, node_display_name: str = None, node_label: str= None) -> str: """Get the node definition, i.e., the "comment" associated with a given node display name. @@ -400,6 +423,16 @@ def get_node_dependencies(self, return required_dependencies + def get_nodes_descendants(self, component): + """ + Return a list of nodes reachable from source in graph + graph: networkx graph object + component: any given node + """ + all_descendants = list(nx.descendants(self.graph, component)) + + return all_descendants + def get_nodes_display_names( self, node_list: List[str], ) -> List[str]: @@ -444,6 +477,36 @@ def get_node_label(self, node_display_name: str) -> str: return node_label + def get_node_range(self, node_label: str, display_names: bool = True) -> List[str]: + """Get the range, i.e., all the valid values that are associated with a node label. + + Args: + node_label: Node / termn for which you need to retrieve the range. + + Returns: + List of display names of nodes associateed with the given node. 
+ """ + try: + # get node range in the order defined in schema for given node + #required_range = self.graph.explore_class(node_label)["range"] + required_range = self.find_node_range(attribute = node_label) + except KeyError: + raise ValueError( + f"The source node {node_label} does not exist in the graph. " + "Please use a different node." + ) + + if display_names: + # get the display name(s) of all dependencies + dependencies_display_names = [] + + for req in required_range: + dependencies_display_names.append(self.graph.nodes[req]["displayName"]) + + return dependencies_display_names + + return required_range + def get_node_required(self, node_display_name: str = None, node_label:str = None) -> bool: """Check if a given node is required or not. @@ -511,15 +574,23 @@ def get_subgraph_by_edge_type( def find_adjacent_child_classes(self, schema_class): return self.get_adjacent_nodes_by_relationship(node = schema_class, relationship = self.subClassOf_ek) + def find_child_classes(self, schema_class): + """Find schema classes that inherit from the given class""" + return unlist(list(self.graph.successors(schema_class))) + def find_class_specific_properties(self, schema_class): """Find properties specifically associated with a given class""" #This is called directly from the API # Needs to be refactored no longer be JSONLD specific - - breakpoint() - schema_uri = self.graph.nodes[schema_class]["uri"] + breakpoint() + #schema_uri = self.graph.nodes[schema_class]["uri"] properties = [] + for k, v in self.graph[schema_class]: + if 'domainIncludes' in v.keys(): + properties.append(k) + ''' + for record in self.schema["@graph"]: if record["@type"] == "rdf:Property": if ( @@ -537,4 +608,49 @@ def find_class_specific_properties(self, schema_class): != [] ): properties.append(record["rdfs:label"]) + ''' return properties + + def find_parent_classes(self, schema_class): + """Find all parents of the class""" + + digraph = self.get_digraph_by_edge_type("parentOf") + + root_node = 
list(nx.topological_sort(digraph))[0] + + paths = nx.all_simple_paths( + self.graph, source=root_node, target=schema_class + ) + # print(root_node) + return [_path[:-1] for _path in paths] + + def full_schema_graph(self, size=None): + edges = self.graph.edges() + return visualize(edges, size=size) + + def is_class_in_schema(self, class_label): + if self.graph.nodes[class_label]: + return True + else: + return False + + def sub_schema_graph(self, source, direction, size=None): + if direction == "down": + edges = list(nx.edge_bfs(self.graph, [source])) + return visualize(edges, size=size) + elif direction == "up": + paths = self.find_parent_classes(source) + edges = [] + for _path in paths: + _path.append(source) + for i in range(0, len(_path) - 1): + edges.append((_path[i], _path[i + 1])) + return visualize(edges, size=size) + elif direction == "both": + paths = self.find_parent_classes(source) + edges = list(nx.edge_bfs(self.graph, [source])) + for _path in paths: + _path.append(source) + for i in range(0, len(_path) - 1): + edges.append((_path[i], _path[i + 1])) + return visualize(edges, size=size) diff --git a/schematic/schemas/data_model_json_schema.py b/schematic/schemas/data_model_json_schema.py index 5817b84ba..0699bb819 100644 --- a/schematic/schemas/data_model_json_schema.py +++ b/schematic/schemas/data_model_json_schema.py @@ -3,7 +3,7 @@ import os from typing import Any, Dict, Optional, Text, List -from schematic.schemas.data_model_graph import DataModelGraphExporer +from schematic.schemas.data_model_graph import DataModelGraphExplorer from schematic.schemas.data_model_relationships import DataModelRelationships from schematic.utils.validate_utils import rule_in_rule_list @@ -14,7 +14,7 @@ def __init__(self, jsonld_path: str, graph:nx.MultiDiGraph, ): self.jsonld_path = jsonld_path self.graph = graph - self.DME = DataModelGraphExporer(self.graph) + self.DME = DataModelGraphExplorer(self.graph) self.dmr = DataModelRelationships() self.rel_dict = 
self.dmr.relationships_dictionary diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 1c12c91d0..78f8ad64f 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -2,9 +2,9 @@ from typing import Any, Dict, Optional, Text, List import networkx as nx -from schematic.schemas.data_model_graph import DataModelGraphExporer +from schematic.schemas.data_model_graph import DataModelGraphExplorer from schematic.schemas.data_model_relationships import DataModelRelationships -from schematic.utils.schema_util import get_label_from_display_name, get_display_name_from_label, convert_bool +from schematic.utils.schema_utils import get_label_from_display_name, get_display_name_from_label, convert_bool class DataModelJsonLD(object): @@ -12,7 +12,7 @@ class DataModelJsonLD(object): #Interface to JSONLD_object ''' - def __init__(self, Graph: nx.MultiDiGraph): + def __init__(self, Graph: nx.MultiDiGraph, output_path:str = ''): # Setup self.graph = Graph self.dmr = DataModelRelationships() @@ -22,7 +22,8 @@ def __init__(self, Graph: nx.MultiDiGraph): self.jsonld_class = JSONLD_class(self.jsonld_object) self.jsonld_property = JSONLD_property(self.jsonld_object) ''' - self.DME = DataModelGraphExporer(self.graph) + self.DME = DataModelGraphExplorer(self.graph) + self.output_path = output_path def base_jsonld_template(self): @@ -30,7 +31,14 @@ def base_jsonld_template(self): #Base starter template, to be filled out with model. For entire file. TODO: when done adding contexts fill out this section here. 
""" - base_template = {"@context": {}, + base_template = { + "@context": { + "bts": "http://schema.biothings.io/", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + "rdfs": "http://www.w3.org/2000/01/rdf-schema#", + "schema": "http://schema.org/", + "xsd": "http://www.w3.org/2001/XMLSchema#", + }, "@graph": [], "@id": "http://schema.biothings.io/#0.1", } @@ -57,10 +65,6 @@ def create_object(self, template, node): node_edge_relationships = self.graph[node_1][node_2] edge_rel = rel_vals['edge_key'] - - - #node_edge_key_rels = [for rel in node_edge_relationships.keys] - # Check if key_rel is even one of the relationships for this node pair. #if key_rel in node_edge_relationships: if edge_rel in node_edge_relationships: @@ -70,7 +74,6 @@ def create_object(self, template, node): if relationship == edge_rel: if edge_rel in ['domainIncludes', 'parentOf']: - #breakpoint() if node_2 == node: # Make sure the key is in the template (differs between properties and classes) if rel_vals['jsonld_key'] in template.keys(): @@ -96,8 +99,10 @@ def create_object(self, template, node): else: # attribute here refers to node attibutes (come up with better name.) node_attribute_name = rel_vals['node_label'] + # Get recorded info for current node, and the attribute type node_info = nx.get_node_attributes(self.graph, node_attribute_name)[node] + # Add this information to the template template[rel_vals['jsonld_key']] = node_info # Clean up template @@ -159,17 +164,17 @@ def reorder_template_entries(self, template): template (dict): list entries re-ordered to match user supplied order. 
''' - - # user order only matters for nodes that are also attributes template_label = template['rdfs:label'] for jsonld_key, entry in template.items(): + # Make sure dealing with an edge relationship: + is_edge = ['True' for k, v in self.rel_dict.items() if v['jsonld_key']==jsonld_key if v['edge_rel'] == True] + #if the entry is of type list and theres more than one value in the list attempt to reorder - if isinstance(entry, list) and len(entry)>1: + if is_edge and isinstance(entry, list) and len(entry)>1: # Get edge key from data_model_relationships using the jsonld_key: key, edge_key = [(k, v['edge_key']) for k, v in self.rel_dict.items() if jsonld_key == v['jsonld_key']][0] - # Order edges sorted_edges = self.DME.get_ordered_entry(key=key, source_node_label=template_label) edge_weights_dict={edge:i for i, edge in enumerate(sorted_edges)} @@ -177,9 +182,9 @@ def reorder_template_entries(self, template): for k,v in edge_weights_dict.items(): ordered_edges[v] = {'@id': 'bts:' + k} - # TODO: Throw an error if ordered_edges does not get fully filled as expected. + # Throw an error if ordered_edges does not get fully filled as expected. 
if 0 in ordered_edges: - breakpoint() + logger.error("There was an issue getting values to match order specified in the data model, please submit a help request.") template[jsonld_key] = ordered_edges return template @@ -249,7 +254,7 @@ def __init__(self, Graph: nx.MultiDiGraph): self.jsonld_object = JSONLD_object(DataModelJsonLD) self.jsonld_class = JSONLD_class(self.jsonld_object) self.jsonld_property = JSONLD_property(self.jsonld_object) - self.DME = DataModelGraphExporer(self.graph) + self.DME = DataModelGraphExplorer(self.graph) def generate_jsonld_object(self): ''' diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index bf9c87743..104133b95 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -5,7 +5,7 @@ DataModelRelationships ) -from schematic.utils.schema_util import get_label_from_display_name, get_display_name_from_label, convert_bool +from schematic.utils.schema_utils import get_label_from_display_name, get_display_name_from_label, convert_bool, parse_validation_rules from schematic.utils.validate_rules_utils import validate_schema_rules from schematic.schemas.curie import uri2curie, curie2uri @@ -105,6 +105,8 @@ def run_rel_functions(self, rel_func, node_display_name='', key='', attr_relatio func_output = '' if rel_func == get_display_name_from_label: func_output = get_display_name_from_label(node_display_name, attr_relationships) + elif rel_func == parse_validation_rules: + func_output = parse_validation_rules(attr_relationships[csv_header]) elif key == 'id' and rel_func == get_label_from_display_name: #func_output = 'bts:' + get_label_from_display_name(display_name =node_display_name, entry_type=entry_type) func_output = get_label_from_display_name(display_name =node_display_name, entry_type=entry_type) diff --git a/schematic/schemas/data_model_relationships.py b/schematic/schemas/data_model_relationships.py index a4d995f33..4bf18f360 100644 --- 
a/schematic/schemas/data_model_relationships.py +++ b/schematic/schemas/data_model_relationships.py @@ -1,5 +1,5 @@ from typing import Dict -from schematic.utils.schema_util import get_label_from_display_name, get_display_name_from_label, convert_bool +from schematic.utils.schema_utils import get_label_from_display_name, get_display_name_from_label, convert_bool, parse_validation_rules from schematic.schemas.curie import uri2curie, curie2uri class DataModelRelationships(): @@ -146,6 +146,7 @@ def define_data_model_relationships(self) -> Dict: 'edge_rel': False, 'required_header': True, 'node_attr_dict':{'default': [], + 'standard': parse_validation_rules, }, }, 'domainIncludes': { diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index cdc93f434..bc39947c5 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -46,8 +46,9 @@ from schematic.utils.df_utils import update_df, load_df, col_in_dataframe, populate_df_col_with_another_col from schematic.utils.validate_utils import comma_separated_list_regex, rule_in_rule_list from schematic.utils.general import entity_type_mapping, get_dir_size, convert_size, convert_gb_to_bytes, create_temp_folder -from schematic.schemas.explorer import SchemaExplorer -from schematic.schemas.generator import SchemaGenerator +from schematic.utils.schema_utils import get_class_label_from_display_name +from schematic.schemas.data_model_graph import DataModelGraphExplorer + from schematic.store.base import BaseStorage from schematic.exceptions import MissingConfigValueError, AccessCredentialsError @@ -541,10 +542,11 @@ def getDataTypeFromManifest(self, manifestId:str): return result_dict - def updateDatasetManifestFiles(self, sg: SchemaGenerator, datasetId: str, store:bool = True) -> Union[Tuple[str, pd.DataFrame], None]: + def updateDatasetManifestFiles(self, DME: DataModelGraphExplorer, datasetId: str, store:bool = True) -> Union[Tuple[str, pd.DataFrame], None]: """Fetch the names and entity IDs of 
all current files in dataset in store, if any; update dataset's manifest with new files, if any. Args: + DME: DataModelGraphExplorer Instance datasetId: synapse ID of a storage dataset. store: if set to True store updated manifest in asset store; if set to False return a Pandas dataframe containing updated manifest but do not store to asset store @@ -588,7 +590,7 @@ def updateDatasetManifestFiles(self, sg: SchemaGenerator, datasetId: str, store: manifest.to_csv(manifest_filepath, index=False) # store manifest and update associated metadata with manifest on Synapse - manifest_id = self.associateMetadataWithFiles(sg, manifest_filepath, datasetId) + manifest_id = self.associateMetadataWithFiles(DME, manifest_filepath, datasetId) manifest = manifest.fillna("") @@ -760,8 +762,20 @@ def upload_annotated_project_manifests_to_synapse(self, projectId:str, path_to_j Assumes the manifest is already present as a CSV in a dataset in the project. ''' + # Instantiate DataModelParser + data_model_parser = DataModelParser(path_to_data_model = path_to_json_ld) + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + #Instantiate DataModelGraphExplorer + DME = DataModelGraphExplorer(graph_data_model) - sg = SchemaGenerator(path_to_json_ld) manifests = [] manifest_loaded = [] datasets = self.getStorageDatasetsInProject(projectId) @@ -780,7 +794,7 @@ def upload_annotated_project_manifests_to_synapse(self, projectId:str, path_to_j manifest_path = manifest_info["path"] manifest = ((datasetId, datasetName), (manifest_id, manifest_name), ("", "")) if not dry_run: - manifest_syn_id = self.associateMetadataWithFiles(sg, manifest_path, datasetId, manifest_record_type='table') + manifest_syn_id = self.associateMetadataWithFiles(DME, manifest_path, datasetId, manifest_record_type='table') 
manifest_loaded.append(manifest) return manifests, manifest_loaded @@ -879,7 +893,7 @@ def get_table_info(self, datasetId: str = None, projectId: str = None) -> List[s @missing_entity_handler def uploadDB(self, - sg: SchemaGenerator, + DME: DataModelGraphExplorer, manifest: pd.DataFrame, datasetId: str, table_name: str, @@ -891,7 +905,7 @@ def uploadDB(self, Method to upload a database to an asset store. In synapse, this will upload a metadata table Args: - se: schemaExplorer object + DME: DataModelGraphExplorer object manifest: pd.Df manifest to upload datasetId: synID of the dataset for the manifest table_name: name of the table to be uploaded @@ -908,18 +922,18 @@ def uploadDB(self, """ - col_schema, table_manifest = self.formatDB(sg, manifest, useSchemaLabel) + col_schema, table_manifest = self.formatDB(DME, manifest, useSchemaLabel) - manifest_table_id = self.buildDB(datasetId, table_name, col_schema, table_manifest, table_manipulation, sg, restrict,) + manifest_table_id = self.buildDB(datasetId, table_name, col_schema, table_manifest, table_manipulation, DME, restrict,) return manifest_table_id, manifest, table_manifest - def formatDB(self, sg, manifest, useSchemaLabel): + def formatDB(self, DME, manifest, useSchemaLabel): """ Method to format a manifest appropriatly for upload as table Args: - se: schemaExplorer object + DME: DataModelGraphExplorer object manifest: pd.Df manifest to upload useSchemaLabel: bool whether to use schemaLabel (True) or display label (False) @@ -937,7 +951,7 @@ def formatDB(self, sg, manifest, useSchemaLabel): if useSchemaLabel: cols = [ - sg.se.get_class_label_from_display_name( + get_class_label_from_display_name( str(col) ).translate({ord(x): '' for x in blacklist_chars}) for col in manifest_columns @@ -969,7 +983,7 @@ def buildDB(self, col_schema: List, table_manifest: pd.DataFrame, table_manipulation: str, - sg: SchemaGenerator, + DME: DataModelGraphExplorer, restrict: bool = False, ): @@ -1015,7 +1029,7 @@ def buildDB(self, 
if table_manipulation.lower() == 'replace': manifest_table_id = tableOps.replaceTable(specifySchema = True, columnTypeDict=col_schema,) elif table_manipulation.lower() == 'upsert': - manifest_table_id = tableOps.upsertTable(sg=sg,) + manifest_table_id = tableOps.upsertTable(DME=DME,) elif table_manipulation.lower() == 'update': manifest_table_id = tableOps.updateTable() @@ -1059,7 +1073,7 @@ def upload_manifest_file(self, manifest, metadataManifestPath, datasetId, restri return manifest_synapse_file_id @missing_entity_handler - def format_row_annotations(self, se, sg, row, entityId, hideBlanks): + def format_row_annotations(self, DME, row, entityId, hideBlanks): # prepare metadata for Synapse storage (resolve display name into a name that Synapse annotations support (e.g no spaces, parenthesis) # note: the removal of special characters, will apply only to annotation keys; we are not altering the manifest # this could create a divergence between manifest column and annotations. this should be ok for most use cases. 
@@ -1069,7 +1083,7 @@ def format_row_annotations(self, se, sg, row, entityId, hideBlanks): for k, v in row.to_dict().items(): - keySyn = se.get_class_label_from_display_name(str(k)).translate({ord(x): '' for x in blacklist_chars}) + keySyn = get_class_label_from_display_name(str(k)).translate({ord(x): '' for x in blacklist_chars}) # Skip `Filename` and `ETag` columns when setting annotations if keySyn in ["Filename", "ETag", "eTag"]: @@ -1097,7 +1111,7 @@ def format_row_annotations(self, se, sg, row, entityId, hideBlanks): else: if isinstance(anno_v,float) and np.isnan(anno_v): annos[anno_k] = "" - elif isinstance(anno_v,str) and re.fullmatch(csv_list_regex, anno_v) and rule_in_rule_list('list', sg.get_node_validation_rules(anno_k)): + elif isinstance(anno_v,str) and re.fullmatch(csv_list_regex, anno_v) and rule_in_rule_list('list', DME.get_node_validation_rules(anno_k)): annos[anno_k] = anno_v.split(",") else: annos[anno_k] = anno_v @@ -1175,8 +1189,8 @@ def annotate_upload_manifest_table(self, manifest, datasetId, metadataManifestPa else: manifest["entityId"].fillna("", inplace=True) - # get a schema explorer object to ensure schema attribute names used in manifest are translated to schema labels for synapse annotations - se = SchemaExplorer() + # get a DataModelGraphExplorer object to ensure schema attribute names used in manifest are translated to schema labels for synapse annotations + DME = DataModelGraphExplorer() # Create table name here. 
if 'Component' in manifest.columns: @@ -1186,7 +1200,7 @@ def annotate_upload_manifest_table(self, manifest, datasetId, metadataManifestPa # Upload manifest as a table and get the SynID and manifest manifest_synapse_table_id, manifest, table_manifest = self.upload_format_manifest_table( - se, manifest, datasetId, table_name, restrict = restrict_manifest, useSchemaLabel=useSchemaLabel,) + DME, manifest, datasetId, table_name, restrict = restrict_manifest, useSchemaLabel=useSchemaLabel,) # Iterate over manifest rows, create Synapse entities and store corresponding entity IDs in manifest if needed # also set metadata for each synapse entity as Synapse annotations @@ -1246,7 +1260,7 @@ def _read_manifest(self, metadataManifestPath:str) -> pd.DataFrame: ) from err return manifest - def _add_id_columns_to_manifest(self, manifest: pd.DataFrame, sg: SchemaGenerator): + def _add_id_columns_to_manifest(self, manifest: pd.DataFrame, DME: DataModelGraphExplorer): """Helper function to add id and entityId columns to the manifest if they do not already exist, Fill id values per row. Args: Manifest loaded as a pd.Dataframe @@ -1258,7 +1272,7 @@ def _add_id_columns_to_manifest(self, manifest: pd.DataFrame, sg: SchemaGenerato if not col_in_dataframe("Id", manifest): # See if schema has `Uuid` column specified try: - uuid_col_in_schema = sg.se.is_class_in_schema('Uuid') or sg.se.is_class_in_schema('uuid') + uuid_col_in_schema = DME.is_class_in_schema('Uuid') or DME.is_class_in_schema('uuid') except (KeyError): uuid_col_in_schema = False @@ -1301,11 +1315,10 @@ def _generate_table_name(self, manifest): table_name = 'synapse_storage_manifest_table' return table_name, component_name - def _add_annotations(self, se, schemaGenerator, row, entityId, hideBlanks): + def _add_annotations(self, DME, row, entityId, hideBlanks): """Helper function to format and add annotations to entities in Synapse. Args: - se: schemaExplorer object, - schemaGenerator: schemaGenerator Object. 
+ DME: DataModelGraphExplorer object, row: current row of manifest being processed entityId (str): synapseId of entity to add annotations to hideBlanks: Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. @@ -1313,7 +1326,7 @@ def _add_annotations(self, se, schemaGenerator, row, entityId, hideBlanks): Annotations are added to entities in Synapse, no return. """ # Format annotations for Synapse - annos = self.format_row_annotations(se, schemaGenerator, row, entityId, hideBlanks) + annos = self.format_row_annotations(DME, row, entityId, hideBlanks) if annos: # Store annotations for an entity folder @@ -1341,8 +1354,7 @@ def _create_entity_id(self, idx, row, manifest, datasetId): def add_entities( self, - se, - schemaGenerator, + DME, manifest, manifest_record_type, datasetId, @@ -1351,8 +1363,7 @@ def add_entities( ): '''Depending on upload type add Ids to entityId row. Add anotations to connected files. Args: - se: Schema Explorer Object - schemaGenerator: SchemaGenerator object + DME: DataModelGraphExplorer Object manifest (pd.DataFrame): loaded df containing user supplied data. manifest_record_type: valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both. datasetId (str): synapse ID of folder containing the dataset @@ -1389,13 +1400,12 @@ def add_entities( # Adding annotations to connected files. if entityId: - self._add_annotations(se, schemaGenerator, row, entityId, hideBlanks) + self._add_annotations(DME, row, entityId, hideBlanks) return manifest def upload_manifest_as_table( self, - se, - schemaGenerator, + DME, manifest, metadataManifestPath, datasetId, @@ -1409,8 +1419,7 @@ def upload_manifest_as_table( ): """Upload manifest to Synapse as a table and csv. 
Args: - se: SchemaExplorer object - schemaGenerator: SchemaGenerator Object + DME: DataModelGraphExplorer object manifest (pd.DataFrame): loaded df containing user supplied data. metadataManifestPath: path to csv containing a validated metadata manifest. datasetId (str): synapse ID of folder containing the dataset @@ -1425,7 +1434,7 @@ def upload_manifest_as_table( """ # Upload manifest as a table, get the ID and updated manifest. manifest_synapse_table_id, manifest, table_manifest = self.uploadDB( - schemaGenerator, + DME, manifest, datasetId, table_name, @@ -1433,7 +1442,7 @@ def upload_manifest_as_table( useSchemaLabel, table_manipulation) - manifest = self.add_entities(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id) + manifest = self.add_entities(DME, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id) # Load manifest to synapse as a CSV File manifest_synapse_file_id = self.upload_manifest_file(manifest, metadataManifestPath, datasetId, restrict, component_name = component_name) @@ -1444,7 +1453,7 @@ def upload_manifest_as_table( # Update manifest Synapse table with new entity id column. manifest_synapse_table_id, manifest, table_manifest = self.uploadDB( - schemaGenerator, + DME, manifest, datasetId, table_name, @@ -1459,8 +1468,7 @@ def upload_manifest_as_table( def upload_manifest_as_csv( self, - se, - schemaGenerator, + DME, manifest, metadataManifestPath, datasetId, @@ -1471,8 +1479,7 @@ def upload_manifest_as_csv( with_entities = False,): """Upload manifest to Synapse as a csv only. Args: - se: SchemaExplorer object - schemaGenerator: SchemaGenerator Object + DME: DataModelGraphExplorer object manifest (pd.DataFrame): loaded df containing user supplied data. metadataManifestPath: path to csv containing a validated metadata manifest. 
datasetId (str): synapse ID of folder containing the dataset @@ -1485,7 +1492,7 @@ def upload_manifest_as_csv( manifest_synapse_file_id (str): SynID of manifest csv uploaded to synapse. """ if with_entities: - manifest = self.add_entities(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks) + manifest = self.add_entities(DME, manifest, manifest_record_type, datasetId, hideBlanks) # Load manifest to synapse as a CSV File manifest_synapse_file_id = self.upload_manifest_file(manifest, @@ -1501,8 +1508,7 @@ def upload_manifest_as_csv( def upload_manifest_combo( self, - se, - schemaGenerator, + DME, manifest, metadataManifestPath, datasetId, @@ -1516,8 +1522,7 @@ def upload_manifest_combo( ): """Upload manifest to Synapse as a table and CSV with entities. Args: - se: SchemaExplorer object - schemaGenerator: SchemaGenerator Object + DME: DataModelGraphExplorer object manifest (pd.DataFrame): loaded df containing user supplied data. metadataManifestPath: path to csv containing a validated metadata manifest. datasetId (str): synapse ID of folder containing the dataset @@ -1532,7 +1537,7 @@ def upload_manifest_combo( manifest_synapse_file_id (str): SynID of manifest csv uploaded to synapse. """ manifest_synapse_table_id, manifest, table_manifest = self.uploadDB( - se, + DME, manifest, datasetId, table_name, @@ -1540,7 +1545,7 @@ def upload_manifest_combo( useSchemaLabel=useSchemaLabel, table_manipulation=table_manipulation,) - manifest = self.add_entities(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id) + manifest = self.add_entities(DME, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id) # Load manifest to synapse as a CSV File manifest_synapse_file_id = self.upload_manifest_file(manifest, metadataManifestPath, datasetId, restrict, component_name) @@ -1552,7 +1557,7 @@ def upload_manifest_combo( # Update manifest Synapse table with new entity id column. 
manifest_synapse_table_id, manifest, table_manifest = self.uploadDB( - se, + DME, manifest, datasetId, table_name, @@ -1566,7 +1571,7 @@ def upload_manifest_combo( return manifest_synapse_file_id def associateMetadataWithFiles( - self, schemaGenerator: SchemaGenerator, metadataManifestPath: str, datasetId: str, manifest_record_type: str = 'table_file_and_entities', + self, DME: DataModelGraphExplorer, metadataManifestPath: str, datasetId: str, manifest_record_type: str = 'table_file_and_entities', useSchemaLabel: bool = True, hideBlanks: bool = False, restrict_manifest = False, table_manipulation: str = 'replace', ) -> str: """Associate metadata with files in a storage dataset already on Synapse. @@ -1581,7 +1586,7 @@ def associateMetadataWithFiles( for downstream query and interaction with the data. Args: - schemaGenerator: SchemaGenerator Object + DME: DataModelGraphExplorer Object metadataManifestPath: path to csv containing a validated metadata manifest. The manifest should include a column entityId containing synapse IDs of files/entities to be associated with metadata, if that is applicable to the dataset type. Some datasets, e.g. clinical data, do not contain file id's, but data is stored in a table: one row per item. 
@@ -1597,10 +1602,7 @@ def associateMetadataWithFiles( """ # Read new manifest CSV: manifest = self._read_manifest(metadataManifestPath) - manifest = self._add_id_columns_to_manifest(manifest, schemaGenerator) - - # get a schema explorer object to ensure schema attribute names used in manifest are translated to schema labels for synapse annotations - se = SchemaExplorer() + manifest = self._add_id_columns_to_manifest(manifest, DME) table_name, component_name = self._generate_table_name(manifest) @@ -1608,8 +1610,7 @@ def associateMetadataWithFiles( if manifest_record_type == "file_only": manifest_synapse_file_id = self.upload_manifest_as_csv( - se, - schemaGenerator, + DME, manifest, metadataManifestPath, datasetId=datasetId, @@ -1621,8 +1622,7 @@ def associateMetadataWithFiles( ) elif manifest_record_type == "table_and_file": manifest_synapse_file_id = self.upload_manifest_as_table( - se, - schemaGenerator, + DME, manifest, metadataManifestPath, datasetId=datasetId, @@ -1636,8 +1636,7 @@ def associateMetadataWithFiles( ) elif manifest_record_type == "file_and_entities": manifest_synapse_file_id = self.upload_manifest_as_csv( - se, - schemaGenerator, + DME, manifest, metadataManifestPath, datasetId=datasetId, @@ -1649,8 +1648,7 @@ def associateMetadataWithFiles( ) elif manifest_record_type == "table_file_and_entities": manifest_synapse_file_id = self.upload_manifest_combo( - se, - schemaGenerator, + DME, manifest, metadataManifestPath, datasetId=datasetId, @@ -2136,7 +2134,7 @@ def _get_auth_token(self,): return authtoken - def upsertTable(self, sg: SchemaGenerator,): + def upsertTable(self, DME: DataModelGraphExplorer): """ Method to upsert rows from a new manifest into an existing table on synapse For upsert functionality to work, primary keys must follow the naming convention of _id @@ -2145,7 +2143,7 @@ def upsertTable(self, sg: SchemaGenerator,): Args: - sg: SchemaGenerator instance + DME: DataModelGraphExplorer instance Returns: existingTableId: synID of the 
already existing table that had its metadata replaced @@ -2161,7 +2159,7 @@ def upsertTable(self, sg: SchemaGenerator,): except(SynapseHTTPError) as ex: # If error is raised because Table has old `Uuid` column and not new `Id` column, then handle and re-attempt upload if 'Id is not a valid column name or id' in str(ex): - self._update_table_uuid_column(sg) + self._update_table_uuid_column(DME) synapseDB.upsert_table_rows(table_name=self.tableName, data=self.tableToLoad) # Raise if other error else: @@ -2169,12 +2167,12 @@ def upsertTable(self, sg: SchemaGenerator,): return self.existingTableId - def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None: + def _update_table_uuid_column(self, DME: DataModelGraphExplorer,) -> None: """Removes the `Uuid` column when present, and relpaces with an `Id` column Used to enable backwards compatability for manifests using the old `Uuid` convention Args: - sg: SchemaGenerator instance + DME: DataModelGraphExplorer instance Returns: None @@ -2189,7 +2187,7 @@ def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None: if col.name.lower() == 'uuid': # See if schema has `Uuid` column specified try: - uuid_col_in_schema = sg.se.is_class_in_schema(col.name) + uuid_col_in_schema = DME.is_class_in_schema(col.name) except (KeyError): uuid_col_in_schema = False diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index 553ac4fb4..d7a26eb11 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -1,264 +1,69 @@ -import networkx as nx +import inflection import json +import networkx as nx +import string -from schematic.utils.curie_utils import extract_name_from_uri_or_curie -from schematic.utils.validate_utils import validate_class_schema -from schematic.utils.validate_rules_utils import validate_schema_rules - - -def load_schema_into_networkx(schema): - G = nx.MultiDiGraph() - for record in schema["@graph"]: - - # TODO: clean up obsolete code - # if record["@type"] == 
"rdfs:Class": - - # creation of nodes - # adding nodes to the graph - node = {} - for (k, value) in record.items(): - # Some keys in the current schema.org schema have a dictionary entry for their value that includes keys @language and @value, - # for parity with other schemas, we just want the value - if isinstance(value,dict) and "@language" in value.keys(): - record[k] = record[k]["@value"] - if ":" in k: - key = k.split(":")[1] - node[key] = value - elif "@" in k: - key = k[1:] - node[key] = value - else: - node[k] = value - - # creation of edges - # adding edges to the graph - if "rdfs:subClassOf" in record: - parents = record["rdfs:subClassOf"] - if type(parents) == list: - for _parent in parents: - n1 = extract_name_from_uri_or_curie(_parent["@id"]) - n2 = record["rdfs:label"] - - # do not allow self-loops - if n1 != n2: - G.add_edge(n1, n2, key="parentOf") - elif type(parents) == dict: - n1 = extract_name_from_uri_or_curie(parents["@id"]) - n2 = record["rdfs:label"] - - # do not allow self-loops - if n1 != n2: - G.add_edge(n1, n2, key="parentOf") - - # TODO: refactor: abstract adding relationship method - if "sms:requiresDependency" in record: - dependencies = record["sms:requiresDependency"] - if type(dependencies) == list: - for _dep in dependencies: - n1 = record["rdfs:label"] - n2 = extract_name_from_uri_or_curie(_dep["@id"]) - # do not allow self-loops - if n1 != n2: - G.add_edge(n1, n2, key="requiresDependency") - - if "sms:requiresComponent" in record: - components = record["sms:requiresComponent"] - if type(components) == list: - for _comp in components: - n1 = record["rdfs:label"] - n2 = extract_name_from_uri_or_curie(_comp["@id"]) - # do not allow self-loops - if n1 != n2: - G.add_edge(n1, n2, key="requiresComponent") - - if "schema:rangeIncludes" in record: - range_nodes = record["schema:rangeIncludes"] - if type(range_nodes) == list: - for _range_node in range_nodes: - n1 = record["rdfs:label"] - n2 = 
extract_name_from_uri_or_curie(_range_node["@id"]) - # do not allow self-loops - if n1 != n2: - G.add_edge(n1, n2, key="rangeValue") - elif type(range_nodes) == dict: - n1 = record["rdfs:label"] - n2 = extract_name_from_uri_or_curie(range_nodes["@id"]) - # do not allow self-loops - if n1 != n2: - G.add_edge(n1, n2, key="rangeValue") - - if "schema:domainIncludes" in record: - domain_nodes = record["schema:domainIncludes"] - if type(domain_nodes) == list: - for _domain_node in domain_nodes: - n1 = extract_name_from_uri_or_curie(_domain_node["@id"]) - n2 = record["rdfs:label"] - # do not allow self-loops - if n1 != n2: - G.add_edge(n1, n2, key="domainValue") - elif type(domain_nodes) == dict: - n1 = extract_name_from_uri_or_curie(domain_nodes["@id"]) - n2 = record["rdfs:label"] - # do not allow self-loops - if n1 != n2: - G.add_edge(n1, n2, key="domainValue") - - # check schema generator (JSON validation schema gen) - if ( - "requiresChildAsValue" in node - and node["requiresChildAsValue"]["@id"] == "sms:True" - ): - node["requiresChildAsValue"] = True - - if "required" in node: - if "sms:true" == record["sms:required"]: - node["required"] = True - else: - node["required"] = False - - # not sure if this is required? 
- if "sms:validationRules" in record: - node["validationRules"] = record["sms:validationRules"] - if node["validationRules"]: - validate_vr = validate_schema_rules( - record["sms:validationRules"], - record["rdfs:label"], - input_filetype = 'json_schema') - else: - node["validationRules"] = [] - - node["uri"] = record["@id"] - node["description"] = record["rdfs:comment"] - G.add_node(record["rdfs:label"], **node) - # print(node) - # print(G.nodes()) - - return G +def get_property_label_from_display_name(display_name, strict_camel_case = False): + """Convert a given display name string into a proper property label string""" + """ + label = ''.join(x.capitalize() or ' ' for x in display_name.split(' ')) + label = label[:1].lower() + label[1:] if label else '' + """ + # This is the newer more strict method + if strict_camel_case: + display_name = display_name.strip().translate({ord(c): "_" for c in string.whitespace}) + label = inflection.camelize(display_name, uppercase_first_letter=False) -def node_attrs_cleanup(class_add_mod: dict) -> dict: - # clean map that will be inputted into the node/graph - node = {} - for (k, value) in class_add_mod.items(): - if ":" in k: - key = k.split(":")[1] - node[key] = value - elif "@" in k: - key = k[1:] - node[key] = value + # This method remains for backwards compatibility else: - node[k] = value - - return node + display_name = display_name.translate({ord(c): None for c in string.whitespace}) + label = inflection.camelize(display_name.strip(), uppercase_first_letter=False) + return label -def relationship_edges( - schema_graph_nx: nx.MultiDiGraph, class_add_mod: dict, **kwargs -) -> nx.MultiDiGraph: +def get_class_label_from_display_name(display_name, strict_camel_case = False): + """Convert a given display name string into a proper class label string""" """ - Notes: - ===== - # pass the below dictionary as the third argument (kwargs) to relationship_edges(). - # "in" indicates that the relationship has an in-edges behaviour. 
- # "out" indicates that the relationship has an out-edges behaviour. + label = ''.join(x.capitalize() or ' ' for x in display_name.split(' '))""" + # This is the newer more strict method + if strict_camel_case: + display_name = display_name.strip().translate({ord(c): "_" for c in string.whitespace}) + label = inflection.camelize(display_name, uppercase_first_letter=True) - rel_dict = { - "rdfs:subClassOf": { - "parentOf": "in" - }, - "schema:domainIncludes": { - "domainValue": "in" - }, - "sms:requiresDependency": { - "requiresDependency": "out" - }, - "sms:requiresComponent": { - "requiresComponent": "out" - }, - "schema:rangeIncludes": { - "rangeValue": "out" - } - } - """ - for rel, rel_lab_node_type in kwargs.items(): - for rel_label, node_type in rel_lab_node_type.items(): - if rel in class_add_mod: - parents = class_add_mod[rel] - if type(parents) == list: - for _parent in parents: - - if node_type == "in": - n1 = extract_name_from_uri_or_curie(_parent["@id"]) - n2 = class_add_mod["rdfs:label"] - - if node_type == "out": - n1 = class_add_mod["rdfs:label"] - n2 = extract_name_from_uri_or_curie(_parent["@id"]) - - # do not allow self-loops - if n1 != n2: - schema_graph_nx.add_edge(n1, n2, key=rel_label) - elif type(parents) == dict: - if node_type == "in": - n1 = extract_name_from_uri_or_curie(parents["@id"]) - n2 = class_add_mod["rdfs:label"] - - if node_type == "out": - n1 = class_add_mod["rdfs:label"] - n2 = extract_name_from_uri_or_curie(parents["@id"]) - - # do not allow self-loops - if n1 != n2: - schema_graph_nx.add_edge(n1, n2, key=rel_label) - - return schema_graph_nx - - -def class_to_node(class_to_convert: dict) -> nx.Graph: - G = nx.Graph() - - node = {} # node to be added the above graph and returned - for (k, v) in class_to_convert.items(): - if ":" in k: # if ":" is present in key - key = k.split(":")[1] - node[key] = v - elif "@" in k: # if "@" is present in key - key = k[1:] - node[key] = v - else: - node[k] = v - - if "required" in node: - if 
class_to_convert["sms:required"] == "sms:true": - node["required"] = True - else: - node["required"] = False - - if "sms:validationRules" in class_to_convert: - node["validationRules"] = class_to_convert["sms:validationRules"] + # This method remains for backwards compatibility else: - node["validationRules"] = [] - - node["uri"] = class_to_convert["@id"] # add separate "uri" key - node["description"] = class_to_convert[ - "rdfs:comment" - ] # separately store "comment" as "description" - G.add_node(class_to_convert["rdfs:label"], **node) + display_name = display_name.translate({ord(c): None for c in string.whitespace}) + label = inflection.camelize(display_name.strip(), uppercase_first_letter=True) - return G - - -def replace_node_in_schema(schema: nx.MultiDiGraph, class_add_mod: dict) -> None: - # part of the code that replaces the modified class in the original JSON-LD schema (not in the data/ folder though) - for i, schema_class in enumerate(schema["@graph"]): - if schema_class["rdfs:label"] == class_add_mod["rdfs:label"]: - validate_class_schema( - class_add_mod - ) # validate that the class to be modified follows the structure for any generic class (node) - - schema["@graph"][i] = class_add_mod - break + return label +def get_display_name_from_label(node_name, attr_relationships): + ''' + TODO: if not display name raise error. 
+ ''' + if 'Attribute' in attr_relationships.keys(): + display_name = attr_relationships['Attribute'] + else: + display_name = node_name + return display_name + +def get_label_from_display_name(display_name, entry_type, strict_camel_case = False): + + if entry_type.lower()=='class': + label = get_class_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case) + + elif entry_type.lower()=='property': + label=get_property_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case) + return label + +def convert_bool(provided_bool): + return str(provided_bool) + +def parse_validation_rules(validation_rules:list) -> list: + if validation_rules and '::' in validation_rules[0]: + validation_rules = validation_rules[0].split('::') + return validation_rules def export_schema(schema, file_path): with open(file_path, "w") as f: diff --git a/schematic/visualization/attributes_explorer.py b/schematic/visualization/attributes_explorer.py index 0b18ab092..704fc1f4c 100644 --- a/schematic/visualization/attributes_explorer.py +++ b/schematic/visualization/attributes_explorer.py @@ -6,7 +6,10 @@ import pandas as pd from typing import Any, Dict, Optional, Text, List -from schematic.schemas import SchemaGenerator +from schematic.schemas.data_model_parser import DataModelParser +from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer +from schematic.schemas.data_model_json_schema import DataModelJSONSchema + from schematic.utils.io_utils import load_json logger = logging.getLogger(__name__) @@ -17,12 +20,27 @@ def __init__(self, )-> None: self.path_to_jsonld = path_to_jsonld - self.json_data_model = load_json(self.path_to_jsonld) + self.jsonld = load_json(self.path_to_jsonld) - # instantiate a schema generator to retrieve db schema graph from metadata model graph - self.sg = SchemaGenerator(self.path_to_jsonld) + # Instantiate Data Model Parser + data_model_parser = 
DataModelParser(path_to_data_model = self.path_to_jsonld) + + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + # Generate graph + self.graph_data_model = data_model_grapher.generate_data_model_graph() + + # Instantiate Data Model Graph Explorer + self.DME = DataModelGraphExplorer(self.graph_data_model) + + # Instantiate Data Model Json Schema + self.data_model_js = DataModelJSONSchema(jsonld_path=self.path_to_jsonld, graph=self.graph_data_model) + self.output_path = self.create_output_path('merged_csv') def create_output_path(self, terminal_folder): @@ -62,7 +80,7 @@ def parse_attributes(self, save_file=True): ''' # get all components - component_dg = self.sg.se.get_digraph_by_edge_type('requiresComponent') + component_dg = self.DME.get_digraph_by_edge_type('requiresComponent') components = component_dg.nodes() # For each data type to be loaded gather all attribtes the user would @@ -115,9 +133,9 @@ def _parse_attributes(self, components, save_file=True, include_index=True): df_store = [] for component in components: data_dict = {} + # get the json schema - json_schema = self.sg.get_json_schema_requirements( - source_node=component, schema_name=self.path_to_jsonld) + json_schema = self.data_model_js.get_json_validation_schema(source_node=component, schema_name=self.path_to_jsonld) # Gather all attribues, their valid values and requirements for key, value in json_schema['properties'].items(): diff --git a/schematic/visualization/tangled_tree.py b/schematic/visualization/tangled_tree.py index 07757a14d..33c89fbd8 100644 --- a/schematic/visualization/tangled_tree.py +++ b/schematic/visualization/tangled_tree.py @@ -12,8 +12,11 @@ from schematic.utils.viz_utils import visualize from schematic.visualization.attributes_explorer import AttributesExplorer -from schematic.schemas.explorer import SchemaExplorer -from schematic.schemas.generator import SchemaGenerator 
+ +from schematic.schemas.data_model_parser import DataModelParser +from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer +from schematic.schemas.data_model_relationships import DataModelRelationships + from schematic import LOADER from schematic.utils.io_utils import load_json from copy import deepcopy @@ -40,11 +43,20 @@ def __init__(self, # Parse schema name self.schema_name = path.basename(self.path_to_json_ld).split(".model.jsonld")[0] - # Instantiate a schema generator to retrieve db schema graph from metadata model graph - self.sg = SchemaGenerator(self.path_to_json_ld) + # Instantiate Data Model Parser + data_model_parser = DataModelParser(path_to_data_model = self.path_to_json_ld) + + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + self.graph_data_model = data_model_grapher.generate_data_model_graph() - # Get metadata model schema graph - self.G = self.sg.se.get_nx_schema() + # Instantiate Data Model Graph Explorer + self.DME = DataModelGraphExplorer(self.graph_data_model) # Set Parameters self.figure_type = figure_type.lower() @@ -80,14 +92,14 @@ def get_text_for_tangled_tree(self, text_type, save_file=False): save_file==False: Returns plain or highlighted text as a csv string. 
''' # Get nodes in the digraph, many more nodes returned if figure type is dependency - cdg = self.sg.se.get_digraph_by_edge_type(self.dependency_type) + cdg = self.DME.get_digraph_by_edge_type(self.dependency_type) nodes = cdg.nodes() if self.dependency_type == 'requiresComponent': component_nodes = nodes else: # get component nodes if making dependency figure - component_dg = self.sg.se.get_digraph_by_edge_type('requiresComponent') + component_dg = self.DME.get_digraph_by_edge_type('requiresComponent') component_nodes = component_dg.nodes() # Initialize lists @@ -98,7 +110,7 @@ def get_text_for_tangled_tree(self, text_type, save_file=False): for node in component_nodes: # Get the highlighted components based on figure_type if self.figure_type == 'component': - highlight_descendants = self.sg.se.get_descendants_by_edge_type(node, 'requiresComponent') + highlight_descendants = self.DME.get_descendants_by_edge_type(node, 'requiresComponent') elif self.figure_type == 'dependency': highlight_descendants = [node] @@ -139,12 +151,13 @@ def get_topological_generations(self): edges: (Networkx EdgeDataView) Edges of component or dependency graph. When iterated over it works like a list of tuples. ''' # Get nodes in the digraph - digraph = self.sg.se.get_digraph_by_edge_type(self.dependency_type) + digraph = self.DME.get_digraph_by_edge_type(self.dependency_type) nodes = digraph.nodes() # Get subgraph - mm_graph = self.sg.se.get_nx_schema() - subg = self.sg.get_subgraph_by_edge_type(mm_graph, self.dependency_type) + #mm_graph = self.sg.se.get_nx_schema() + #subg = self.sg.get_subgraph_by_edge_type(mm_graph, self.dependency_type) + subg = self.DME.get_subgraph_by_edge_type(self.dependency_type) # Get edges and topological_gen based on figure type. 
if self.figure_type == 'component': @@ -217,7 +230,7 @@ def gather_component_dependency_info(self, cn, attributes_df): ''' # Gather all component dependency information - component_attributes = self.sg.get_descendants_by_edge_type( + component_attributes = self.DME.get_descendants_by_edge_type( cn, self.dependency_type, connected=True @@ -725,7 +738,7 @@ def get_ancestors_nodes(self, subgraph, components): """ all_parent_children = {} for component in components: - all_ancestors = self.sg.se.get_nodes_ancestors(subgraph, component) + all_ancestors = self.DME.get_nodes_ancestors(subgraph, component) all_parent_children[component] = all_ancestors return all_parent_children @@ -766,7 +779,7 @@ def get_tangled_tree_layers(self, save_file=True): if self.figure_type == 'dependency': # Get component digraph and nodes. - component_dg = self.sg.se.get_digraph_by_edge_type('requiresComponent') + component_dg = self.DME.get_digraph_by_edge_type('requiresComponent') component_nodes = component_dg.nodes() # Get table of attributes. 
diff --git a/schematic_api/api/openapi/api.yaml b/schematic_api/api/openapi/api.yaml
index d6788c5aa..01f038ba0 100644
--- a/schematic_api/api/openapi/api.yaml
+++ b/schematic_api/api/openapi/api.yaml
@@ -828,7 +828,7 @@ paths:
       tags:
         - Schema Operation
 
-  /explorer/find_class_specific_properties:
+  /schemas/find_class_specific_properties:
     get:
       summary: Find properties specifically associated with a given class
       description: Find properties specifically associated with a given class
@@ -990,7 +990,7 @@ paths:
       tags:
         - Schema Operation
 
-  /explorer/get_node_dependencies:
+  /schemas/get_node_dependencies:
     get:
       summary: Get the immediate dependencies that are related to a given source node
       description: Get the immediate dependencies that are related to a given source node
@@ -1036,20 +1036,12 @@ paths:
       tags:
         - Schema Operation
 
-  /explorer/get_property_label_from_display_name:
+  /utils/get_property_label_from_display_name:
     get:
       summary: Converts a given display name string into a proper property label string
       description: Converts a given display name string into a proper property label string
-      operationId: schematic_api.api.routes.get_property_label_from_display_name
+      operationId: schematic_api.api.routes.get_property_label_from_display_name_route
       parameters:
-        - in: query
-          name: schema_url
-          schema:
-            type: string
-          description: Data Model URL
-          example: >-
-            https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld
-          required: true
         - in: query
           name: display_name
           schema:
@@ -1073,7 +1065,7 @@ paths:
       tags:
         - Schema Operation
 
-  /explorer/get_node_range:
+  /schemas/get_node_range:
     get:
       summary: Get all the valid values that are associated with a node label.
       description: Get all the valid values that are associated with a node label.
diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index 6150fcb3c..58ddafaa7 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -23,11 +23,15 @@ from schematic.visualization.tangled_tree import TangledTree from schematic.manifest.generator import ManifestGenerator from schematic.models.metadata import MetadataModel -from schematic.schemas.generator import SchemaGenerator -from schematic.schemas.explorer import SchemaExplorer + +from schematic.schemas.data_model_parser import DataModelParser +from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer +#from schematic.schemas.data_model_relationships import DataModelRelationships + from schematic.store.synapse import SynapseStorage, ManifestDownload from synapseclient.core.exceptions import SynapseHTTPError, SynapseAuthenticationError, SynapseUnmetAccessRestrictions, SynapseNoCredentialsError, SynapseTimeoutError from schematic.utils.general import entity_type_mapping +from schematic.utils.schema_utils import get_property_label_from_display_name logger = logging.getLogger(__name__) logging.basicConfig(level=logging.DEBUG) @@ -251,11 +255,23 @@ def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, f"Please check your submission and try again." 
) + data_model_parser = DataModelParser(path_to_data_model = jsonld) + + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + def create_single_manifest(data_type, title, dataset_id=None, output_format=None, access_token=None, strict=strict_validation): # create object of type ManifestGenerator manifest_generator = ManifestGenerator( path_to_json_ld=jsonld, + graph=graph_data_model, title=title, root=data_type, use_annotations=use_annotations, @@ -283,8 +299,8 @@ def create_single_manifest(data_type, title, dataset_id=None, output_format=None # Gather all returned result urls all_results = [] if data_type[0] == 'all manifests': - sg = SchemaGenerator(path_to_json_ld=jsonld) - component_digraph = sg.se.get_digraph_by_edge_type('requiresComponent') + DME = DataModelGraphExplorer(graph_data_model) + component_digraph = DME.get_digraph_by_edge_type('requiresComponent') components = component_digraph.nodes() for component in components: if title: @@ -647,35 +663,41 @@ def get_manifest_datatype(access_token, manifest_id, asset_view): return manifest_dtypes_dict def get_schema_pickle(schema_url): - # load schema - se = SchemaExplorer() + data_model_parser = DataModelParser(path_to_data_model = schema_url) + #Parse Model + parsed_data_model = data_model_parser.parse_model() - se.load_schema(schema_url) + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) - # get schema - schema_graph = se.get_nx_schema() + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() # write to local pickle file path = os.getcwd() export_path = os.path.join(path, 'tests/data/schema.gpickle') with open(export_path, 'wb') as file: - pickle.dump(schema_graph, file) + pickle.dump(graph_data_model, file) return export_path def 
get_subgraph_by_edge_type(schema_url, relationship): - # use schema generator and schema explorer - sg = SchemaGenerator(path_to_json_ld=schema_url) - se = SchemaExplorer() - se.load_schema(schema_url) + data_model_parser = DataModelParser(path_to_data_model = schema_url) + + #Parse Model + parsed_data_model = data_model_parser.parse_model() - # get the schema graph - schema_graph = se.get_nx_schema() + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) - # relationship subgraph - relationship_subgraph = sg.get_subgraph_by_edge_type(schema_graph, relationship) + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + DME = DataModelGraphExplorer(graph_data_model) + + # relationship subgraph + relationship_subgraph = DME.get_subgraph_by_edge_type(relationship) # return relationship Arr = [] for t in relationship_subgraph.edges: @@ -686,14 +708,20 @@ def get_subgraph_by_edge_type(schema_url, relationship): def find_class_specific_properties(schema_url, schema_class): - # use schema explorer - se = SchemaExplorer() + data_model_parser = DataModelParser(path_to_data_model = schema_url) + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() - # load schema - se.load_schema(schema_url) + DME = DataModelGraphExplorer(graph_data_model) # return properties - properties = se.find_class_specific_properties(schema_class) + properties = DME.find_class_specific_properties(schema_class) return properties @@ -721,15 +749,25 @@ def get_node_dependencies( Returns: list[str]: List of nodes that are dependent on the source node. 
""" - gen = SchemaGenerator(path_to_json_ld=schema_url) - dependencies = gen.get_node_dependencies( + data_model_parser = DataModelParser(path_to_data_model = schema_url) + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + DME = DataModelGraphExplorer(graph_data_model) + + dependencies = DME.get_node_dependencies( source_node, return_display_names, return_schema_ordered ) return dependencies -def get_property_label_from_display_name( - schema_url: str, +def get_property_label_from_display_name_route( display_name: str, strict_camel_case: bool = False ) -> str: @@ -744,9 +782,7 @@ def get_property_label_from_display_name( Returns: str: The property label of the display name """ - explorer = SchemaExplorer() - explorer.load_schema(schema_url) - label = explorer.get_property_label_from_display_name(display_name, strict_camel_case) + label = get_property_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case) return label @@ -766,8 +802,19 @@ def get_node_range( Returns: list[str]: A list of nodes """ - gen = SchemaGenerator(path_to_json_ld=schema_url) - node_range = gen.get_node_range(node_label, return_display_names) + data_model_parser = DataModelParser(path_to_data_model = schema_url) + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + DME = DataModelGraphExplorer(graph_data_model) + + node_range = DME.get_node_range(node_label, return_display_names) return node_range def get_if_node_required(schema_url: str, node_display_name: str) -> bool: @@ -781,8 +828,19 @@ def get_if_node_required(schema_url: str, node_display_name: str) -> bool: True: 
If the given node is a "required" node. False: If the given node is not a "required" (i.e., an "optional") node. """ - gen = SchemaGenerator(path_to_json_ld=schema_url) - is_required = gen.is_node_required(node_display_name) + data_model_parser = DataModelParser(path_to_data_model = schema_url) + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + DME = DataModelGraphExplorer(graph_data_model) + + is_required = DME.get_node_required(node_display_name) return is_required @@ -794,8 +852,22 @@ def get_node_validation_rules(schema_url: str, node_display_name: str) -> list: Returns: List of valiation rules for a given node. """ - gen = SchemaGenerator(path_to_json_ld=schema_url) - node_validation_rules = gen.get_node_validation_rules(node_display_name) + # Instantiate DataModelParser + data_model_parser = DataModelParser(path_to_data_model = schema_url) + + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + #Instantiate DataModelGraphExplorer + DME = DataModelGraphExplorer(graph_data_model) + + node_validation_rules = DME.get_node_validation_rules(node_display_name) return node_validation_rules @@ -810,8 +882,21 @@ def get_nodes_display_names(schema_url: str, node_list: list[str]) -> list: node_display_names (List[str]): List of node display names. 
""" - gen = SchemaGenerator(path_to_json_ld=schema_url) - mm_graph = gen.se.get_nx_schema() - node_display_names = gen.get_nodes_display_names(node_list, mm_graph) + # Instantiate DataModelParser + data_model_parser = DataModelParser(path_to_data_model = schema_url) + + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + #Instantiate DataModelGraphExplorer + DME = DataModelGraphExplorer(graph_data_model) + + node_display_names = DME.get_nodes_display_names(node_list) return node_display_names diff --git a/tests/data/example.model.jsonld b/tests/data/example.model.jsonld index 6f29cbf7b..1ebcee1d9 100644 --- a/tests/data/example.model.jsonld +++ b/tests/data/example.model.jsonld @@ -7,1971 +7,6 @@ "xsd": "http://www.w3.org/2001/XMLSchema#" }, "@graph": [ - { - "@id": "schema:Text", - "@type": [ - "schema:DataType", - "rdfs:Class" - ], - "rdfs:comment": "Data type: Text.", - "rdfs:label": "Text" - }, - { - "@id": "schema:Number", - "@type": [ - "schema:DataType", - "rdfs:Class" - ], - "rdfs:comment": "Data type: Number.", - "rdfs:label": "Number" - }, - { - "@id": "schema:Integer", - "@type": "rdfs:Class", - "rdfs:comment": "Data type: Integer.", - "rdfs:label": "Integer", - "rdfs:subClassOf": { - "@id": "schema:Number" - } - }, - { - "@id": "schema:Thing", - "@type": "rdfs:Class", - "rdfs:comment": "Thing", - "rdfs:label": "Thing", - "schema:isPartOf": { - "@id": "http://schema.org" - } - }, - { - "@id": "bts:BiologicalEntity", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "BiologicalEntity", - "rdfs:subClassOf": { - "@id": "schema:Thing" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:OntologyClass", - "@type": "rdfs:Class", - "rdfs:comment": "a concept or class in an ontology, vocabulary or thesaurus", - 
"rdfs:label": "OntologyClass", - "rdfs:subClassOf": { - "@id": "schema:Thing" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:RelationshipType", - "@type": "rdfs:Class", - "rdfs:comment": "An OWL property used as an edge label", - "rdfs:label": "RelationshipType", - "rdfs:subClassOf": { - "@id": "bts:OntologyClass" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GeneOntologyClass", - "@type": "rdfs:Class", - "rdfs:comment": "an ontology class that describes a functional aspect of a gene, gene prodoct or complex", - "rdfs:label": "GeneOntologyClass", - "rdfs:subClassOf": { - "@id": "bts:OntologyClass" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:OrganismTaxon", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "OrganismTaxon", - "rdfs:subClassOf": { - "@id": "bts:OntologyClass" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:OrganismalEntity", - "@type": "rdfs:Class", - "rdfs:comment": "A named entity that is either a part of an organism, a whole organism, population or clade of organisms, excluding molecular entities", - "rdfs:label": "OrganismalEntity", - "rdfs:subClassOf": { - "@id": "bts:BiologicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:IndividualOrganism", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "IndividualOrganism", - "rdfs:subClassOf": { - "@id": "bts:OrganismalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Case", - "@type": "rdfs:Class", - "rdfs:comment": "An individual organism that has a patient role in some clinical context.", - "rdfs:label": "Case", - "rdfs:subClassOf": { - "@id": "bts:IndividualOrganism" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": 
"bts:PopulationOfIndividualOrganisms", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "PopulationOfIndividualOrganisms", - "rdfs:subClassOf": { - "@id": "bts:OrganismalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Biosample", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "Biosample", - "rdfs:subClassOf": { - "@id": "bts:OrganismalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:DiseaseOrPhenotypicFeature", - "@type": "rdfs:Class", - "rdfs:comment": "Either one of a disease or an individual phenotypic feature. Some knowledge resources such as Monarch treat these as distinct, others such as MESH conflate.", - "rdfs:label": "DiseaseOrPhenotypicFeature", - "rdfs:subClassOf": { - "@id": "bts:BiologicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Disease", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "Disease", - "rdfs:subClassOf": { - "@id": "bts:DiseaseOrPhenotypicFeature" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:PhenotypicFeature", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "PhenotypicFeature", - "rdfs:subClassOf": { - "@id": "bts:DiseaseOrPhenotypicFeature" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Environment", - "@type": "rdfs:Class", - "rdfs:comment": "A feature of the environment of an organism that influences one or more phenotypic features of that organism, potentially mediated by genes", - "rdfs:label": "Environment", - "rdfs:subClassOf": { - "@id": "bts:BiologicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:InformationContentEntity", - "@type": "rdfs:Class", - "rdfs:comment": "a piece of information that typically describes some piece of biology or is used as 
support.", - "rdfs:label": "InformationContentEntity", - "rdfs:subClassOf": { - "@id": "schema:Thing" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:ConfidenceLevel", - "@type": "rdfs:Class", - "rdfs:comment": "Level of confidence in a statement", - "rdfs:label": "ConfidenceLevel", - "rdfs:subClassOf": { - "@id": "bts:InformationContentEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:EvidenceType", - "@type": "rdfs:Class", - "rdfs:comment": "Class of evidence that supports an association", - "rdfs:label": "EvidenceType", - "rdfs:subClassOf": { - "@id": "bts:InformationContentEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Publication", - "@type": "rdfs:Class", - "rdfs:comment": "Any published piece of information. Can refer to a whole publication, or to a part of it (e.g. a figure, figure legend, or section highlighted by NLP). The scope is intended to be general and include information published on the web as well as journals.", - "rdfs:label": "Publication", - "rdfs:subClassOf": { - "@id": "bts:InformationContentEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:MolecularEntity", - "@type": "rdfs:Class", - "rdfs:comment": "A gene, gene product, small molecule or macromolecule (including protein complex)", - "rdfs:label": "MolecularEntity", - "rdfs:subClassOf": { - "@id": "bts:BiologicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:ChemicalSubstance", - "@type": "rdfs:Class", - "rdfs:comment": "May be a chemical entity or a formulation with a chemical entity as active ingredient, or a complex material with multiple chemical entities as part", - "rdfs:label": "ChemicalSubstance", - "rdfs:subClassOf": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - 
"@id": "bts:Drug", - "@type": "rdfs:Class", - "rdfs:comment": "A substance intended for use in the diagnosis, cure, mitigation, treatment, or prevention of disease", - "rdfs:label": "Drug", - "rdfs:subClassOf": { - "@id": "bts:ChemicalSubstance" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Metabolite", - "@type": "rdfs:Class", - "rdfs:comment": "Any intermediate or product resulting from metabolism. Includes primary and secondary metabolites.", - "rdfs:label": "Metabolite", - "rdfs:subClassOf": { - "@id": "bts:ChemicalSubstance" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:AnatomicalEntity", - "@type": "rdfs:Class", - "rdfs:comment": "A subcellular location, cell type or gross anatomical part", - "rdfs:label": "AnatomicalEntity", - "rdfs:subClassOf": { - "@id": "bts:OrganismalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:LifeStage", - "@type": "rdfs:Class", - "rdfs:comment": "A stage of development or growth of an organism, including post-natal adult stages", - "rdfs:label": "LifeStage", - "rdfs:subClassOf": { - "@id": "bts:OrganismalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:PlanetaryEntity", - "@type": "rdfs:Class", - "rdfs:comment": "Any entity or process that exists at the level of the whole planet", - "rdfs:label": "PlanetaryEntity", - "rdfs:subClassOf": { - "@id": "schema:Thing" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:EnvironmentalProcess", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "EnvironmentalProcess", - "rdfs:subClassOf": { - "@id": "bts:PlanetaryEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:EnvironmentalFeature", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "EnvironmentalFeature", - "rdfs:subClassOf": 
{ - "@id": "bts:PlanetaryEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:ClinicalEntity", - "@type": "rdfs:Class", - "rdfs:comment": "Any entity or process that exists in the clinical domain and outside the biological realm. Diseases are placed under biological entities", - "rdfs:label": "ClinicalEntity", - "rdfs:subClassOf": { - "@id": "schema:Thing" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:ClinicalTrial", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "ClinicalTrial", - "rdfs:subClassOf": { - "@id": "bts:ClinicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:ClinicalIntervention", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "ClinicalIntervention", - "rdfs:subClassOf": { - "@id": "bts:ClinicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Device", - "@type": "rdfs:Class", - "rdfs:comment": "A thing made or adapted for a particular purpose, especially a piece of mechanical or electronic equipment", - "rdfs:label": "Device", - "rdfs:subClassOf": { - "@id": "schema:Thing" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GenomicEntity", - "@type": "rdfs:Class", - "rdfs:comment": "an entity that can either be directly located on a genome (gene, transcript, exon, regulatory region) or is encoded in a genome (protein)", - "rdfs:label": "GenomicEntity", - "rdfs:subClassOf": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Genome", - "@type": "rdfs:Class", - "rdfs:comment": "A genome is the sum of genetic material within a cell or virion.", - "rdfs:label": "Genome", - "rdfs:subClassOf": { - "@id": "bts:GenomicEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": 
"bts:Transcript", - "@type": "rdfs:Class", - "rdfs:comment": "An RNA synthesized on a DNA or RNA template by an RNA polymerase", - "rdfs:label": "Transcript", - "rdfs:subClassOf": { - "@id": "bts:GenomicEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Exon", - "@type": "rdfs:Class", - "rdfs:comment": "A region of the transcript sequence within a gene which is not removed from the primary RNA transcript by RNA splicing", - "rdfs:label": "Exon", - "rdfs:subClassOf": { - "@id": "bts:GenomicEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:CodingSequence", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "CodingSequence", - "rdfs:subClassOf": { - "@id": "bts:GenomicEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:MacromolecularMachine", - "@type": "rdfs:Class", - "rdfs:comment": "A union of gene, gene product, and macromolecular complex. These are the basic units of function in a cell. They either carry out individual biological activities, or they encode molecules which do this.", - "rdfs:label": "MacromolecularMachine", - "rdfs:subClassOf": { - "@id": "bts:GenomicEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GeneOrGeneProduct", - "@type": "rdfs:Class", - "rdfs:comment": "a union of genes or gene products. 
Frequently an identifier for one will be used as proxy for another", - "rdfs:label": "GeneOrGeneProduct", - "rdfs:subClassOf": { - "@id": "bts:MacromolecularMachine" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Gene", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "Gene", - "rdfs:subClassOf": { - "@id": "bts:GeneOrGeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GeneProduct", - "@type": "rdfs:Class", - "rdfs:comment": "The functional molecular product of a single gene. Gene products are either proteins or functional RNA molecules", - "rdfs:label": "GeneProduct", - "rdfs:subClassOf": { - "@id": "bts:GeneOrGeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Protein", - "@type": "rdfs:Class", - "rdfs:comment": "A gene product that is composed of a chain of amino acid sequences and is produced by ribosome-mediated translation of mRNA", - "rdfs:label": "Protein", - "rdfs:subClassOf": { - "@id": "bts:GeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GeneProductIsoform", - "@type": "rdfs:Class", - "rdfs:comment": "This is an abstract class that can be mixed in with different kinds of gene products to indicate that the gene product is intended to represent a specific isoform rather than a canonical or reference or generic product. The designation of canonical or reference may be arbitrary, or it may represent the superclass of all isoforms.", - "rdfs:label": "GeneProductIsoform", - "rdfs:subClassOf": { - "@id": "bts:GeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:ProteinIsoform", - "@type": "rdfs:Class", - "rdfs:comment": "Represents a protein that is a specific isoform of the canonical or reference protein. 
See https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4114032/", - "rdfs:label": "ProteinIsoform", - "rdfs:subClassOf": { - "@id": "bts:Protein" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:RnaProduct", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "RnaProduct", - "rdfs:subClassOf": { - "@id": "bts:GeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:RnaProductIsoform", - "@type": "rdfs:Class", - "rdfs:comment": "Represents a protein that is a specific isoform of the canonical or reference RNA", - "rdfs:label": "RnaProductIsoform", - "rdfs:subClassOf": { - "@id": "bts:RnaProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:NoncodingRnaProduct", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "NoncodingRnaProduct", - "rdfs:subClassOf": { - "@id": "bts:RnaProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Microrna", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "Microrna", - "rdfs:subClassOf": { - "@id": "bts:NoncodingRnaProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:MacromolecularComplex", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "MacromolecularComplex", - "rdfs:subClassOf": { - "@id": "bts:MacromolecularMachine" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GeneFamily", - "@type": "rdfs:Class", - "rdfs:comment": "any grouping of multiple genes or gene products related by common descent", - "rdfs:label": "GeneFamily", - "rdfs:subClassOf": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Genotype", - "@type": "rdfs:Class", - "rdfs:comment": "An information content entity that describes a genome by specifying the total 
variation in genomic sequence and/or gene expression, relative to some extablished background", - "rdfs:label": "Genotype", - "rdfs:subClassOf": { - "@id": "bts:GenomicEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Haplotype", - "@type": "rdfs:Class", - "rdfs:comment": "A set of zero or more Alleles on a single instance of a Sequence[VMC]", - "rdfs:label": "Haplotype", - "rdfs:subClassOf": { - "@id": "bts:GenomicEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:SequenceVariant", - "@type": "rdfs:Class", - "rdfs:comment": "An allele that varies in its sequence from what is considered the reference allele at that locus.", - "rdfs:label": "SequenceVariant", - "rdfs:subClassOf": { - "@id": "bts:GenomicEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:DrugExposure", - "@type": "rdfs:Class", - "rdfs:comment": "A drug exposure is an intake of a particular chemical substance", - "rdfs:label": "DrugExposure", - "rdfs:subClassOf": { - "@id": "bts:Environment" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Treatment", - "@type": "rdfs:Class", - "rdfs:comment": "A treatment is targeted at a disease or phenotype and may involve multiple drug 'exposures'", - "rdfs:label": "Treatment", - "rdfs:subClassOf": { - "@id": "bts:Environment" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GeographicLocation", - "@type": "rdfs:Class", - "rdfs:comment": "a location that can be described in lat/long coordinates", - "rdfs:label": "GeographicLocation", - "rdfs:subClassOf": { - "@id": "bts:PlanetaryEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GeographicLocationAtTime", - "@type": "rdfs:Class", - "rdfs:comment": "a location that can be described in lat/long coordinates, for a particular time", - 
"rdfs:label": "GeographicLocationAtTime", - "rdfs:subClassOf": { - "@id": "bts:GeographicLocation" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Occurrent", - "@type": "rdfs:Class", - "rdfs:comment": "A processual entity", - "rdfs:label": "Occurrent", - "rdfs:subClassOf": { - "@id": "schema:Thing" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:BiologicalProcessOrActivity", - "@type": "rdfs:Class", - "rdfs:comment": "Either an individual molecular activity, or a collection of causally connected molecular activities", - "rdfs:label": "BiologicalProcessOrActivity", - "rdfs:subClassOf": { - "@id": "bts:BiologicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:MolecularActivity", - "@type": "rdfs:Class", - "rdfs:comment": "An execution of a molecular function carried out by a gene product or macromolecular complex.", - "rdfs:label": "MolecularActivity", - "rdfs:subClassOf": { - "@id": "bts:BiologicalProcessOrActivity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:ActivityAndBehavior", - "@type": "rdfs:Class", - "rdfs:comment": "Activity or behavior of any independent integral living, organization or mechanical actor in the world", - "rdfs:label": "ActivityAndBehavior", - "rdfs:subClassOf": { - "@id": "bts:Occurrent" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Procedure", - "@type": "rdfs:Class", - "rdfs:comment": "A series of actions conducted in a certain order or manner", - "rdfs:label": "Procedure", - "rdfs:subClassOf": { - "@id": "bts:Occurrent" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Phenomenon", - "@type": "rdfs:Class", - "rdfs:comment": "a fact or situation that is observed to exist or happen, especially one whose cause or explanation is in question", - "rdfs:label": 
"Phenomenon", - "rdfs:subClassOf": { - "@id": "bts:Occurrent" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:BiologicalProcess", - "@type": "rdfs:Class", - "rdfs:comment": "One or more causally connected executions of molecular functions", - "rdfs:label": "BiologicalProcess", - "rdfs:subClassOf": { - "@id": "bts:BiologicalProcessOrActivity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Pathway", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "Pathway", - "rdfs:subClassOf": { - "@id": "bts:BiologicalProcess" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:PhysiologicalProcess", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "PhysiologicalProcess", - "rdfs:subClassOf": { - "@id": "bts:BiologicalProcess" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:CellularComponent", - "@type": "rdfs:Class", - "rdfs:comment": "A location in or around a cell", - "rdfs:label": "CellularComponent", - "rdfs:subClassOf": { - "@id": "bts:AnatomicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Cell", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "Cell", - "rdfs:subClassOf": { - "@id": "bts:AnatomicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:CellLine", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "CellLine", - "rdfs:subClassOf": { - "@id": "bts:Biosample" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GrossAnatomicalStructure", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "GrossAnatomicalStructure", - "rdfs:subClassOf": { - "@id": "bts:AnatomicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:ensembl", - 
"@type": "rdf:Property", - "rdfs:comment": "Ensembl ID for gene, protein or transcript", - "rdfs:label": "ensembl", - "schema:domainIncludes": [ - { - "@id": "bts:Transcript" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "schema:Text" - } - }, - { - "@id": "bts:hgnc", - "@type": "rdf:Property", - "rdfs:comment": "HGNC ID for gene", - "rdfs:label": "hgnc", - "schema:domainIncludes": { - "@id": "bts:Gene" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "schema:Integer" - } - }, - { - "@id": "bts:entrez", - "@type": "rdf:Property", - "rdfs:comment": "Entrez ID for gene", - "rdfs:label": "entrez", - "schema:domainIncludes": { - "@id": "bts:Gene" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "schema:Integer" - } - }, - { - "@id": "bts:refseq", - "@type": "rdf:Property", - "rdfs:comment": "Refseq ID for gene, protein or transcript", - "rdfs:label": "refseq", - "schema:domainIncludes": [ - { - "@id": "bts:Transcript" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "schema:Text" - } - }, - { - "@id": "bts:omim", - "@type": "rdf:Property", - "rdfs:comment": "Refseq ID for gene, protein or transcript", - "rdfs:label": "omim", - "schema:domainIncludes": [ - { - "@id": "bts:Disease" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "schema:Integer" - } - }, - { - "@id": "bts:umls", - "@type": "rdf:Property", - "rdfs:comment": "Refseq ID for gene, protein or transcript", - "rdfs:label": "umls", - "schema:domainIncludes": { - "@id": "bts:Disease" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "schema:Text" - } - }, - { - "@id": "bts:homologousTo", - "@type": "rdf:Property", - "rdfs:comment": "Shared ancestry 
between protein or gene", - "rdfs:label": "homologousTo", - "schema:domainIncludes": { - "@id": "bts:GeneOrGeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GeneOrGeneProduct" - } - }, - { - "@id": "bts:molecularlyInteractsWith", - "@type": "rdf:Property", - "rdfs:comment": null, - "rdfs:label": "molecularlyInteractsWith", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:geneticallyInteractsWith", - "@type": "rdf:Property", - "rdfs:comment": "holds between two genes whose phenotypic effects are dependent on each other in some way - such that their combined phenotypic effects are the result of some interaction between the activity of their gene products. Examples include epistasis and synthetic lethality.", - "rdfs:label": "geneticallyInteractsWith", - "schema:domainIncludes": { - "@id": "bts:Gene" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:Gene" - } - }, - { - "@id": "bts:affectsAbundanceOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one changes the amount of the other within a system of interest", - "rdfs:label": "affectsAbundanceOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesAbundanceOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the amount of the other within a system of interest", - "rdfs:label": "increasesAbundanceOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": 
"http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesAbundanceOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the amount of the other within a system of interest", - "rdfs:label": "decreasesAbundanceOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsActivityOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one changes the activity of the other within a system of interest", - "rdfs:label": "affectsActivityOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesActivityOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the activity of the other within a system of interest", - "rdfs:label": "increasesActivityOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesActivityOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the activity of the other within a system of interest", - "rdfs:label": "decreasesActivityOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsExpressionOf", - "@type": 
"rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one changes the level of expression of the other within a system of interest", - "rdfs:label": "affectsExpressionOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GenomicEntity" - } - }, - { - "@id": "bts:increasesExpressionOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the level of expression of the other within a system of interest", - "rdfs:label": "increasesExpressionOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GenomicEntity" - } - }, - { - "@id": "bts:decreasesExpressionOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the level of expression of the other within a system of interest", - "rdfs:label": "decreasesExpressionOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GenomicEntity" - } - }, - { - "@id": "bts:affectsFoldingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one changes the rate or quality of folding of the other ", - "rdfs:label": "affectsFoldingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesFoldingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the rate or quality of 
folding of the other ", - "rdfs:label": "increasesFoldingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesFoldingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the rate or quality of folding of the other ", - "rdfs:label": "decreasesFoldingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsLocalizationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one changes the localization of the other within a system of interest", - "rdfs:label": "affectsLocalizationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesLocalizationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the proper localization of the other within a system of interest", - "rdfs:label": "increasesLocalizationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesLocalizationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the proper localization of the other within a system of interest", - "rdfs:label": "decreasesLocalizationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" 
- }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsMetabolicProcessingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one impacts the metabolic processing of the other within a system of interest", - "rdfs:label": "affectsMetabolicProcessingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesMetabolicProcessingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the rate of metabolic processing of the other within a system of interest", - "rdfs:label": "increasesMetabolicProcessingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesMetabolicProcessingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the rate of metabolic processing of the other within a system of interest", - "rdfs:label": "decreasesMetabolicProcessingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsMolecularModificationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one leads changes in the molecular modification(s) of the other (e.g. 
via post-translational modifications of proteins such as the addition of phosphoryl group, or via redox reaction that adds or subtracts electrons)", - "rdfs:label": "affectsMolecularModificationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesMolecularModificationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one leads to increased molecular modification(s) of the other (e.g. via post-translational modifications of proteins such as the addition of phosphoryl group, or via redox reaction that adds or subtracts electrons)", - "rdfs:label": "increasesMolecularModificationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesMolecularModificationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one leads to decreased molecular modification(s) of the other (e.g. 
via post-translational modifications of proteins such as the addition of phosphoryl group, or via redox reaction that adds or subtracts electrons)", - "rdfs:label": "decreasesMolecularModificationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsSynthesisOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one impacts the rate of chemical synthesis of the other", - "rdfs:label": "affectsSynthesisOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesSynthesisOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the rate of chemical synthesis of the other", - "rdfs:label": "increasesSynthesisOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesSynthesisOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the rate of chemical synthesis of the other", - "rdfs:label": "decreasesSynthesisOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsDegradationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one impacts the rate of degradation of the other within a system of interest", - 
"rdfs:label": "affectsDegradationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesDegradationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the rate of degradation of the other within a system of interest", - "rdfs:label": "increasesDegradationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesDegradationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the rate of degradation of the other within a system of interest", - "rdfs:label": "decreasesDegradationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsMutationRateOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between a molecular entity and a genomic entity where the action or effect of the molecular entity impacts the rate of mutation of the genomic entity within a system of interest", - "rdfs:label": "affectsMutationRateOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GenomicEntity" - } - }, - { - "@id": "bts:increasesMutationRateOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between a molecular entity and a genomic entity where the action or effect of the molecular entity increases the rate of mutation of the genomic entity within a system of interest", - "rdfs:label": 
"increasesMutationRateOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GenomicEntity" - } - }, - { - "@id": "bts:decreasesMutationRateOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between a molecular entity and a genomic entity where the action or effect of the molecular entity decreases the rate of mutation of the genomic entity within a system of interest", - "rdfs:label": "decreasesMutationRateOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GenomicEntity" - } - }, - { - "@id": "bts:affectsResponseTo", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one impacts the susceptibility of a biological entity or system (e.g. an organism, cell, cellular component, macromolecular machine, biological or pathological process) to the other", - "rdfs:label": "affectsResponseTo", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesResponseTo", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the susceptibility of a biological entity or system (e.g. 
an organism, cell, cellular component, macromolecular machine, biological or pathological process) to the other", - "rdfs:label": "increasesResponseTo", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesResponseTo", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the susceptibility of a biological entity or system (e.g. an organism, cell, cellular component, macromolecular machine, biological or pathological process) to the other", - "rdfs:label": "decreasesResponseTo", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsSplicingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between a molecular entity and an mRNA where the action or effect of the molecular entity impacts the splicing of the mRNA", - "rdfs:label": "affectsSplicingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:Transcript" - } - }, - { - "@id": "bts:increasesSplicingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between a molecular entity and an mRNA where the action or effect of the molecular entity increases the proper splicing of the mRNA", - "rdfs:label": "increasesSplicingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:Transcript" - } - }, - { - "@id": "bts:decreasesSplicingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between a molecular entity and an mRNA where the action or effect of the 
molecular entity decreases the proper splicing of the mRNA", - "rdfs:label": "decreasesSplicingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:Transcript" - } - }, - { - "@id": "bts:affectsStabilityOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one impacts the stability of the other within a system of interest", - "rdfs:label": "affectsStabilityOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesStabilityOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the stability of the other within a system of interest", - "rdfs:label": "increasesStabilityOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesStabilityOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the stability of the other within a system of interest", - "rdfs:label": "decreasesStabilityOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsTransportOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one impacts the rate of transport of the other across some boundary in a system of interest", - "rdfs:label": "affectsTransportOf", - "schema:domainIncludes": { - 
"@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesTransportOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the rate of transport of the other across some boundary in a system of interest", - "rdfs:label": "increasesTransportOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesTransportOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the rate of transport of the other across some boundary in a system of interest", - "rdfs:label": "decreasesTransportOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsSecretionOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one impacts the rate of secretion of the other out of a cell, gland, or organ", - "rdfs:label": "affectsSecretionOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesSecretionOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the rate of secretion of the other out of a cell, gland, or organ", - "rdfs:label": "increasesSecretionOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": 
"http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesSecretionOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the rate of secretion of the other out of a cell, gland, or organ", - "rdfs:label": "decreasesSecretionOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsUptakeOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one impacts the rate of uptake of the other into of a cell, gland, or organ", - "rdfs:label": "affectsUptakeOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesUptakeOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the rate of uptake of the other into of a cell, gland, or organ", - "rdfs:label": "increasesUptakeOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesUptakeOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the rate of uptake of the other into of a cell, gland, or organ", - "rdfs:label": "decreasesUptakeOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": 
"bts:regulates,ProcessToProcess", - "@type": "rdf:Property", - "rdfs:comment": null, - "rdfs:label": "regulates,ProcessToProcess", - "schema:domainIncludes": { - "@id": "bts:Occurrent" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:Occurrent" - } - }, - { - "@id": "bts:regulates,EntityToEntity", - "@type": "rdf:Property", - "rdfs:comment": null, - "rdfs:label": "regulates,EntityToEntity", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:hasGeneProduct", - "@type": "rdf:Property", - "rdfs:comment": "holds between a gene and a transcribed and/or translated product generated from it", - "rdfs:label": "hasGeneProduct", - "schema:domainIncludes": { - "@id": "bts:Gene" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GeneProduct" - } - }, - { - "@id": "bts:inPathwayWith", - "@type": "rdf:Property", - "rdfs:comment": "holds between two genes or gene products that are part of in the same biological pathway", - "rdfs:label": "inPathwayWith", - "schema:domainIncludes": { - "@id": "bts:GeneOrGeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GeneOrGeneProduct" - } - }, - { - "@id": "bts:inComplexWith", - "@type": "rdf:Property", - "rdfs:comment": "holds between two genes or gene products that are part of (or code for products that are part of) in the same macromolecular complex", - "rdfs:label": "inComplexWith", - "schema:domainIncludes": { - "@id": "bts:GeneOrGeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GeneOrGeneProduct" - } - }, - { - "@id": "bts:inCellPopulationWith", - "@type": "rdf:Property", - "rdfs:comment": "holds between 
two genes or gene products that are expressed in the same cell type or population ", - "rdfs:label": "inCellPopulationWith", - "schema:domainIncludes": { - "@id": "bts:GeneOrGeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GeneOrGeneProduct" - } - }, - { - "@id": "bts:geneAssociatedWithCondition", - "@type": "rdf:Property", - "rdfs:comment": "holds between a gene and a disease or phenotypic feature that the gene or its alleles/products may influence, contribute to, or correlate with", - "rdfs:label": "geneAssociatedWithCondition", - "schema:domainIncludes": { - "@id": "bts:Gene" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:DiseaseOrPhenotypicFeature" - } - }, - { - "@id": "bts:treats", - "@type": "rdf:Property", - "rdfs:comment": "holds between a therapeutic procedure or chemical substance and a disease or phenotypic feature that it is used to treat", - "rdfs:label": "treats", - "schema:domainIncludes": { - "@id": "bts:Treatment" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:DiseaseOrPhenotypicFeature" - } - }, - { - "@id": "bts:correlatedWith", - "@type": "rdf:Property", - "rdfs:comment": "holds between a disease or phenotypic feature and a measurable molecular entity that is used as an indicator of the presence or state of the disease or feature.", - "rdfs:label": "correlatedWith", - "schema:domainIncludes": { - "@id": "bts:DiseaseOrPhenotypicFeature" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:hasBiomarker", - "@type": "rdf:Property", - "rdfs:comment": "holds between a disease or phenotypic feature and a measurable molecular entity that is used as an indicator of the presence or state of the disease or feature.", - "rdfs:label": 
"hasBiomarker", - "schema:domainIncludes": { - "@id": "bts:DiseaseOrPhenotypicFeature" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:biomarkerFor", - "@type": "rdf:Property", - "rdfs:comment": "holds between a measurable molecular entity and a disease or phenotypic feature, where the entity is used as an indicator of the presence or state of the disease or feature.", - "rdfs:label": "biomarkerFor", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:DiseaseOrPhenotypicFeature" - } - }, - { - "@id": "bts:expressedIn", - "@type": "rdf:Property", - "rdfs:comment": "holds between a gene or gene product and an anatomical entity in which it is expressed", - "rdfs:label": "expressedIn", - "schema:domainIncludes": { - "@id": "bts:GeneOrGeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:AnatomicalEntity" - } - }, - { - "@id": "bts:expresses", - "@type": "rdf:Property", - "rdfs:comment": "holds between an anatomical entity and gene or gene product that is expressed there", - "rdfs:label": "expresses", - "schema:domainIncludes": { - "@id": "bts:AnatomicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GeneOrGeneProduct" - } - }, - { - "@id": "bts:hasPhenotype", - "@type": "rdf:Property", - "rdfs:comment": "holds between a biological entity and a phenotype, where a phenotype is construed broadly as any kind of quality of an organism part, a collection of these qualities, or a change in quality or qualities (e.g. abnormally increased temperature). 
", - "rdfs:label": "hasPhenotype", - "schema:domainIncludes": { - "@id": "bts:BiologicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:DiseaseOrPhenotypicFeature" - } - }, - { - "@id": "bts:precedes", - "@type": "rdf:Property", - "rdfs:comment": "holds between two processes, where one completes before the other begins", - "rdfs:label": "precedes", - "schema:domainIncludes": { - "@id": "bts:Occurrent" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:Occurrent" - } - }, - { - "@id": "bts:subclassOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two classes where the domain class is a specialization of the range class", - "rdfs:label": "subclassOf", - "schema:domainIncludes": { - "@id": "bts:OntologyClass" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:OntologyClass" - } - }, { "@id": "bts:Patient", "@type": "rdfs:Class", @@ -1986,7 +21,6 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Patient", - "sms:required": "sms:false", "sms:requiresDependency": [ { "@id": "bts:PatientID" @@ -2047,50 +81,164 @@ "@id": "bts:Other" } ], - "sms:displayName": "Sex", - "sms:required": "sms:true", + "sms:displayName": "Sex", + "sms:required": "sms:true", + "sms:validationRules": [] + }, + { + "@id": "bts:YearofBirth", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "YearofBirth", + "rdfs:subClassOf": [ + { + "@id": "bts:DataProperty" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "Year of Birth", + "sms:validationRules": [] + }, + { + "@id": "bts:Diagnosis", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "Diagnosis", + "rdfs:subClassOf": [ + { + "@id": "bts:DataProperty" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "schema:rangeIncludes": [ 
+ { + "@id": "bts:Healthy" + }, + { + "@id": "bts:Cancer" + } + ], + "sms:displayName": "Diagnosis", + "sms:required": "sms:true", + "sms:validationRules": [] + }, + { + "@id": "bts:Component", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "Component", + "rdfs:subClassOf": [ + { + "@id": "bts:Thing" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "Component", + "sms:validationRules": [] + }, + { + "@id": "bts:DataType", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "DataType", + "rdfs:subClassOf": [ + { + "@id": "bts:Thing" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "DataType", + "sms:validationRules": [] + }, + { + "@id": "bts:DataProperty", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "DataProperty", + "rdfs:subClassOf": [ + { + "@id": "bts:Thing" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "DataProperty", + "sms:validationRules": [] + }, + { + "@id": "bts:Female", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "Female", + "rdfs:subClassOf": [ + { + "@id": "bts:Sex" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "Female", "sms:validationRules": [] }, { - "@id": "bts:YearofBirth", + "@id": "bts:Male", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "YearofBirth", + "rdfs:label": "Male", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:Sex" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Year of Birth", - "sms:required": "sms:false", + "sms:displayName": "Male", "sms:validationRules": [] }, { - "@id": "bts:Diagnosis", + "@id": "bts:Other", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Diagnosis", + "rdfs:label": "Other", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:Sex" } ], 
"schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "schema:rangeIncludes": [ + "sms:displayName": "Other", + "sms:validationRules": [] + }, + { + "@id": "bts:Healthy", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "Healthy", + "rdfs:subClassOf": [ { - "@id": "bts:Healthy" + "@id": "bts:Diagnosis" }, { - "@id": "bts:Cancer" + "@id": "bts:TissueStatus" } ], - "sms:displayName": "Diagnosis", - "sms:required": "sms:true", + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "Healthy", "sms:validationRules": [] }, { @@ -2101,13 +249,15 @@ "rdfs:subClassOf": [ { "@id": "bts:ValidValue" + }, + { + "@id": "bts:Diagnosis" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, "sms:displayName": "Cancer", - "sms:required": "sms:false", "sms:requiresDependency": [ { "@id": "bts:CancerType" @@ -2189,289 +339,121 @@ ] }, { - "@id": "bts:Biospecimen", + "@id": "bts:ValidValue", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Biospecimen", + "rdfs:label": "ValidValue", "rdfs:subClassOf": [ { - "@id": "bts:DataType" + "@id": "bts:Thing" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Biospecimen", - "sms:required": "sms:false", - "sms:requiresComponent": [ - { - "@id": "bts:Patient" - } - ], - "sms:requiresDependency": [ - { - "@id": "bts:SampleID" - }, - { - "@id": "bts:PatientID" - }, - { - "@id": "bts:TissueStatus" - }, - { - "@id": "bts:Component" - } - ], + "sms:displayName": "ValidValue", "sms:validationRules": [] }, { - "@id": "bts:SampleID", + "@id": "bts:Breast", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "SampleID", + "rdfs:label": "Breast", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "sms:displayName": "Sample ID", - "sms:required": "sms:true", - "sms:validationRules": [] - }, - { - "@id": "bts:TissueStatus", - "@type": "rdfs:Class", - 
"rdfs:comment": "TBD", - "rdfs:label": "TissueStatus", - "rdfs:subClassOf": [ + "@id": "bts:CancerType" + }, { - "@id": "bts:DataProperty" + "@id": "bts:FamilyHistory" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "schema:rangeIncludes": [ - { - "@id": "bts:Healthy" - }, - { - "@id": "bts:Malignant" - } - ], - "sms:displayName": "Tissue Status", - "sms:required": "sms:true", + "sms:displayName": "Breast", "sms:validationRules": [] }, { - "@id": "bts:BulkRNA-seqAssay", + "@id": "bts:Colorectal", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "BulkRNA-seqAssay", + "rdfs:label": "Colorectal", "rdfs:subClassOf": [ { - "@id": "bts:DataType" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "sms:displayName": "Bulk RNA-seq Assay", - "sms:required": "sms:false", - "sms:requiresComponent": [ - { - "@id": "bts:Biospecimen" - } - ], - "sms:requiresDependency": [ - { - "@id": "bts:Filename" - }, - { - "@id": "bts:SampleID" - }, - { - "@id": "bts:FileFormat" + "@id": "bts:CancerType" }, { - "@id": "bts:Component" - } - ], - "sms:validationRules": [] - }, - { - "@id": "bts:Filename", - "@type": "rdfs:Class", - "rdfs:comment": "TBD", - "rdfs:label": "Filename", - "rdfs:subClassOf": [ - { - "@id": "bts:DataProperty" + "@id": "bts:FamilyHistory" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Filename", - "sms:required": "sms:true", + "sms:displayName": "Colorectal", "sms:validationRules": [] }, { - "@id": "bts:FileFormat", + "@id": "bts:Lung", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "FileFormat", + "rdfs:label": "Lung", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": [ - { - "@id": "bts:FASTQ" - }, - { - "@id": "bts:BAM" - }, - { - "@id": "bts:CRAM" + "@id": "bts:CancerType" }, { - "@id": "bts:CSV/TSV" - } - ], - "sms:displayName": "File Format", - 
"sms:required": "sms:true", - "sms:validationRules": [] - }, - { - "@id": "bts:BAM", - "@type": "rdfs:Class", - "rdfs:comment": "TBD", - "rdfs:label": "BAM", - "rdfs:subClassOf": [ - { - "@id": "bts:ValidValue" + "@id": "bts:FamilyHistory" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "BAM", - "sms:required": "sms:false", - "sms:requiresDependency": [ - { - "@id": "bts:GenomeBuild" - } - ], + "sms:displayName": "Lung", "sms:validationRules": [] }, { - "@id": "bts:CRAM", + "@id": "bts:Prostate", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CRAM", + "rdfs:label": "Prostate", "rdfs:subClassOf": [ { - "@id": "bts:ValidValue" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "sms:displayName": "CRAM", - "sms:required": "sms:false", - "sms:requiresDependency": [ - { - "@id": "bts:GenomeBuild" + "@id": "bts:CancerType" }, { - "@id": "bts:GenomeFASTA" - } - ], - "sms:validationRules": [] - }, - { - "@id": "bts:CSV/TSV", - "@type": "rdfs:Class", - "rdfs:comment": "TBD", - "rdfs:label": "CSV/TSV", - "rdfs:subClassOf": [ - { - "@id": "bts:ValidValue" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "sms:displayName": "CSV/TSV", - "sms:required": "sms:false", - "sms:requiresDependency": [ - { - "@id": "bts:GenomeBuild" - } - ], - "sms:validationRules": [] - }, - { - "@id": "bts:GenomeBuild", - "@type": "rdfs:Class", - "rdfs:comment": "TBD", - "rdfs:label": "GenomeBuild", - "rdfs:subClassOf": [ - { - "@id": "bts:DataProperty" + "@id": "bts:FamilyHistory" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "schema:rangeIncludes": [ - { - "@id": "bts:GRCh37" - }, - { - "@id": "bts:GRCh38" - }, - { - "@id": "bts:GRCm38" - }, - { - "@id": "bts:GRCm39" - } - ], - "sms:displayName": "Genome Build", - "sms:required": "sms:true", + "sms:displayName": "Prostate", "sms:validationRules": [] }, { - "@id": "bts:GenomeFASTA", + "@id": "bts:Skin", "@type": 
"rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "GenomeFASTA", + "rdfs:label": "Skin", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:CancerType" + }, + { + "@id": "bts:FamilyHistory" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Genome FASTA", - "sms:required": "sms:true", + "sms:displayName": "Skin", "sms:validationRules": [] }, { - "@id": "bts:MockComponent", + "@id": "bts:Biospecimen", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "MockComponent", + "rdfs:label": "Biospecimen", "rdfs:subClassOf": [ { "@id": "bts:DataType" @@ -2480,80 +462,50 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "MockComponent", - "sms:required": "sms:false", - "sms:requiresDependency": [ - { - "@id": "bts:Component" - }, - { - "@id": "bts:CheckList" - }, - { - "@id": "bts:CheckRegexList" - }, - { - "@id": "bts:CheckRegexSingle" - }, - { - "@id": "bts:CheckRegexFormat" - }, - { - "@id": "bts:CheckRegexInteger" - }, - { - "@id": "bts:CheckNum" - }, - { - "@id": "bts:CheckFloat" - }, - { - "@id": "bts:CheckInt" - }, - { - "@id": "bts:CheckString" - }, - { - "@id": "bts:CheckURL" - }, - { - "@id": "bts:CheckMatchatLeast" - }, - { - "@id": "bts:CheckMatchatLeastvalues" - }, - { - "@id": "bts:CheckMatchExactly" - }, - { - "@id": "bts:CheckMatchExactlyvalues" - }, + "sms:displayName": "Biospecimen", + "sms:requiresComponent": [ { - "@id": "bts:CheckRecommended" - }, + "@id": "bts:Patient" + } + ], + "sms:requiresDependency": [ { - "@id": "bts:CheckAges" + "@id": "bts:SampleID" }, { - "@id": "bts:CheckUnique" + "@id": "bts:PatientID" }, { - "@id": "bts:CheckRange" + "@id": "bts:TissueStatus" }, { - "@id": "bts:CheckDate" - }, + "@id": "bts:Component" + } + ], + "sms:validationRules": [] + }, + { + "@id": "bts:SampleID", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "SampleID", + "rdfs:subClassOf": [ { - "@id": "bts:CheckNA" + "@id": "bts:DataProperty" } ], + 
"schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "Sample ID", + "sms:required": "sms:true", "sms:validationRules": [] }, { - "@id": "bts:CheckList", + "@id": "bts:TissueStatus", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckList", + "rdfs:label": "TissueStatus", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2564,68 +516,72 @@ }, "schema:rangeIncludes": [ { - "@id": "bts:Ab" - }, - { - "@id": "bts:Cd" - }, - { - "@id": "bts:Ef" + "@id": "bts:Healthy" }, { - "@id": "bts:Gh" + "@id": "bts:Malignant" } ], - "sms:displayName": "Check List", + "sms:displayName": "Tissue Status", "sms:required": "sms:true", - "sms:validationRules": [ - "list strict" - ] + "sms:validationRules": [] }, { - "@id": "bts:CheckRegexList", + "@id": "bts:Malignant", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckRegexList", + "rdfs:label": "Malignant", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:TissueStatus" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Regex List", - "sms:required": "sms:true", - "sms:validationRules": [ - "list strict", - "regex match [a-f]" - ] + "sms:displayName": "Malignant", + "sms:validationRules": [] }, { - "@id": "bts:CheckRegexSingle", + "@id": "bts:BulkRNA-seqAssay", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckRegexSingle", + "rdfs:label": "BulkRNA-seqAssay", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:DataType" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Regex Single", - "sms:required": "sms:true", - "sms:validationRules": [ - "regex search [a-f]" - ] + "sms:displayName": "Bulk RNA-seq Assay", + "sms:requiresComponent": [ + { + "@id": "bts:Biospecimen" + } + ], + "sms:requiresDependency": [ + { + "@id": "bts:Filename" + }, + { + "@id": "bts:SampleID" + }, + { + "@id": "bts:FileFormat" + }, + { + "@id": "bts:Component" + } + 
], + "sms:validationRules": [] }, { - "@id": "bts:CheckRegexFormat", + "@id": "bts:Filename", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckRegexFormat", + "rdfs:label": "Filename", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2634,17 +590,15 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Regex Format", + "sms:displayName": "Filename", "sms:required": "sms:true", - "sms:validationRules": [ - "regex match [a-f]" - ] + "sms:validationRules": [] }, { - "@id": "bts:CheckRegexInteger", + "@id": "bts:FileFormat", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckRegexInteger", + "rdfs:label": "FileFormat", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2653,93 +607,120 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Regex Integer", + "schema:rangeIncludes": [ + { + "@id": "bts:FASTQ" + }, + { + "@id": "bts:BAM" + }, + { + "@id": "bts:CRAM" + }, + { + "@id": "bts:CSV/TSV" + } + ], + "sms:displayName": "File Format", "sms:required": "sms:true", - "sms:validationRules": [ - "regex search ^\\d+$" - ] + "sms:validationRules": [] }, { - "@id": "bts:CheckNum", + "@id": "bts:FASTQ", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckNum", + "rdfs:label": "FASTQ", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:FileFormat" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Num", - "sms:required": "sms:true", - "sms:validationRules": [ - "num" - ] + "sms:displayName": "FASTQ", + "sms:validationRules": [] }, { - "@id": "bts:CheckFloat", + "@id": "bts:BAM", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckFloat", + "rdfs:label": "BAM", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:ValidValue" + }, + { + "@id": "bts:FileFormat" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Float", - 
"sms:required": "sms:true", - "sms:validationRules": [ - "float" - ] + "sms:displayName": "BAM", + "sms:requiresDependency": [ + { + "@id": "bts:GenomeBuild" + } + ], + "sms:validationRules": [] }, { - "@id": "bts:CheckInt", + "@id": "bts:CRAM", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckInt", + "rdfs:label": "CRAM", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:ValidValue" + }, + { + "@id": "bts:FileFormat" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Int", - "sms:required": "sms:true", - "sms:validationRules": [ - "int" - ] + "sms:displayName": "CRAM", + "sms:requiresDependency": [ + { + "@id": "bts:GenomeBuild" + }, + { + "@id": "bts:GenomeFASTA" + } + ], + "sms:validationRules": [] }, { - "@id": "bts:CheckString", + "@id": "bts:CSV/TSV", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckString", + "rdfs:label": "CSV/TSV", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:ValidValue" + }, + { + "@id": "bts:FileFormat" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check String", - "sms:required": "sms:true", - "sms:validationRules": [ - "str" - ] + "sms:displayName": "CSV/TSV", + "sms:requiresDependency": [ + { + "@id": "bts:GenomeBuild" + } + ], + "sms:validationRules": [] }, { - "@id": "bts:CheckURL", + "@id": "bts:GenomeBuild", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckURL", + "rdfs:label": "GenomeBuild", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2748,17 +729,29 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check URL", + "schema:rangeIncludes": [ + { + "@id": "bts:GRCh37" + }, + { + "@id": "bts:GRCh38" + }, + { + "@id": "bts:GRCm38" + }, + { + "@id": "bts:GRCm39" + } + ], + "sms:displayName": "Genome Build", "sms:required": "sms:true", - "sms:validationRules": [ - "url" - ] + "sms:validationRules": [] }, { - "@id": 
"bts:CheckMatchatLeast", + "@id": "bts:GenomeFASTA", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckMatchatLeast", + "rdfs:label": "GenomeFASTA", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2767,112 +760,160 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Match at Least", + "sms:displayName": "Genome FASTA", "sms:required": "sms:true", - "sms:validationRules": [ - "matchAtLeastOne Patient.PatientID set" - ] + "sms:validationRules": [] }, { - "@id": "bts:CheckMatchExactly", + "@id": "bts:GRCh37", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckMatchExactly", + "rdfs:label": "GRCh37", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:GenomeBuild" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Match Exactly", - "sms:required": "sms:true", - "sms:validationRules": [ - "matchExactlyOne MockComponent.checkMatchExactly set" - ] + "sms:displayName": "GRCh37", + "sms:validationRules": [] }, { - "@id": "bts:CheckMatchatLeastvalues", + "@id": "bts:GRCh38", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckMatchatLeastvalues", + "rdfs:label": "GRCh38", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:GenomeBuild" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Match at Least values", - "sms:required": "sms:true", - "sms:validationRules": [ - "matchAtLeastOne MockComponent.checkMatchatLeastvalues value" - ] + "sms:displayName": "GRCh38", + "sms:validationRules": [] }, { - "@id": "bts:CheckMatchExactlyvalues", + "@id": "bts:GRCm38", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckMatchExactlyvalues", + "rdfs:label": "GRCm38", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:GenomeBuild" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Match Exactly values", - 
"sms:required": "sms:true", - "sms:validationRules": [ - "matchExactlyOne MockComponent.checkMatchExactlyvalues value" - ] + "sms:displayName": "GRCm38", + "sms:validationRules": [] }, { - "@id": "bts:CheckRecommended", + "@id": "bts:GRCm39", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckRecommended", + "rdfs:label": "GRCm39", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:GenomeBuild" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Recommended", - "sms:required": "sms:false", - "sms:validationRules": [ - "recommended" - ] + "sms:displayName": "GRCm39", + "sms:validationRules": [] }, { - "@id": "bts:CheckAges", + "@id": "bts:MockComponent", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckAges", + "rdfs:label": "MockComponent", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:DataType" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Ages", - "sms:required": "sms:true", - "sms:validationRules": [ - "protectAges" - ] + "sms:displayName": "MockComponent", + "sms:requiresDependency": [ + { + "@id": "bts:Component" + }, + { + "@id": "bts:CheckList" + }, + { + "@id": "bts:CheckRegexList" + }, + { + "@id": "bts:CheckRegexSingle" + }, + { + "@id": "bts:CheckRegexFormat" + }, + { + "@id": "bts:CheckRegexInteger" + }, + { + "@id": "bts:CheckNum" + }, + { + "@id": "bts:CheckFloat" + }, + { + "@id": "bts:CheckInt" + }, + { + "@id": "bts:CheckString" + }, + { + "@id": "bts:CheckURL" + }, + { + "@id": "bts:CheckMatchatLeast" + }, + { + "@id": "bts:CheckMatchatLeastvalues" + }, + { + "@id": "bts:CheckMatchExactly" + }, + { + "@id": "bts:CheckMatchExactlyvalues" + }, + { + "@id": "bts:CheckRecommended" + }, + { + "@id": "bts:CheckAges" + }, + { + "@id": "bts:CheckUnique" + }, + { + "@id": "bts:CheckRange" + }, + { + "@id": "bts:CheckDate" + }, + { + "@id": "bts:CheckNA" + } + ], + "sms:validationRules": [] }, { - 
"@id": "bts:CheckUnique", + "@id": "bts:CheckList", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckUnique", + "rdfs:label": "CheckList", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2881,17 +922,31 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Unique", + "schema:rangeIncludes": [ + { + "@id": "bts:Ab" + }, + { + "@id": "bts:Cd" + }, + { + "@id": "bts:Ef" + }, + { + "@id": "bts:Gh" + } + ], + "sms:displayName": "Check List", "sms:required": "sms:true", "sms:validationRules": [ - "unique error" + "list strict" ] }, { - "@id": "bts:CheckRange", + "@id": "bts:CheckRegexList", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckRange", + "rdfs:label": "CheckRegexList", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2900,17 +955,18 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Range", + "sms:displayName": "Check Regex List", "sms:required": "sms:true", "sms:validationRules": [ - "inRange 50 100 error" + "list strict", + "regex match [a-f]" ] }, { - "@id": "bts:CheckDate", + "@id": "bts:CheckRegexSingle", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckDate", + "rdfs:label": "CheckRegexSingle", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2919,17 +975,17 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Date", + "sms:displayName": "Check Regex Single", "sms:required": "sms:true", "sms:validationRules": [ - "date" + "regex search [a-f]" ] }, { - "@id": "bts:CheckNA", + "@id": "bts:CheckRegexFormat", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckNA", + "rdfs:label": "CheckRegexFormat", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2938,46 +994,36 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check NA", + "sms:displayName": "Check Regex Format", "sms:required": "sms:true", "sms:validationRules": [ - 
"int", - "IsNA" + "regex match [a-f]" ] }, { - "@id": "bts:MockRDB", + "@id": "bts:CheckRegexInteger", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "MockRDB", + "rdfs:label": "CheckRegexInteger", "rdfs:subClassOf": [ { - "@id": "bts:DataType" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "MockRDB", - "sms:required": "sms:false", - "sms:requiresDependency": [ - { - "@id": "bts:Component" - }, - { - "@id": "bts:MockRDBId" - }, - { - "@id": "bts:SourceManifest" - } - ], - "sms:validationRules": [] + "sms:displayName": "Check Regex Integer", + "sms:required": "sms:true", + "sms:validationRules": [ + "regex search ^\\d+$" + ] }, { - "@id": "bts:MockRDBId", + "@id": "bts:CheckNum", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "MockRDBId", + "rdfs:label": "CheckNum", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2986,17 +1032,17 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "MockRDB_id", + "sms:displayName": "Check Num", "sms:required": "sms:true", "sms:validationRules": [ - "int" + "num" ] }, { - "@id": "bts:SourceManifest", + "@id": "bts:CheckFloat", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "SourceManifest", + "rdfs:label": "CheckFloat", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -3005,287 +1051,312 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "SourceManifest", + "sms:displayName": "Check Float", "sms:required": "sms:true", - "sms:validationRules": [] + "sms:validationRules": [ + "float" + ] }, { - "@id": "bts:Component", + "@id": "bts:CheckInt", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Component", + "rdfs:label": "CheckInt", "rdfs:subClassOf": [ { - "@id": "bts:Patient" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Component", - "sms:required": "sms:false", - 
"sms:validationRules": [] + "sms:displayName": "Check Int", + "sms:required": "sms:true", + "sms:validationRules": [ + "int" + ] }, { - "@id": "bts:Female", + "@id": "bts:CheckString", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Female", + "rdfs:label": "CheckString", "rdfs:subClassOf": [ { - "@id": "bts:Sex" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Female", - "sms:required": "sms:false", - "sms:validationRules": [] + "sms:displayName": "Check String", + "sms:required": "sms:true", + "sms:validationRules": [ + "str" + ] }, { - "@id": "bts:Male", + "@id": "bts:CheckURL", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Male", + "rdfs:label": "CheckURL", "rdfs:subClassOf": [ { - "@id": "bts:Sex" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Male", - "sms:required": "sms:false", - "sms:validationRules": [] + "sms:displayName": "Check URL", + "sms:required": "sms:true", + "sms:validationRules": [ + "url" + ] }, { - "@id": "bts:Other", + "@id": "bts:CheckMatchatLeast", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Other", + "rdfs:label": "CheckMatchatLeast", "rdfs:subClassOf": [ { - "@id": "bts:Sex" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Other", - "sms:required": "sms:false", - "sms:validationRules": [] + "sms:displayName": "Check Match at Least", + "sms:required": "sms:true", + "sms:validationRules": [ + "matchAtLeastOne Patient.PatientID set" + ] }, { - "@id": "bts:Healthy", + "@id": "bts:CheckMatchatLeastvalues", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Healthy", + "rdfs:label": "CheckMatchatLeastvalues", "rdfs:subClassOf": [ { - "@id": "bts:Diagnosis" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Healthy", - 
"sms:required": "sms:false", - "sms:validationRules": [] + "sms:displayName": "Check Match at Least values", + "sms:required": "sms:true", + "sms:validationRules": [ + "matchAtLeastOne MockComponent.checkMatchatLeastvalues value" + ] }, { - "@id": "bts:Breast", + "@id": "bts:CheckMatchExactly", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Breast", + "rdfs:label": "CheckMatchExactly", "rdfs:subClassOf": [ { - "@id": "bts:CancerType" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Breast", - "sms:required": "sms:false", - "sms:validationRules": [] + "sms:displayName": "Check Match Exactly", + "sms:required": "sms:true", + "sms:validationRules": [ + "matchExactlyOne MockComponent.checkMatchExactly set" + ] }, { - "@id": "bts:Colorectal", + "@id": "bts:CheckMatchExactlyvalues", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Colorectal", + "rdfs:label": "CheckMatchExactlyvalues", "rdfs:subClassOf": [ { - "@id": "bts:CancerType" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Colorectal", - "sms:required": "sms:false", - "sms:validationRules": [] + "sms:displayName": "Check Match Exactly values", + "sms:required": "sms:true", + "sms:validationRules": [ + "matchExactlyOne MockComponent.checkMatchExactlyvalues value" + ] }, { - "@id": "bts:Lung", + "@id": "bts:CheckRecommended", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Lung", + "rdfs:label": "CheckRecommended", "rdfs:subClassOf": [ { - "@id": "bts:CancerType" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Lung", - "sms:required": "sms:false", - "sms:validationRules": [] + "sms:displayName": "Check Recommended", + "sms:validationRules": [ + "recommended" + ] }, { - "@id": "bts:Prostate", + "@id": "bts:CheckAges", "@type": "rdfs:Class", "rdfs:comment": "TBD", - 
"rdfs:label": "Prostate", + "rdfs:label": "CheckAges", "rdfs:subClassOf": [ { - "@id": "bts:CancerType" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Prostate", - "sms:required": "sms:false", - "sms:validationRules": [] + "sms:displayName": "Check Ages", + "sms:required": "sms:true", + "sms:validationRules": [ + "protectAges" + ] }, { - "@id": "bts:Skin", + "@id": "bts:CheckUnique", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Skin", + "rdfs:label": "CheckUnique", "rdfs:subClassOf": [ { - "@id": "bts:CancerType" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Skin", - "sms:required": "sms:false", - "sms:validationRules": [] + "sms:displayName": "Check Unique", + "sms:required": "sms:true", + "sms:validationRules": [ + "unique error" + ] }, { - "@id": "bts:Malignant", + "@id": "bts:CheckRange", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Malignant", + "rdfs:label": "CheckRange", "rdfs:subClassOf": [ { - "@id": "bts:TissueStatus" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Malignant", - "sms:required": "sms:false", - "sms:validationRules": [] + "sms:displayName": "Check Range", + "sms:required": "sms:true", + "sms:validationRules": [ + "inRange 50 100 error" + ] }, { - "@id": "bts:FASTQ", + "@id": "bts:CheckDate", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "FASTQ", + "rdfs:label": "CheckDate", "rdfs:subClassOf": [ { - "@id": "bts:FileFormat" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "FASTQ", - "sms:required": "sms:false", - "sms:validationRules": [] + "sms:displayName": "Check Date", + "sms:required": "sms:true", + "sms:validationRules": [ + "date" + ] }, { - "@id": "bts:GRCh37", + "@id": "bts:CheckNA", "@type": "rdfs:Class", 
"rdfs:comment": "TBD", - "rdfs:label": "GRCh37", + "rdfs:label": "CheckNA", "rdfs:subClassOf": [ { - "@id": "bts:GenomeBuild" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "GRCh37", - "sms:required": "sms:false", - "sms:validationRules": [] + "sms:displayName": "Check NA", + "sms:required": "sms:true", + "sms:validationRules": [ + "int", + "IsNA" + ] }, { - "@id": "bts:GRCh38", + "@id": "bts:Ab", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "GRCh38", + "rdfs:label": "Ab", "rdfs:subClassOf": [ { - "@id": "bts:GenomeBuild" + "@id": "bts:CheckList" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "GRCh38", - "sms:required": "sms:false", + "sms:displayName": "ab", "sms:validationRules": [] }, { - "@id": "bts:GRCm38", + "@id": "bts:Cd", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "GRCm38", + "rdfs:label": "Cd", "rdfs:subClassOf": [ { - "@id": "bts:GenomeBuild" + "@id": "bts:CheckList" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "GRCm38", - "sms:required": "sms:false", + "sms:displayName": "cd", "sms:validationRules": [] }, { - "@id": "bts:GRCm39", + "@id": "bts:Ef", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "GRCm39", + "rdfs:label": "Ef", "rdfs:subClassOf": [ { - "@id": "bts:GenomeBuild" + "@id": "bts:CheckList" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "GRCm39", - "sms:required": "sms:false", + "sms:displayName": "ef", "sms:validationRules": [] }, { - "@id": "bts:Ab", + "@id": "bts:Gh", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Ab", + "rdfs:label": "Gh", "rdfs:subClassOf": [ { "@id": "bts:CheckList" @@ -3294,59 +1365,70 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "ab", - "sms:required": "sms:false", + "sms:displayName": "gh", "sms:validationRules": [] }, { - "@id": 
"bts:Cd", + "@id": "bts:MockRDB", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Cd", + "rdfs:label": "MockRDB", "rdfs:subClassOf": [ { - "@id": "bts:CheckList" + "@id": "bts:DataType" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "cd", - "sms:required": "sms:false", + "sms:displayName": "MockRDB", + "sms:requiresDependency": [ + { + "@id": "bts:Component" + }, + { + "@id": "bts:MockRDBId" + }, + { + "@id": "bts:SourceManifest" + } + ], "sms:validationRules": [] }, { - "@id": "bts:Ef", + "@id": "bts:MockRDBId", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Ef", + "rdfs:label": "MockRDBId", "rdfs:subClassOf": [ { - "@id": "bts:CheckList" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "ef", - "sms:required": "sms:false", - "sms:validationRules": [] + "sms:displayName": "MockRDB_id", + "sms:required": "sms:true", + "sms:validationRules": [ + "int" + ] }, { - "@id": "bts:Gh", + "@id": "bts:SourceManifest", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Gh", + "rdfs:label": "SourceManifest", "rdfs:subClassOf": [ { - "@id": "bts:CheckList" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "gh", - "sms:required": "sms:false", + "sms:displayName": "SourceManifest", + "sms:required": "sms:true", "sms:validationRules": [] } ], From 89764d4e30c5073108cdc5aaec72960742989425 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Sun, 27 Aug 2023 15:59:02 -0700 Subject: [PATCH 044/239] remove old schemas files that are no longer used --- schematic/schemas/df_parser.py | 787 ------------------------ schematic/schemas/explorer.py | 1041 -------------------------------- schematic/schemas/generator.py | 707 ---------------------- 3 files changed, 2535 deletions(-) delete mode 100644 schematic/schemas/df_parser.py delete mode 100644 schematic/schemas/explorer.py delete 
mode 100644 schematic/schemas/generator.py diff --git a/schematic/schemas/df_parser.py b/schematic/schemas/df_parser.py deleted file mode 100644 index a2eaceb36..000000000 --- a/schematic/schemas/df_parser.py +++ /dev/null @@ -1,787 +0,0 @@ -import os -import string -import re -import io -import requests -import logging - -from typing import ( - Any, - Dict, - Optional, - Text, -) # allows specifying explicit variable types - -import pandas as pd -import numpy as np - -from schematic.schemas.explorer import SchemaExplorer -from schematic import LOADER - -from schematic.utils.validate_rules_utils import validate_schema_rules -from schematic.utils.df_utils import load_df - -logger = logging.getLogger(__name__) - - -""" -Utility for converting csv file containing a data model definition schema (see scRNA-seq.csv for an example) into schema.org schema. -""" - -# required headers for schema; may or may not abstract further; for now hardcode -required_headers = set( - [ - "Attribute", - "Description", - "Valid Values", - "DependsOn", - "Required", - "Parent", - "Properties", - "DependsOn Component", - "Source", - "Validation Rules", - ] -) - - -def get_class( - se: SchemaExplorer, - class_display_name: str, - description: str = None, - subclass_of: list = [], - requires_dependencies: list = None, - requires_range: list = None, - requires_components: list = None, - required: bool = None, - validation_rules: list = None, -) -> dict: - - """Constructs a new schema.org compliant class given a set of schema object attributes - - Args: - se: a schema explorer object allowing the traversal and modification of a schema graph - display_class_name: human readable label for the schema object/attribute: key characteristic X of the assay, related protocol, or downstream data that we want to record as metadata feature - description: definition or a reference containing the definition of attribute X. Preferably provide a source ontology link or code in addition to the definition. 
- subclass_of: *schema* label of this attribute/object's parent node in the schema - requires_dependencies: important characteristics, if any, of attribute X that need to be recorded as metadata features given attribute X is specified. These characteristics are attributes themselves and need to pre-exist in the schema as such - requires_range: a set/range of values that this attribute can be assigned to. this domain is stored in the rangeIncludes property of this object. - requires_components: a set of associated components/categories that this object/entity requires for its full specification; each component is a high level ontology class in which entities/objects are categorized/componentized and it is an entity on its own that needs to exist in the schema. - required: indicates if this attribute is required or optional in a schema - validation_rules: a list of validation rules defined for this class (e.g. defining what is a valid object of this class) - - Returns: a json schema.org object - """ - - class_name = se.get_class_label_from_display_name(class_display_name) - - # setup biothings object template with mandatory elements - class_attributes = { - "@id": "bts:" + class_name, - "@type": "rdfs:Class", - "rdfs:comment": description - if description and not pd.isnull(description) - else "TBD", - "rdfs:label": class_name, - "schema:isPartOf": {"@id": "http://schema.biothings.io"}, - } - - # determine parent class of element and add subclass relationship to schema - required by biothings - # if no subclass is provided, set a default to schema.org Thing - if subclass_of: - if len(subclass_of) == 1 and pd.isnull(subclass_of[0]): - parent = {"rdfs:subClassOf": [{"@id": "schema:Thing"}]} - else: - parent = { - "rdfs:subClassOf": [ - {"@id": "bts:" + se.get_class_label_from_display_name(sub)} - for sub in subclass_of - ] - } - else: - parent = {"rdfs:subClassOf": [{"@id": "schema:Thing"}]} - - class_attributes.update(parent) - - # add optional attribute specifying 
attributes/objects that are required for the specification of this object - # useful for specifying annotation requirements, for example - if requires_dependencies: - requirement = { - "sms:requiresDependency": [ - {"@id": "bts:" + dep} for dep in requires_dependencies - ] - } - class_attributes.update(requirement) - - # add optional attribute specifying the possible values this object can be set to; can be other objects, including primitives - if requires_range: - value_constraint = { - "schema:rangeIncludes": [ - {"@id": "bts:" + se.get_class_label_from_display_name(val)} - for val in requires_range - ] - } - class_attributes.update(value_constraint) - - # add optional attribute specifying validation patterns associated with this object (e.g. precise definition of the object range) - if validation_rules: - class_attributes.update({"sms:validationRules": validation_rules}) - else: - class_attributes.update({"sms:validationRules": []}) - - # add optional attribute specifying the required components (i.e. 
high level ontology class in which entities/objects are categorized/componentized) - # that are required for the specification of this object - if requires_components: - requirement = { - "sms:requiresComponent": [{"@id": "bts:" + c} for c in requires_components] - } - class_attributes.update(requirement) - - if required: - class_attributes.update({"sms:required": "sms:true"}) - else: - class_attributes.update({"sms:required": "sms:false"}) - - # ensure display name does not contain leading/trailing white spaces - class_attributes.update({"sms:displayName": class_display_name.strip()}) - - return class_attributes - - -def get_property( - se: SchemaExplorer, - property_display_name: str, - property_class_names: list, - description: str = None, - requires_range: list = None, - requires_dependencies: list = None, - required: bool = None, - validation_rules: str = None, -) -> dict: - - """Constructs a new schema.org compliant property of an existing schema.org object/class; note that the property itself is a schema.org object class. - - Args: - se: a schema explorer object allowing the traversal and modification of a schema graph - property_display_name: human readable label for the schema object/attribute: key characteristic X of the assay, related protocol, or downstream data that we want to record as metadata feature - property_class_name: *schema* label of the classes/objects that this is a property of - description: definition or a reference containing the definition of attribute X. Preferably provide a source ontology link or code in addition to the definition. - requires_range: what is the set/domain of values that this attribute can be assigned to; currently only used to specify primitive types. TODO: extend to reg exp patterns - requires_dependencies: important characteristics, if any, of property X that need to be recorded as metadata features given property X is specified. 
These characteristics are attributes themselves and need to pre-exist in the schema as such - validation_rules: a list of validation rules defined for this class (e.g. defining what is a valid object of this property) - - - Returns: a json schema.org property object - """ - property_name = se.get_property_label_from_display_name(property_display_name) - - property_attributes = { - "@id": "bts:" + property_name, - "@type": "rdf:Property", - "rdfs:comment": description - if description and not pd.isnull(description) - else "TBD", - "rdfs:label": property_name, - "sms:displayName": property_display_name, - "schema:isPartOf": {"@id": "http://schema.biothings.io"}, - } - - domain_includes = { - "schema:domainIncludes": [ - {"@id": "bts:" + se.get_class_label_from_display_name(val)} - for val in property_class_names - ] - } - property_attributes.update(domain_includes) - - if requires_range: - value_constraint = { - "schema:rangeIncludes": [ - {"@id": "bts:" + se.get_class_label_from_display_name(val)} - for val in requires_range - ] - } - property_attributes.update(value_constraint) - - if requires_dependencies: - requirement = { - "sms:requiresDependency": [ - {"@id": "bts:" + dep} for dep in requires_dependencies - ] - } - property_attributes.update(requirement) - - # add optional attribute specifying validation patterns associated with this object (e.g. 
precise definition of the object range) - if validation_rules: - property_attributes.update({"sms:validationRules": validation_rules}) - else: - property_attributes.update({"sms:validationRules": []}) - - if required: - property_attributes.update({"sms:required": "sms:true"}) - else: - property_attributes.update({"sms:required": "sms:false"}) - - #'http://schema.org/domainIncludes':{'@id': 'bts:' + property_class_name}, - #'http://schema.org/rangeIncludes':{'@id': 'schema:' + allowed_values}, - - # ensure display name does not contain leading/trailing white spaces - property_attributes.update({"sms:displayName": property_display_name.strip()}) - - return property_attributes - - -def attribute_exists(se: SchemaExplorer, attribute_label: str) -> bool: - - """Check if a given attribute exists already in schema - - Args: - se: a schema explorer object allowing the traversal and modification of a schema graph - attribute_label: a schema label for the attribute to check - - Returns: - True/False indicating if attribute exists or not - """ - schema_graph = se.get_nx_schema() - - if attribute_label in schema_graph.nodes: - return True - return False - - -def check_schema_definition(schema_definition: pd.DataFrame) -> bool: - - """Checks if a schema definition data frame contains the right required headers. - - See schema definition guide for more details - TODO: post and link schema definition guide - - Args: - schema_definition: a pandas dataframe containing schema definition; see example here: https://docs.google.com/spreadsheets/d/1J2brhqO4kpeHIkNytzlqrdIiRanXDr6KD2hqjOTC9hs/edit#gid=0 - Raises: Exception - """ - - if required_headers.issubset(set(list(schema_definition.columns))): - return - elif "Requires" in list(schema_definition.columns) or "Requires Component" in list( - schema_definition.columns - ): - raise ValueError( - "The input CSV schema file contains the 'Requires' and/or the 'Requires " - "Component' column headers. 
These columns were renamed to 'DependsOn' and " - "'DependsOn Component', respectively. Switch to the new column names." - ) - -def _prop_2_classes(properties: dict) -> dict: - - """Create a dictionary linking all properties to their classes. - Args: - properties (dict): attributes and their properties (if applicable) - Returns: - Dictionary linking properties to all the classes in their domain. - """ - prop_2_classes = {} - for record in properties: - if not pd.isnull(record["Properties"]): - props = record["Properties"].strip().split(",") - for pr in props: - prop_2_classes.setdefault(pr.strip(),[]).append(record["Attribute"]) - - return prop_2_classes - -def create_nx_schema_objects( - schema_extension: pd.DataFrame, se: SchemaExplorer -) -> SchemaExplorer: - """Creates classes for all attributes and adds them to the schema. - Args: - schema_extension: a pandas dataframe containing schema definition; see example here: https://docs.google.com/spreadsheets/d/1J2brhqO4kpeHIkNytzlqrdIiRanXDr6KD2hqjOTC9hs/edit#gid=0 - se: a schema explorer object allowing the traversal and modification of a schema graph - base_schema_path: a path to a json-ld file containing an existing schema - Returns: - An updated schema explorer object - """ - - try: - check_schema_definition(schema_extension) - logger.debug("Schema definition csv ready for processing!") - except: - raise ValueError( - f"Schema extension headers: {set(list(schema_extension.columns))} " - f"do not match required schema headers: {required_headers}" - ) - - rel_dict = { - "rdfs:subClassOf": {"parentOf": "in"}, - "schema:domainIncludes": {"domainValue": "in"}, - "sms:requiresDependency": {"requiresDependency": "out"}, - "sms:requiresComponent": {"requiresComponent": "out"}, - "schema:rangeIncludes": {"rangeValue": "out"}, - } - - # get attributes from Attribute column - attributes = schema_extension[list(required_headers)].to_dict("records") - - # get all properties across all attributes from Properties column - 
props = set(schema_extension[["Properties"]].dropna().values.flatten()) - - # clean properties strings - all_properties = [] - for prop in props: - all_properties += [p.strip() for p in prop.split(",")] - - # get both attributes and their properties (if any) - properties = schema_extension[["Attribute", "Properties"]].to_dict("records") - - prop_2_classes = _prop_2_classes(properties) - - logger.debug("Adding attributes") - for attribute in attributes: - - required = None - if not pd.isnull(attribute["Required"]): - required = attribute["Required"] - - if not attribute["Attribute"] in all_properties: - # Attribute is not a property - display_name = attribute["Attribute"] - - subclass_of = None - if not pd.isnull(attribute["Parent"]): - subclass_of = [ - parent for parent in attribute["Parent"].strip().split(",") - ] - - new_class = get_class( - se, - display_name, - description=attribute["Description"], - subclass_of=subclass_of, - required=required, - ) - - se.add_schema_object_nx(new_class, **rel_dict) - - """ - print(se.get_nx_schema().nodes[new_class["rdfs:label"]]) - # check if attribute doesn't already exist and add it - if not attribute_exists(se, new_class["rdfs:label"]): - se.add_schema_object_nx(new_class, **rel_dict) - else: - print("ATTRIBUTE EXISTS") - print(new_class) - """ - - else: - # Attribute is a property - display_name = attribute["Attribute"] - - new_property = get_property( - se, - display_name, - prop_2_classes[display_name], - description=attribute["Description"], - required=required, - ) - - # check if attribute doesn't already exist and add it - if not attribute_exists(se, new_property["rdfs:label"]): - se.add_schema_object_nx(new_property, **rel_dict) - - logger.debug("Done adding attributes") - - # TODO check if schema already contains property - may require property context in csv schema definition - - logger.debug("Adding and editing properties") - - for prop in properties: - if not pd.isnull(prop["Properties"]): # a class may have or 
not have properties - for p in ( - prop["Properties"].strip().split(",") - ): # a class may have multiple properties - attribute = prop["Attribute"] - - # check if property is already present as attribute under attributes column - # TODO: adjust logic below to compactify code - p = p.strip() - if p in list(schema_extension["Attribute"]): - description = schema_extension.loc[ - schema_extension["Attribute"] == p - ]["Description"].values[0] - property_info = se.explore_property( - se.get_property_label_from_display_name(p) - ) - range_values = ( - property_info["range"] if "range" in property_info else None - ) - requires_dependencies = ( - property_info["dependencies"] - if "dependencies" in property_info - else None - ) - required = ( - property_info["required"] - if "required" in property_info - else None - ) - - new_property = get_property( - se, - p, - property_info["domain"], - description=description, - requires_range=range_values, - requires_dependencies=requires_dependencies, - required=required, - ) - se.edit_schema_object_nx(new_property) - else: - description = None - new_property = get_property( - se, p, attribute, description=description - ) - se.add_schema_object_nx(new_property, **rel_dict) - - logger.debug("Done adding properties") - - # # set range values and dependency requirements for each attribute - # # if not already added, add each attribute in required values and dependencies to the schema extension - # print("Editing attributes and properties to add requirements and value ranges") - # print("====================================================================================") - - for attribute in attributes: - - # TODO: refactor processing of multi-valued cells in columns and corresponding schema updates; it would compactify code below if class and property are encapsulated as objects inheriting from a common attribute parent object - - # get values in range for this attribute, if any are specified - range_values = attribute["Valid 
Values"] - if not pd.isnull(range_values): - # prepare the range values list and split based on appropriate delimiter - # if the string "range_values" starts with double quotes, then extract all "valid values" within double quotes - range_values_list = [] - if range_values[0] == '"': - range_values_list = re.findall(r'"([^"]*)"', range_values) - else: - range_values_list = range_values.strip().split(",") - - for val in range_values_list: - # check if value is in attributes column; add it as a class if not - if not val.strip() in list(schema_extension["Attribute"]): - - # determine parent class of the new value class - # if this attribute is not a property, set it as a parent class - if not attribute["Attribute"] in all_properties: - parent = [attribute["Attribute"]] - else: - # this attribute is a property, set the parent to the domain class of this attribute - - parent = se.get_class_by_property(attribute["Attribute"]) - - if not parent: - raise ValueError( - f"Listed valid value: {val}, for attribute: {attribute['Attribute']} " - "must have a class parent. The extension could not be added to the schema." 
- ) - new_class = get_class( - se, val, description=None, subclass_of=parent - ) - - # check if attribute doesn't already exist and add it - if not attribute_exists(se, new_class["rdfs:label"]): - se.add_schema_object_nx(new_class, **rel_dict) - - # update rangeIncludes of attribute - # if attribute is not a property, then assume it is a class - if not attribute["Attribute"] in all_properties: - class_info = se.explore_class( - se.get_class_label_from_display_name(attribute["Attribute"]) - ) - class_info["range"].append( - se.get_class_label_from_display_name(val) - ) - - class_range_edit = get_class( - se, - attribute["Attribute"], - description=attribute["Description"], - subclass_of=[attribute["Parent"]], - requires_dependencies=class_info["dependencies"], - requires_range=class_info["range"], - required=class_info["required"], - validation_rules=class_info["validation_rules"], - ) - se.edit_schema_object_nx(class_range_edit) - - else: - # the attribute is a property - property_info = se.explore_property( - se.get_property_label_from_display_name(attribute["Attribute"]) - ) - property_info["range"].append( - se.get_class_label_from_display_name(val) - ) - - property_range_edit = get_property( - se, - attribute["Attribute"], - property_info["domain"], - description=property_info["description"], - requires_dependencies=property_info["dependencies"], - requires_range=property_info["range"], - required=property_info["required"], - validation_rules=property_info["validation_rules"], - ) - se.edit_schema_object_nx(property_range_edit) - - logger.debug(val + " added to value range") - - # get validation rules for this attribute, if any are specified - validation_rules = attribute["Validation Rules"] - - if not pd.isnull(validation_rules): - - # TODO: make validation rules delimiter configurable parameter - - validation_rules = [ - val_rule.strip() for val_rule in validation_rules.strip().split("::") - ] - - validate_vr = validate_schema_rules( - validation_rules, - 
attribute["Attribute"], - input_filetype = 'csv_schema') - - - # update validation rules of attribute - # if attribute is not a property, then assume it is a class - if not attribute["Attribute"] in all_properties: - class_info = se.explore_class( - se.get_class_label_from_display_name(attribute["Attribute"]) - ) - class_info["validation_rules"] = validation_rules - class_val_rule_edit = get_class( - se, - attribute["Attribute"], - description=attribute["Description"], - subclass_of=[attribute["Parent"]], - requires_dependencies=class_info["dependencies"], - requires_range=class_info["range"], - required=class_info["required"], - validation_rules=class_info["validation_rules"], - ) - se.edit_schema_object_nx(class_val_rule_edit) - else: - # the attribute is a property - property_info = se.explore_property( - se.get_property_label_from_display_name(attribute["Attribute"]) - ) - property_info["validation_rules"] = validation_rules - property_val_rule_edit = get_property( - se, - attribute["Attribute"], - property_info["domain"], - description=property_info["description"], - requires_dependencies=property_info["dependencies"], - requires_range=property_info["range"], - required=property_info["required"], - validation_rules=property_info["validation_rules"], - ) - se.edit_schema_object_nx(property_val_rule_edit) - try: - logger.debug(val + "validation rules added") - except: - logger.debug("Validation rules added") - - # get dependencies for this attribute, if any are specified - requires_dependencies = attribute["DependsOn"] - - if not pd.isnull(requires_dependencies): - for dep in requires_dependencies.strip().split(","): - # check if dependency is a property or not - dep = dep.strip() - dep_is_property = dep in all_properties - dep_label = "" - # set dependency label based on kind of dependency: class or property - if dep_is_property: - dep_label = se.get_property_label_from_display_name(dep) - else: - dep_label = se.get_class_label_from_display_name(dep) - - # 
check if dependency is in attributes column; add it to the list if not - if not dep.strip() in list(schema_extension["Attribute"]): - # if dependency is a property create a new property; else create a new class - if not dep_is_property: - # if this attribute is not a property, set it as a parent class - if not attribute["Attribute"] in all_properties: - parent = attribute["Attribute"] - else: - # this attribute is a property, set the parent to the domain class of this attribute - parent = se.get_class_by_property(attribute["Attribute"]) - if not parent: - raise ValueError( - f"Listed required dependency: {dep}, for attribute: {attribute['Attribute']} " - "must have a class parent. The extension could not be added to the schema." - ) - - new_class = get_class( - se, dep, description=None, subclass_of=[parent] - ) - # se.add_schema_object_nx(new_class, **rel_dict) - # check if attribute doesn't already exist and add it - if not attribute_exists(se, new_class["rdfs:label"]): - se.add_schema_object_nx(new_class, **rel_dict) - - else: - if not attribute["Attribute"] in all_properties: - domain_attribute = attribute["Attribute"] - else: - # this attribute is a property, set the domain of this property to the domain class of the attribute - domain_attribute = se.get_class_by_property( - attribute["Attribute"] - ) - if not domain_attribute: - raise ValueError( - f"Listed required dependency: {dep}, must have a class parent. " - "The extension could not be added to the schema." 
- ) - - description = None - new_property = get_property( - se, dep, domain_attribute, description=description - ) - # check if attribute doesn't already exist and add it - if not attribute_exists(se, new_property["rdfs:label"]): - se.add_schema_object_nx(new_property, **rel_dict) - - # update required dependencies of attribute - # if attribute is not a property then assume it is a class - if not attribute["Attribute"] in all_properties: - class_info = se.explore_class( - se.get_class_label_from_display_name(attribute["Attribute"]) - ) - class_info["dependencies"].append(dep_label) - class_dependencies_edit = get_class( - se, - attribute["Attribute"], - description=attribute["Description"], - subclass_of=[attribute["Parent"]], - requires_dependencies=class_info["dependencies"], - requires_range=class_info["range"], - required=class_info["required"], - validation_rules=class_info["validation_rules"], - ) - se.edit_schema_object_nx(class_dependencies_edit) - else: - # the attribute is a property then update as a property - property_info = se.explore_property( - se.get_property_label_from_display_name(attribute["Attribute"]) - ) - property_info["dependencies"].append(dep_label) - property_dependencies_edit = get_property( - se, - attribute["Attribute"], - property_info["domain"], - description=property_info["description"], - requires_dependencies=property_info["dependencies"], - requires_range=property_info["range"], - required=property_info["required"], - validation_rules=property_info["validation_rules"], - ) - se.edit_schema_object_nx(property_dependencies_edit) - - logger.debug(dep + " added to dependencies") - - # TODO check for cycles in attribute dependencies schema subgraph - - # check if the attribute requires any components - if not pd.isnull(attribute["DependsOn Component"]): - component_dependencies = attribute["DependsOn Component"] - else: - continue - - # iterate over potentially multiple dependency components - for comp_dep in 
component_dependencies.strip().split(","): - - # check if a component is already defined as an attribute; if not define it in the schema - if not comp_dep.strip() in list(schema_extension["Attribute"]): - - # component is not in csv schema so try adding it as a class with a parent Thing - new_class = get_class(se, comp_dep, description=None) - - # check if attribute doesn't already exist in schema.org schema and add it - # (component may not be in csv schema, but could be in the base schema we are extending) - if not attribute_exists(se, new_class["rdfs:label"]): - se.add_schema_object_nx(new_class, **rel_dict) - - # update this attribute requirements to include component - class_info = se.explore_class( - se.get_class_label_from_display_name(attribute["Attribute"]) - ) - class_info["component_dependencies"].append( - se.get_class_label_from_display_name(comp_dep) - ) - class_component_dependencies_edit = get_class( - se, - attribute["Attribute"], - description=class_info["description"], - subclass_of=class_info["subClassOf"], - requires_dependencies=class_info["dependencies"], - requires_range=class_info["range"], - validation_rules=class_info["validation_rules"], - requires_components=class_info["component_dependencies"], - ) - se.edit_schema_object_nx(class_component_dependencies_edit) - - logger.debug(comp_dep + " added to dependencies") - - # TODO check for cycles in component dependencies schema subgraph - - logger.info("Done adding requirements and value ranges to attributes") - - return se - - -def _get_base_schema_path(base_schema: str = None) -> str: - """Evaluate path to base schema. - - Args: - base_schema: Path to base data model. BioThings data model is loaded by default. - - Returns: - base_schema_path: Path to base schema based on provided argument. 
- """ - biothings_schema_path = LOADER.filename("data_models/biothings.model.jsonld") - base_schema_path = biothings_schema_path if base_schema is None else base_schema - - return base_schema_path - - -def _convert_csv_to_data_model( - schema_csv: str, base_schema: str = None -) -> SchemaExplorer: - """Convert provided CSV spec. in CSV format to data model in JSON-LD format. - - Args: - schema_csv: Path to CSV file containing data to be translated to - JSON-LD data model. Can be path to local CSV or URL. - - Returns: - base_se: SchemaExplorer object which has updated properties - (base_se.schema and base_se.schema_nx). - """ - # create data model from provided RFC - rfc_df = load_df(schema_csv, data_model=True) - - # instantiate schema explorer - base_se = SchemaExplorer() - - # determine base schema path - base_schema_path = _get_base_schema_path(base_schema) - - # load base schema (BioThings) - base_se.load_schema(base_schema_path) - - # call parser code that converts a dataframe of the RFC - # specs. 
into a JSON-LD data model - base_se = create_nx_schema_objects(rfc_df, base_se) - - return base_se \ No newline at end of file diff --git a/schematic/schemas/explorer.py b/schematic/schemas/explorer.py deleted file mode 100644 index 5938520e8..000000000 --- a/schematic/schemas/explorer.py +++ /dev/null @@ -1,1041 +0,0 @@ -import os -import string -import json -import logging - -from typing import Any, Dict, Optional, Text, List - -import inflection -import networkx as nx - -from rdflib import Graph, Namespace, plugin, query -from networkx.algorithms.cycles import find_cycle -from networkx.readwrite import json_graph - -from schematic.utils.curie_utils import ( - expand_curies_in_schema, - uri2label, - extract_name_from_uri_or_curie, -) -from schematic.utils.general import find_duplicates -from schematic.utils.io_utils import load_default, load_json, load_schemaorg -from schematic.utils.schema_utils import ( - load_schema_into_networkx, - node_attrs_cleanup, - class_to_node, - relationship_edges, -) -from schematic.utils.general import dict2list, unlist -from schematic.utils.viz_utils import visualize -from schematic.utils.validate_utils import ( - validate_class_schema, - validate_property_schema, - validate_schema, -) -from schematic.schemas.curie import uri2curie, curie2uri - -namespaces = dict(rdf=Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#")) - - -logger = logging.getLogger(__name__) - - -class SchemaExplorer: - """Class for exploring schema""" - - def __init__(self): - self.load_default_schema() - - def load_schema(self, schema): - """Load schema and convert it to networkx graph""" - self.schema = load_json(schema) - self.schema_nx = load_schema_into_networkx(self.schema) - - def export_schema(self, file_path): - with open(file_path, "w",encoding="utf8") as f: - json.dump(self.schema, f, sort_keys=True, indent=4, ensure_ascii=False) - - def load_default_schema(self): - """Load default schema, either schema.org or biothings""" - self.schema = 
load_default() - self.schema_nx = load_schema_into_networkx(self.schema) - - def get_nx_schema(self): - return self.schema_nx - - def get_edges_by_relationship( - self, class_label: str, relationship: str - ) -> List[str]: - """Get a list of out-edges of a node where the edges match a specifc type of relationship. - - i.e., the edges connecting a node to its neighbors are of relationship type -- "parentOf" (set of edges to children / sub-class nodes). - Note: possible edge relationships are -- parentOf, rangeValue, requiresDependency. - - Args: - node: the node whose edges we need to look at. - relationship: the type of link(s) that the above node and its immediate neighbors share. - - Returns: - List of edges that are connected to the node. - """ - edges = [] - - mm_graph = self.get_nx_schema() - - for (u, v, key, c) in mm_graph.out_edges(node, data=True, keys=True): - if key == relationship: - edges.append((u, v)) - - return edges - - def get_descendants_by_edge_type( - self, - source_node: str, - relationship: str, - connected: bool = True, - ordered: bool = False, - ) -> List[str]: - """Get all nodes that are descendants of a given source node, based on a specific type of edge / relationship type. - - Args: - source_node: The node whose descendants need to be retreived. - relationship: Edge / link relationship type with possible values same as in above docs. - connected: If True, we need to ensure that all descendant nodes are reachable from the source node, i.e., they are part of the same connected component. - If False, the descendants could be in multiple connected components. - Default value is True. - ordered: If True, the list of descendants will be topologically ordered. - If False, the list has no particular order (depends on the order in which the descendats were traversed in the subgraph). 
- - Returns: - List of nodes that are descendants from a particular node (sorted / unsorted) - """ - mm_graph = self.get_nx_schema() - - # if mm_graph.has_node(source_node): - # get all nodes that are reachable from a specified root /source node in the data model - - root_descendants = nx.descendants(mm_graph, source_node) - # else: - # print("The specified source node could not be found im the Networkx graph.") - # return [] - - subgraph_nodes = list(root_descendants) - subgraph_nodes.append(source_node) - descendants_subgraph = mm_graph.subgraph(subgraph_nodes) - - # prune the descendants subgraph so as to include only those edges that match the relationship type - rel_edges = [] - for (u, v, key, c) in descendants_subgraph.edges(data=True, keys=True): - if key == relationship: - rel_edges.append((u, v)) - - relationship_subgraph = nx.DiGraph() - relationship_subgraph.add_edges_from(rel_edges) - - descendants = relationship_subgraph.nodes() - - if not descendants: - # return empty list if there are no nodes that are reachable from the source node based on this relationship type - return [] - - if connected and ordered: - # get the set of reachable nodes from the source node - descendants = nx.descendants(relationship_subgraph, source_node) - descendants.add(source_node) - - # normally, the descendants from a node are unordered (peculiarity of nx descendants call) - # form the subgraph on descendants and order it topologically - # this assumes an acyclic subgraph - descendants = nx.topological_sort( - relationship_subgraph.subgraph(descendants) - ) - elif connected: - # get the nodes that are reachable from a given source node - # after the pruning process above some nodes in the root_descendants subgraph might have become disconnected and will be omitted - descendants = nx.descendants(relationship_subgraph, source_node) - descendants.add(source_node) - elif ordered: - # sort the nodes topologically - # this requires the graph to be an acyclic graph - descendants 
= nx.topological_sort(relationship_subgraph) - - return list(descendants) - - def get_adjacent_nodes_by_relationship( - self, node: str, relationship: str - ) -> List[str]: - """Get a list of nodes that is / are adjacent to a given node, based on a relationship type. - - Args: - node: the node whose edges we need to look at. - relationship: the type of link(s) that the above node and its immediate neighbors share. - - Returns: - List of nodes that are adjacent to the given node. - """ - nodes = set() - - mm_graph = self.get_nx_schema() - - for (u, v, key, c) in mm_graph.out_edges(node, data=True, keys=True): - if key == relationship: - nodes.add(v) - - return list(nodes) - - def is_class_in_schema(self, class_label): - if self.schema_nx.nodes[class_label]: - return True - else: - return False - - def full_schema_graph(self, size=None): - edges = self.schema_nx.edges() - return visualize(edges, size=size) - - def sub_schema_graph(self, source, direction, size=None): - if direction == "down": - edges = list(nx.edge_bfs(self.schema_nx, [source])) - return visualize(edges, size=size) - elif direction == "up": - paths = self.find_parent_classes(source) - edges = [] - for _path in paths: - _path.append(source) - for i in range(0, len(_path) - 1): - edges.append((_path[i], _path[i + 1])) - return visualize(edges, size=size) - elif direction == "both": - paths = self.find_parent_classes(source) - edges = list(nx.edge_bfs(self.schema_nx, [source])) - for _path in paths: - _path.append(source) - for i in range(0, len(_path) - 1): - edges.append((_path[i], _path[i + 1])) - return visualize(edges, size=size) - - def find_parent_classes(self, schema_class): - """Find all parents of the class""" - - digraph = self.get_digraph_by_edge_type("parentOf") - - root_node = list(nx.topological_sort(digraph))[0] - # root_node = list(nx.topological_sort(self.schema_nx))[0] - - paths = nx.all_simple_paths( - self.schema_nx, source=root_node, target=schema_class - ) - # print(root_node) - 
return [_path[:-1] for _path in paths] - - def find_class_specific_properties(self, schema_class): - """Find properties specifically associated with a given class""" - schema_uri = self.schema_nx.nodes[schema_class]["uri"] - properties = [] - for record in self.schema["@graph"]: - if record["@type"] == "rdf:Property": - if ( - type(record["schema:domainIncludes"]) == dict - and record["schema:domainIncludes"]["@id"] == schema_uri - ): - properties.append(record["rdfs:label"]) - elif ( - type(record["schema:domainIncludes"]) == list - and [ - item - for item in record["schema:domainIncludes"] - if item["@id"] == schema_uri - ] - != [] - ): - - properties.append(record["rdfs:label"]) - return properties - - def find_all_class_properties(self, schema_class, display_as_table=False): - """Find all properties associated with a given class - # TODO : need to deal with recursive paths - """ - parents = self.find_parent_classes(schema_class) - # print(schema_class) - # print(parents) - properties = [ - { - "class": schema_class, - "properties": self.find_class_specific_properties(schema_class), - } - ] - for path in parents: - path.reverse() - for _parent in path: - # print(_parent) - properties.append( - { - "class": _parent, - "properties": self.find_class_specific_properties(_parent), - } - ) - if not display_as_table: - return properties - else: - content = [["Property", "Expected Type", "Description", "Class"]] - for record in properties: - for _property in record["properties"]: - property_info = self.explore_property(_property) - if "range" in property_info: - content.append( - [ - _property, - property_info["range"], - property_info["description"], - record["class"], - ] - ) - else: - content.append( - [_property, property_info["description"], record["class"]] - ) - - # TODO: Log content - - def find_class_usages(self, schema_class): - """Find where a given class is used as a value of a property""" - usages = [] - schema_uri = 
self.schema_nx.nodes[schema_class]["uri"] - for record in self.schema["@graph"]: - usage = {} - if record["@type"] == "rdf:Property": - if "schema:rangeIncludes" in record: - p_range = dict2list(record["schema:rangeIncludes"]) - for _doc in p_range: - if _doc["@id"] == schema_uri: - usage["property"] = record["rdfs:label"] - p_domain = dict2list(record["schema:domainIncludes"]) - usage["property_used_on_class"] = unlist( - [self.uri2label(record["@id"]) for record in p_domain] - ) - usage["description"] = record["rdfs:comment"] - if usage: - usages.append(usage) - return usages - - def find_child_classes(self, schema_class): - """Find schema classes that inherit from the given class""" - return unlist(list(self.schema_nx.successors(schema_class))) - - def find_adjacent_child_classes(self, schema_class): - - return self.get_adjacent_nodes_by_relationship(schema_class, "parentOf") - - def explore_class(self, schema_class): - """Find details about a specific schema class""" - parents = [] - if "subClassOf" in self.schema_nx.nodes[schema_class]: - schema_node_val = self.schema_nx.nodes[schema_class]["subClassOf"] - - parents_list = [] - if isinstance(schema_node_val, dict): - parents_list.append(self.schema_nx.nodes[schema_class]["subClassOf"]) - else: - parents_list = schema_node_val - - for parent in parents_list: - parents.append(extract_name_from_uri_or_curie(parent["@id"])) - - requires_range = [] - if "rangeIncludes" in self.schema_nx.nodes[schema_class]: - schema_node_val = self.schema_nx.nodes[schema_class]["rangeIncludes"] - - if isinstance(schema_node_val, dict): - subclass_list = [] - subclass_list.append( - self.schema_nx.nodes[schema_class]["rangeIncludes"] - ) - else: - subclass_list = schema_node_val - - for range_class in subclass_list: - requires_range.append( - extract_name_from_uri_or_curie(range_class["@id"]) - ) - - requires_dependencies = [] - if "requiresDependency" in self.schema_nx.nodes[schema_class]: - schema_node_val = 
self.schema_nx.nodes[schema_class]["requiresDependency"] - - if isinstance(schema_node_val, dict): - subclass_list = [] - subclass_list.append( - self.schema_nx.nodes[schema_class]["requiresDependency"] - ) - else: - subclass_list = schema_node_val - - for dep_class in subclass_list: - requires_dependencies.append( - extract_name_from_uri_or_curie(dep_class["@id"]) - ) - - requires_components = [] - if "requiresComponent" in self.schema_nx.nodes[schema_class]: - schema_node_val = self.schema_nx.nodes[schema_class]["requiresComponent"] - - if isinstance(schema_node_val, dict): - subclass_list = [] - subclass_list.append( - self.schema_nx.nodes[schema_class]["requiresComponent"] - ) - else: - subclass_list = schema_node_val - - for comp_dep_class in subclass_list: - requires_components.append( - extract_name_from_uri_or_curie(comp_dep_class["@id"]) - ) - - required = False - if "required" in self.schema_nx.nodes[schema_class]: - required = self.schema_nx.nodes[schema_class]["required"] - - validation_rules = [] - if "validationRules" in self.schema_nx.nodes[schema_class]: - validation_rules = self.schema_nx.nodes[schema_class]["validationRules"] - - # TODO: make class_info keys here the same as keys in schema graph nodes(e.g. schema_class above); note that downstream code using explore_class would have to be updated as well (e.g. 
csv_2_schemaorg) - - class_info = { - "properties": self.find_class_specific_properties(schema_class), - "description": self.schema_nx.nodes[schema_class]["description"], - "uri": curie2uri(self.schema_nx.nodes[schema_class]["uri"], namespaces), - #'usage': self.find_class_usages(schema_class), - "usage": "NA", - "child_classes": self.find_adjacent_child_classes(schema_class), - "subClassOf": parents, - "range": requires_range, - "dependencies": requires_dependencies, - "validation_rules": validation_rules, - "required": required, - "component_dependencies": requires_components, - "parent_classes": parents - #'parent_classes': self.find_parent_classes(schema_class) - } - - if "displayName" in self.schema_nx.nodes[schema_class]: - class_info["displayName"] = self.schema_nx.nodes[schema_class][ - "displayName" - ] - - return class_info - - def get_class_validation_rules(self,class_label): - rules=[] - class_info = self.explore_class(class_label) - - if 'validation_rules' in class_info: - rules=class_info['validation_rules'] - - return rules - - def get_property_label_from_display_name(self, display_name, strict_camel_case = False): - """Convert a given display name string into a proper property label string""" - """ - label = ''.join(x.capitalize() or ' ' for x in display_name.split(' ')) - label = label[:1].lower() + label[1:] if label else '' - """ - # This is the newer more strict method - if strict_camel_case: - display_name = display_name.strip().translate({ord(c): "_" for c in string.whitespace}) - label = inflection.camelize(display_name, uppercase_first_letter=False) - - # This method remains for backwards compatibility - else: - display_name = display_name.translate({ord(c): None for c in string.whitespace}) - label = inflection.camelize(display_name.strip(), uppercase_first_letter=False) - - return label - - def get_class_label_from_display_name(self, display_name, strict_camel_case = False): - """Convert a given display name string into a proper class 
label string""" - """ - label = ''.join(x.capitalize() or ' ' for x in display_name.split(' '))""" - # This is the newer more strict method - if strict_camel_case: - display_name = display_name.strip().translate({ord(c): "_" for c in string.whitespace}) - label = inflection.camelize(display_name, uppercase_first_letter=True) - - # This method remains for backwards compatibility - else: - display_name = display_name.translate({ord(c): None for c in string.whitespace}) - label = inflection.camelize(display_name.strip(), uppercase_first_letter=True) - - return label - - def get_class_by_property(self, property_display_name): - schema_property = self.get_property_label_from_display_name( - property_display_name - ) - - for record in self.schema["@graph"]: - if record["@type"] == "rdf:Property": - if record["rdfs:label"] == schema_property: - p_domain = record["schema:domainIncludes"] - - return [ - self.uri2label(record["@id"]) - for record in p_domain - ] - #return unlist( - # [ - # self.uri2label(schema_class["@id"]) - # for schema_class in p_domain - # ] - #) - - return None - - def uri2label(self, uri): - return uri.split(":")[1] - - def explore_property(self, schema_property): - """Find details about a specific property - TODO: refactor so that explore class and explore property reuse logic - they are *very* similar - """ - property_info = {} - for record in self.schema["@graph"]: - if record["@type"] == "rdf:Property": - if record["rdfs:label"] == schema_property: - property_info["id"] = record["rdfs:label"] - property_info["description"] = record["rdfs:comment"] - property_info["uri"] = curie2uri(record["@id"], namespaces) - - p_domain = record["schema:domainIncludes"] - if type(p_domain) == list: - property_info["domain"] = [self.uri2label(record["@id"]) for record in p_domain] - elif type(p_domain) == dict: - property_info["domain"] = [self.uri2label(record["@id"])] - - if "schema:rangeIncludes" in record: - p_range = dict2list(record["schema:rangeIncludes"]) 
- property_info["range"] = [ - self.uri2label(record["@id"]) for record in p_range - ] - else: - property_info["range"] = [] - - if "sms:required" in record: - if "sms:true" == record["sms:required"]: - property_info["required"] = True - else: - property_info["required"] = False - - validation_rules = [] - if "sms:validationRules" in record: - property_info["validation_rules"] = record[ - "sms:validationRules" - ] - - if "sms:requiresDependency" in record: - p_dependencies = dict2list(record["sms:requiresDependency"]) - property_info["dependencies"] = [ - self.uri2label(record["@id"]) for record in p_dependencies - ] - else: - property_info["dependencies"] = [] - - if "sms:displayName" in record: - property_info["displayName"] = record["sms:displayName"] - - break - - # check if properties are added multiple times - - return property_info - - def generate_class_template(self): - """Generate a template for schema class""" - template = { - "@id": "uri or curie of the class", - "@type": "rdfs:Class", - "rdfs:comment": "description of the class", - "rdfs:label": "class label, should match @id", - "rdfs:subClassOf": {"@id": "parent class, could be list"}, - "schema:isPartOf": {"@id": "http://schema.biothings.io"}, - } - return template - - def generate_property_template(self): - """Generate a template for schema property""" - template = { - "@id": "url or curie of the property", - "@type": "rdf:Property", - "rdfs:comment": "description of the property", - "rdfs:label": "carmel case, should match @id", - "schema:domainIncludes": { - "@id": "class which use it as a property, could be list" - }, - "schema:isPartOf": {"@id": "http://schema.biothings.io"}, - "schema:rangeIncludes": { - "@id": "relates a property to a class that constitutes (one of) the expected type(s) for values of the property" - }, - } - return template - - def edit_class(self, class_info): - """Edit an existing class into schema""" - for i, schema_class in enumerate(self.schema["@graph"]): - if 
schema_class["rdfs:label"] == class_info["rdfs:label"]: - validate_class_schema(class_info) # why are we doing this in a loop? - - self.schema["@graph"][i] = class_info - break - - # TODO: do we actually need to validate the entire schema if a class is just edited and the class passes validation? - # validate_schema(self.schema) - - logger.info(f"Edited the class {class_info['rdfs:label']} successfully.") - self.schema_nx = load_schema_into_networkx(self.schema) - - def update_class(self, class_info): - """Add a new class into schema""" - # print(class_info) - validate_class_schema(class_info) - self.schema["@graph"].append(class_info) - validate_schema(self.schema) - logger.info(f"Updated the class {class_info['rdfs:label']} successfully.") - self.schema_nx = load_schema_into_networkx(self.schema) - - def edit_property(self, property_info): - """Edit an existing property into schema""" - for i, schema_property in enumerate(self.schema["@graph"]): - if schema_property["rdfs:label"] == property_info["rdfs:label"]: - validate_property_schema(property_info) - self.schema["@graph"][i] = property_info - - # TODO: check if properties are added/edited multiple times (e.g. 
look at explore_property) - break - - validate_schema(self.schema) - logger.info(f"Edited the property {property_info['rdfs:label']} successfully.") - self.schema_nx = load_schema_into_networkx(self.schema) - - def update_property(self, property_info): - """Add a new property into schema""" - validate_property_schema(property_info) - self.schema["@graph"].append(property_info) - validate_schema(self.schema) - logger.info(f"Updated the property {property_info['rdfs:label']} successfully.") - - def get_nodes_descendants(self, graph, component): - """ - Return a list of nodes reachable from source in graph - graph: networkx graph object - component: any given node - """ - all_descendants = list(nx.descendants(graph, component)) - - return all_descendants - - def get_nodes_ancestors(self, graph, component): - """ - Return a list of nodes reachable from source in graph - graph: networkx graph object - component: any given node - """ - all_ancestors = list(nx.ancestors(graph, component)) - - return all_ancestors - - def get_digraph_by_edge_type(self, edge_type): - - multi_digraph = self.schema_nx - - digraph = nx.DiGraph() - for (u, v, key, c) in multi_digraph.edges(data=True, keys=True): - if key == edge_type: - digraph.add_edge(u, v) - - # print(nx.find_cycle(digraph, orientation = "ignore")) - - return digraph - - # version of edit_class() method that directly acts on the networkx graph - def edit_schema_object_nx(self, schema_object: dict) -> None: - node_to_replace = class_to_node(class_to_convert=schema_object) - - # get the networkx graph associated with the SchemaExplorer object in its current state - schema_graph_nx = self.get_nx_schema() - - # outer loop to loop over all the nodes in the graph constructed from master schema - for node, data in schema_graph_nx.nodes(data=True): - - # innner loop to loop over the single node that is to be replaced/edited in the master graph - for replace_node, replace_data in node_to_replace.nodes(data=True): - - # find the node 
to be replaced in the graph - if node == replace_node: - - # for the "comment", "required", "displayName", "validationRules" fields/keys it's okay to do a direct replacement - # without having to worry about adding/removing any associated edges - - # ques. is it more expensive to do a checking operation (diff b/w fields) or a replace operation? - - if ( - "comment" in data and "comment" in replace_data - ): # replace contents of "comment" from replacement node - schema_graph_nx.nodes[node]["comment"] = node_to_replace.nodes[ - replace_node - ]["comment"] - schema_graph_nx.nodes[node][ - "description" - ] = node_to_replace.nodes[replace_node]["description"] - - if ( - "required" in data and "required" in replace_data - ): # replace boolean value of "required" from replacement node - schema_graph_nx.nodes[node]["required"] = node_to_replace.nodes[ - replace_node - ]["required"] - - if ( - "displayName" in data and "displayName" in replace_data - ): # replace contents of "displayName" from replacement node - schema_graph_nx.nodes[node][ - "displayName" - ] = node_to_replace.nodes[replace_node]["displayName"] - - if ( - "validationRules" in data and "validationRules" in replace_data - ): # replace contents of "validationRules" from replacement node - schema_graph_nx.nodes[node][ - "validationRules" - ] = node_to_replace.nodes[replace_node]["validationRules"] - - # for the "subClassOf", "requiresDependency", "requiresComponent", "rangeIncludes" fields/keys require rejiggering - # of associated edges - # general strategy we follow for rejiggering is remove edges that existed formerly and add new edges based on contents - # of the replacement node - - # "subClassOf" key related edge manipulation - if "subClassOf" in replace_data: - - # if the "subClassOf" attribute already exists on the node, then remove all the "parentOf" in-edges - # associated with that node - if "subClassOf" in data: - # remove formerly existent edges from the master schema/graph - for (u, v) in 
list(schema_graph_nx.in_edges([node])): - - # there are certain nodes which have "subClassOf" data in list format - if type(data["subClassOf"]) == list: - for _edges_to_replace in data["subClassOf"]: - edge_repl = extract_name_from_uri_or_curie( - _edges_to_replace["@id"] - ) - - if edge_repl == u: - - try: - # we need to make sure to remove only edges that are tagged with the "parentOf" label - schema_graph_nx.remove_edges_from( - [(u, v, "parentOf")] - ) - except TypeError: - pass - - # there are certain nodes which have "subClassOf" data in dict format - elif type(data["subClassOf"]) == dict: - for k_id, v_curie in data["subClassOf"].items(): - edge_repl = extract_name_from_uri_or_curie( - v_curie - ) - - if edge_repl == u: - - try: - schema_graph_nx.remove_edges_from( - [(u, v, "parentOf")] - ) - except TypeError: - pass - - # extract node names from replacement node and use it to add edges to the master schema/graph - parents = replace_data["subClassOf"] - if type(parents) == list: - for _parent in parents: - target_node = extract_name_from_uri_or_curie( - _parent["@id"] - ) - - # label to be associated with "subClassOf" keys is "parentOf" - if target_node != replace_node: - - # make note of the fact that we are changing in-edges here - schema_graph_nx.add_edge( - target_node, replace_node, key="parentOf" - ) - elif type(parents) == dict: - for _k_parent, _v_parent in parents.items(): - target_node = extract_name_from_uri_or_curie(_v_parent) - - # label to be associated with "subClassOf" keys is "parentOf" - if target_node != replace_node: - - # make note of the fact that we are changing in-edges here - schema_graph_nx.add_edge( - target_node, replace_node, key="parentOf" - ) - - # once the edges have been added, change the contents of the node - schema_graph_nx.nodes[node][ - "subClassOf" - ] = node_to_replace.nodes[replace_node]["subClassOf"] - - # "requiresDependency" key related edge manipulation - if "requiresDependency" in replace_data: - - # if the 
"requiresDependency" attribute already exists on the node, then remove all the "requiresDependency" in-edges - # associated with that node - if "requiresDependency" in data: - - for (u, v) in list(schema_graph_nx.out_edges([node])): - # there are certain nodes which have "requiresDependency" data in list format - if type(data["requiresDependency"]) == list: - for _edges_to_replace in data["requiresDependency"]: - edge_repl = extract_name_from_uri_or_curie( - _edges_to_replace["@id"] - ) - - if edge_repl == v: - - try: - schema_graph_nx.remove_edges_from( - [u, v, "requiresDependency"] - ) - except TypeError: - pass - - # there are certain nodes which have "requiresDependency" data in dict format - elif type(data["requiresDependency"]) == dict: - for k_id, v_curie in data[ - "requiresDependency" - ].items(): - edge_repl = extract_name_from_uri_or_curie( - v_curie - ) - - if edge_repl == u: - - try: - schema_graph_nx.remove_edges_from( - [u, v, "requiresDependency"] - ) - except TypeError: - pass - - deps = replace_data["requiresDependency"] - if type(deps) == list: - for _dep in deps: - target_node = extract_name_from_uri_or_curie( - _dep["@id"] - ) - - if target_node != replace_node: - - # make not of the fact that edges being added here are out-edges - schema_graph_nx.add_edge( - replace_node, - target_node, - key="requiresDependency", - ) - elif type(deps) == dict: - for _k_dep, _v_dep in deps.items(): - target_node = extract_name_from_uri_or_curie(_v_dep) - - if target_node != replace_node: - - # make not of the fact that edges being added here are out-edges - schema_graph_nx.add_edge( - replace_node, - target_node, - key="requiresDependency", - ) - - schema_graph_nx.nodes[node][ - "requiresDependency" - ] = node_to_replace.nodes[replace_node]["requiresDependency"] - - # "requiresComponent" key related edge manipulation - if "requiresComponent" in replace_data: - - if "requiresComponent" in data: - for (u, v) in list(schema_graph_nx.out_edges([node])): - # there 
are certain nodes which have "requiresComponent" data in list format - if type(data["requiresComponent"]) == list: - for _edges_to_replace in data["requiresComponent"]: - edge_repl = extract_name_from_uri_or_curie( - _edges_to_replace["@id"] - ) - - if edge_repl == v: - - try: - schema_graph_nx.remove_edges_from( - [u, v, "requiresComponent"] - ) - except TypeError: - pass - - elif type(data["requiresComponent"]) == dict: - for k_id, v_curie in data[ - "requiresComponent" - ].items(): - edge_repl = extract_name_from_uri_or_curie( - v_curie - ) - - if edge_repl == v: - - try: - schema_graph_nx.remove_edges_from( - [u, v, "requiresComponent"] - ) - except TypeError: - pass - - comps = replace_data["requiresComponent"] - if type(comps) == list: - for _comp in comps: - target_node = extract_name_from_uri_or_curie( - _comp["@id"] - ) - - if target_node != replace_node: - schema_graph_nx.add_edge( - replace_node, - target_node, - key="requiresComponent", - ) - elif type(comps) == dict: - for _k_comp, _v_comp in deps.items(): - target_node = extract_name_from_uri_or_curie(_v_comp) - - if target_node != replace_node: - - # make not of the fact that edges being added here are out-edges - schema_graph_nx.add_edge( - replace_node, - target_node, - key="requiresDependency", - ) - - schema_graph_nx.nodes[node][ - "requiresComponent" - ] = node_to_replace.nodes[replace_node]["requiresComponent"] - - # "rangeIncludes" key related edge manipulation - if "rangeIncludes" in replace_data: - - if "rangeIncludes" in data: - for (u, v) in list(schema_graph_nx.out_edges([node])): - # there are certain nodes which have "rangeIncludes" data in list format - if type(data["rangeIncludes"]) == list: - for _edges_to_replace in data["rangeIncludes"]: - edge_repl = extract_name_from_uri_or_curie( - _edges_to_replace["@id"] - ) - - if edge_repl == v: - try: - schema_graph_nx.remove_edges_from( - [u, v, "rangeIncludes"] - ) - except TypeError: - pass - - elif type(data["rangeIncludes"]) == dict: - 
for k_id, v_curie in data["rangeIncludes"].items(): - edge_repl = extract_name_from_uri_or_curie( - v_curie - ) - - if edge_repl == v: - try: - schema_graph_nx.remove_edges_from( - [u, v, "rangeIncludes"] - ) - except TypeError: - pass - - range_inc = replace_data["rangeIncludes"] - if type(range_inc) == list: - for _rinc in range_inc: - target_node = extract_name_from_uri_or_curie( - _rinc["@id"] - ) - - if target_node != replace_node: - schema_graph_nx.add_edge( - replace_node, target_node, key="rangeValue" - ) - elif type(range_inc) == dict: - for _k_rinc, _v_rinc in deps.items(): - target_node = extract_name_from_uri_or_curie(_v_rinc) - - if target_node != replace_node: - - # make not of the fact that edges being added here are out-edges - schema_graph_nx.add_edge( - replace_node, target_node, key="rangeValue" - ) - - schema_graph_nx.nodes[node][ - "rangeIncludes" - ] = node_to_replace.nodes[replace_node]["rangeIncludes"] - - # set the networkx schema graph to the the modified networkx schema - self.schema_nx = schema_graph_nx - - # print("Added node {} to the graph successfully.".format(schema_object["rdfs:label"])) - - # part of the code that replaces the modified class in the original JSON-LD schema (not in the data/ folder though) - for i, schema_class in enumerate(self.schema["@graph"]): - if schema_class["rdfs:label"] == schema_object["rdfs:label"]: - # validate_class_schema(schema_object) # validate that the class to be modified follows the structure for any generic class (node) - - self.schema["@graph"][i] = schema_object - break - - # version of update_class() method that directly acts on the networkx graph - def add_schema_object_nx(self, schema_object: dict, **kwargs: dict) -> None: - node = node_attrs_cleanup(schema_object) - - if "required" in node: - if "sms:true" == schema_object["sms:required"]: - node["required"] = True - else: - node["required"] = False - - if "sms:validationRules" in schema_object: - node["validationRules"] = 
schema_object["sms:validationRules"] - else: - node["validationRules"] = [] - - node["uri"] = schema_object["@id"] - node["description"] = schema_object["rdfs:comment"] - - # get the networkx graph associated with the SchemaExplorer object in its current state - schema_graph_nx = self.get_nx_schema() - - # add node to graph - schema_graph_nx.add_node(schema_object["rdfs:label"], **node) - - schema_graph_nx = relationship_edges(schema_graph_nx, schema_object, **kwargs) - - # set the networkx schema graph to the the modified networkx schema - self.schema_nx = schema_graph_nx - - # print("Edited node {} successfully.".format(schema_object["rdfs:label"])) - - # update the JSON-LD schema after modifying the networkx graph - # validate_class_schema(schema_object) - self.schema["@graph"].append(schema_object) - # validate_schema(self.schema) \ No newline at end of file diff --git a/schematic/schemas/generator.py b/schematic/schemas/generator.py deleted file mode 100644 index 8cb392470..000000000 --- a/schematic/schemas/generator.py +++ /dev/null @@ -1,707 +0,0 @@ -import gc -import os -import json -import logging -from typing import Any, Dict, Optional, Text, List - -import networkx as nx - -from schematic.schemas.explorer import SchemaExplorer -from schematic.utils.io_utils import load_json -from schematic.utils.cli_utils import query_dict -from schematic.utils.schema_utils import load_schema_into_networkx -from schematic.utils.validate_utils import validate_schema, rule_in_rule_list - - -logger = logging.getLogger(__name__) - - -class SchemaGenerator(object): - def __init__( - self, - path_to_json_ld: str = None, - schema_explorer: SchemaExplorer = None, - requires_dependency_relationship: str = "requiresDependency", # optional parameter(s) with default value - requires_range: str = "rangeIncludes", - range_value_relationship: str = "rangeValue", - requires_component_relationship: str = "requiresComponent", - ) -> None: - """Create / Initialize object of type 
SchemaGenerator(). - - Methods / utilities that are part of this module can be used to generate JSON validation schemas for different schema.org - specification models. - - Args: - path_to_json_ld: Path to the JSON-LD file that is representing the schema.org data model that we want to validate. - schema_explorer: SchemaExplorer instance containing the schema.org data model that we want to validate. - requires_dependency_relationship: Edge relationship between two nodes indicating that they are dependent on each other. - requires_range: A node propertly indicating that a term can assume a value equal to any of the terms that are in the current term's range. - range_value_relationship: Edge relationship that indicates a term / node that another node depends on, is part of the other node's range. - requires_component_relationship: A node property indicating that this node requires a component for its full characterization. - - Returns: - None - """ - - self.jsonld_path = path_to_json_ld - - if schema_explorer is None: - - assert ( - self.jsonld_path is not None - ), "You must provide either `path_to_json_ld` or `schema_explorer`." - - self.jsonld_path_root, jsonld_ext = os.path.splitext(self.jsonld_path) - - assert jsonld_ext == ".jsonld", ( - "Please make sure the 'path_to_json_ld' parameter " - "is pointing to a valid JSON-LD file." - ) - - # create an instance of SchemaExplorer - self.se = SchemaExplorer() - - # convert the JSON-LD data model to networkx object - self.se.load_schema(self.jsonld_path) - - else: - - # Confirm that given SchemaExplorer instance is valid - assert ( - getattr(schema_explorer, "schema") is not None - and getattr(schema_explorer, "schema_nx") is not None - ), ( - "SchemaExplorer instance given to `schema_explorer` argument " - "does not have both the `schema` and `schema_nx` attributes." 
- ) - - # User given instance of SchemaExplorer - self.se = schema_explorer - - # custom value(s) of following relationship attributes are passed during initialization - self.requires_dependency_relationship = requires_dependency_relationship - self.requires_range = requires_range - self.range_value_relationship = range_value_relationship - self.requires_component_relationship = requires_component_relationship - - def get_edges_by_relationship(self, node: str, relationship: str) -> List[str]: - """ - See class definition in SchemaExplorer - TODO: possibly remove this wrapper and refactor downstream code to call from SchemaExplorer - """ - - return self.se.get_edges_by_relationship(node, relationship) - - def get_adjacent_nodes_by_relationship( - self, node: str, relationship: str - ) -> List[str]: - - """ - See class definition in SchemaExplorer - TODO: possibly remove this wrapper and refactor downstream code to call from SchemaExplorer - """ - - return self.se.get_adjacent_nodes_by_relationship(node, relationship) - - def get_subgraph_by_edge_type( - self, graph: nx.MultiDiGraph, relationship: str - ) -> nx.DiGraph: - """Get a subgraph containing all edges of a given type (aka relationship). - TODO: possibly move method to SchemaExplorer and refactor downstream code to call from SchemaExplorer - - Args: - graph: input multi digraph (aka hypergraph) - relationship: edge / link relationship type with possible values same as in above docs. 
- - Returns: - Directed graph on edges of a particular type (aka relationship) - """ - - # prune the metadata model graph so as to include only those edges that match the relationship type - rel_edges = [] - for (u, v, key, c) in graph.out_edges(data=True, keys=True): - if key == relationship: - rel_edges.append((u, v)) - - relationship_subgraph = nx.DiGraph() - relationship_subgraph.add_edges_from(rel_edges) - - return relationship_subgraph - - def get_descendants_by_edge_type( - self, - source_node: str, - relationship: str, - connected: bool = True, - ordered: bool = False, - ) -> List[str]: - - """ - See class definition in SchemaExplorer - TODO: possibly remove this wrapper and refactor downstream code to call from SchemaExplorer - """ - - return self.se.get_descendants_by_edge_type( - source_node, relationship, connected, ordered - ) - - def get_component_requirements(self, source_component: str) -> List[str]: - """Get all components that are associated with a given source component and are required by it. - - Args: - source_component: source component for which we need to find all required downstream components. - - Returns: - List of nodes that are descendants from the source component are are related to the source through a specific component relationship. - """ - - req_components = list( - reversed( - self.get_descendants_by_edge_type( - source_component, self.requires_component_relationship, ordered=True - ) - ) - ) - - return req_components - - def get_component_requirements_graph(self, source_component: str) -> nx.DiGraph: - """Get all components that are associated with a given source component and are required by it; return the components as a dependency graph (i.e. a DAG). - - Args: - source_component: source component for which we need to find all required downstream components. 
- - Returns: - A subgraph of the schema graph induced on nodes that are descendants from the source component and are related to the source through a specific component relationship. - """ - - # get a list of required component nodes - req_components = self.get_component_requirements(source_component) - - # get the schema graph - mm_graph = self.se.get_nx_schema() - - # get the subgraph induced on required component nodes - req_components_graph = self.get_subgraph_by_edge_type( - mm_graph, self.requires_component_relationship - ).subgraph(req_components) - - return req_components_graph - - def get_node_dependencies( - self, source_node: str, display_names: bool = True, schema_ordered: bool = True - ) -> List[str]: - """Get the immediate dependencies that are related to a given source node. - - Args: - source_node: The node whose dependencies we need to compute. - display_names: if True, return list of display names of each of the dependencies. - if False, return list of node labels of each of the dependencies. - schema_ordered: if True, return the dependencies of the node following the order of the schema (slower). - if False, return dependencies from graph without guaranteeing schema order (faster) - - Returns: - List of nodes that are dependent on the source node. 
- """ - mm_graph = self.se.get_nx_schema() - - if schema_ordered: - # get dependencies in the same order in which they are defined in the schema - required_dependencies = self.se.explore_class(source_node)["dependencies"] - else: - required_dependencies = self.get_adjacent_nodes_by_relationship( - source_node, self.requires_dependency_relationship - ) - - if display_names: - # get display names of dependencies - dependencies_display_names = [] - - for req in required_dependencies: - dependencies_display_names.append(mm_graph.nodes[req]["displayName"]) - - return dependencies_display_names - - return required_dependencies - - def get_node_range(self, node_label: str, display_names: bool = True) -> List[str]: - """Get the range, i.e., all the valid values that are associated with a node label. - - Args: - node_label: Node / termn for which you need to retrieve the range. - - Returns: - List of display names of nodes associateed with the given node. - """ - mm_graph = self.se.get_nx_schema() - - try: - # get node range in the order defined in schema for given node - required_range = self.se.explore_class(node_label)["range"] - except KeyError: - raise ValueError( - f"The source node {node_label} does not exist in the graph. " - "Please use a different node." - ) - - if display_names: - # get the display name(s) of all dependencies - dependencies_display_names = [] - - for req in required_range: - dependencies_display_names.append(mm_graph.nodes[req]["displayName"]) - - return dependencies_display_names - - return required_range - - def get_node_label(self, node_display_name: str) -> str: - """Get the node label for a given display name. - - Args: - node_display_name: Display name of the node which you want to get the label for. - - Returns: - Node label associated with given node. - - Raises: - KeyError: If the node cannot be found in the graph. 
- """ - mm_graph = self.se.get_nx_schema() - - node_class_label = self.se.get_class_label_from_display_name(node_display_name) - node_property_label = self.se.get_property_label_from_display_name( - node_display_name - ) - - if node_class_label in mm_graph.nodes: - node_label = node_class_label - elif node_property_label in mm_graph.nodes: - node_label = node_property_label - else: - node_label = "" - - return node_label - - def get_node_definition(self, node_display_name: str) -> str: - """Get the node definition, i.e., the "comment" associated with a given node display name. - - Args: - node_display_name: Display name of the node which you want to get the label for. - - Returns: - Comment associated with node, as a string. - """ - node_label = self.get_node_label(node_display_name) - - if not node_label: - return "" - - mm_graph = self.se.get_nx_schema() - node_definition = mm_graph.nodes[node_label]["comment"] - - return node_definition - - def get_node_validation_rules(self, node_display_name: str) -> str: - """Get validation rules associated with a node, - - Args: - node_display_name: Display name of the node which you want to get the label for. - - Returns: - A set of validation rules associated with node, as a list. - """ - node_label = self.get_node_label(node_display_name) - - if not node_label: - return [] - - mm_graph = self.se.get_nx_schema() - node_validation_rules = mm_graph.nodes[node_label]["validationRules"] - - return node_validation_rules - - def is_node_required(self, node_display_name: str) -> bool: - """Check if a given node is required or not. - - Note: The possible options that a node can be associated with -- "required" / "optional". - - Args: - node_display_name: Display name of the node which you want to get the label for. - - Returns: - True: If the given node is a "required" node. - False: If the given node is not a "required" (i.e., an "optional") node. 
- """ - node_label = self.get_node_label(node_display_name) - - mm_graph = self.se.get_nx_schema() - node_required = mm_graph.nodes[node_label]["required"] - - return node_required - - def get_nodes_display_names( - self, node_list: List[str], mm_graph: nx.MultiDiGraph - ) -> List[str]: - """Get display names associated with the given list of nodes. - - Args: - node_list: List of nodes whose display names we need to retrieve. - - Returns: - List of display names. - """ - node_list_display_names = [ - mm_graph.nodes[node]["displayName"] for node in node_list - ] - - return node_list_display_names - - def get_range_schema( - self, node_range: List[str], node_name: str, blank=False - ) -> Dict[str, Dict[str, List[str]]]: - """Add a list of nodes to the "enum" key in a given JSON schema object. - - Args: - node_name: Name of the "main" / "head" key in the JSON schema / object. - node_range: List of nodes to be added to the JSON object. - blank: If True, add empty node to end of node list. - If False, do not add empty node to end of node list. - - Returns: - JSON object with nodes. - """ - if blank: - schema_node_range = {node_name: {"enum": node_range + [""]}} - else: - schema_node_range = {node_name: {"enum": node_range}} - - return schema_node_range - - def get_array_schema( - self, node_range: List[str], node_name: str, blank=False - ) -> Dict[str, Dict[str, List[str]]]: - """Add a list of nodes to the "enum" key in a given JSON schema object. - Allow a node to be mapped to any subset of the list - - Args: - node_name: Name of the "main" / "head" key in the JSON schema / object. - node_range: List of nodes to be added to the JSON object. - blank: If True, add empty node to end of node list. - If False, do not add empty node to end of node list. - - Returns: - JSON object with array validation rule. 
- """ - - schema_node_range_array = { - node_name: { - "type": "array", - "items": {"enum": node_range + [""] if blank else node_range}, - "maxItems": len(node_range), - } - } - - return schema_node_range_array - - def get_non_blank_schema( - self, node_name: str - ) -> Dict: # can't define heterogenous Dict generic types - """Get a schema rule that does not allow null or empty values. - - Args: - node_name: Name of the node on which the schema rule is to be applied. - - Returns: - Schema rule as a JSON object. - """ - non_blank_schema = {node_name: {"not": {"type": "null"}, "minLength": 1}} - - return non_blank_schema - - def is_required(self, node_name: str, mm_graph: nx.MultiDiGraph) -> bool: - """ - Check if a node is required - - Args: - node_name: Name of the node on which the check is to be applied. - - Returns: - Boolean value indicating if the node is required or not. - True: yes, it is required. - False: no, it is not required. - """ - return mm_graph.nodes[node_name]["required"] - - def get_json_schema_requirements(self, source_node: str, schema_name: str) -> Dict: - """Consolidated method that aims to gather dependencies and value constraints across terms / nodes in a schema.org schema and store them in a jsonschema /JSON Schema schema. - - It does so for any given node in the schema.org schema (recursively) using the given node as starting point in the following manner: - 1) Find all the nodes / terms this node depends on (which are required as "additional metadata" given this node is "required"). - 2) Find all the allowable metadata values / nodes that can be assigned to a particular node (if such a constraint is specified on the schema). - - Args: - source_node: Node from which we can start recursive dependancy traversal (as mentioned above). - schema_name: Name assigned to JSON-LD schema (to uniquely identify it via URI when it is hosted on the Internet). - - Returns: - JSON Schema as a dictionary. 
- """ - json_schema = { - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "http://example.com/" + schema_name, - "title": schema_name, - "type": "object", - "properties": {}, - "required": [], - "allOf": [], - } - - # get graph corresponding to data model schema - mm_graph = self.se.get_nx_schema() - - nodes_to_process = ( - [] - ) # list of nodes to be checked for dependencies, starting with the source node - processed_nodes = ( - [] - ) # keep of track of nodes whose dependencies have been processed - reverse_dependencies = ( - {} - ) # maintain a map between conditional nodes and their dependencies (reversed) -- {dependency : conditional_node} - range_domain_map = ( - {} - ) # maintain a map between range nodes and their domain nodes {range_value : domain_value} - # the domain node is very likely the parentof ("parentOf" relationship) of the range node - - root_dependencies = self.get_adjacent_nodes_by_relationship( - source_node, self.requires_dependency_relationship - ) - - # if root_dependencies is empty it means that a class with name 'source_node' exists - # in the schema, but it is not a valid component - if not root_dependencies: - raise ValueError(f"'{source_node}' is not a valid component in the schema.") - - nodes_to_process += root_dependencies - - process_node = nodes_to_process.pop(0) - - while process_node: - - if not process_node in processed_nodes: - # node is being processed - node_is_processed = True - - node_range = self.get_adjacent_nodes_by_relationship( - process_node, self.range_value_relationship - ) - - # get node range display name - node_range_d = self.get_nodes_display_names(node_range, mm_graph) - - node_dependencies = self.get_adjacent_nodes_by_relationship( - process_node, self.requires_dependency_relationship - ) - - # get process node display name - node_display_name = mm_graph.nodes[process_node]["displayName"] - - # updating map between node and node's valid values - for n in node_range_d: - if not n in 
range_domain_map: - range_domain_map[n] = [] - range_domain_map[n].append(node_display_name) - - # can this node be map to the empty set (if required no; if not required yes) - # TODO: change "required" to different term, required may be a bit misleading (i.e. is the node required in the schema) - node_required = self.is_required(process_node, mm_graph) - - # get any additional validation rules associated with this node (e.g. can this node be mapped to a list of other nodes) - node_validation_rules = self.get_node_validation_rules( - node_display_name - ) - - if node_display_name in reverse_dependencies: - # if node has conditionals set schema properties and conditional dependencies - # set schema properties - if node_range: - # if process node has valid value range set it in schema properties - schema_valid_vals = self.get_range_schema( - node_range_d, node_display_name, blank=True - ) - - if node_validation_rules: - # if this node has extra validation rules process them - # TODO: abstract this into its own validation rule constructor/generator module/class - if rule_in_rule_list("list", node_validation_rules): - # if this node can be mapped to a list of nodes - # set its schema accordingly - schema_valid_vals = self.get_array_schema( - node_range_d, node_display_name, blank=True - ) - - else: - # otherwise, by default allow any values - schema_valid_vals = {node_display_name: {}} - - json_schema["properties"].update(schema_valid_vals) - - # set schema conditional dependencies - for node in reverse_dependencies[node_display_name]: - # set all of the conditional nodes that require this process node - - # get node domain if any - # ow this node is a conditional requirement - if node in range_domain_map: - domain_nodes = range_domain_map[node] - conditional_properties = {} - - for domain_node in domain_nodes: - - # set range of conditional node schema - conditional_properties.update( - { - "properties": {domain_node: {"enum": [node]}}, - "required": [domain_node], - 
} - ) - - # given node conditional are satisfied, this process node (which is dependent on these conditionals) has to be set or not depending on whether it is required - if node_range: - dependency_properties = self.get_range_schema( - node_range_d, - node_display_name, - blank=not node_required, - ) - - if node_validation_rules: - if rule_in_rule_list("list", node_validation_rules): - # TODO: get_range_schema and get_range_schema have similar behavior - combine in one module - dependency_properties = self.get_array_schema( - node_range_d, - node_display_name, - blank=not node_required, - ) - - else: - if node_required: - dependency_properties = self.get_non_blank_schema( - node_display_name - ) - else: - dependency_properties = {node_display_name: {}} - schema_conditional_dependencies = { - "if": conditional_properties, - "then": { - "properties": dependency_properties, - "required": [node_display_name], - }, - } - - # update conditional-dependency rules in json schema - json_schema["allOf"].append( - schema_conditional_dependencies - ) - - else: - # node doesn't have conditionals - if node_required: - if node_range: - schema_valid_vals = self.get_range_schema( - node_range_d, node_display_name, blank=False - ) - - if node_validation_rules: - # If there are valid values AND they are expected to be a list, - # reformat the Valid Values. 
- if rule_in_rule_list("list", node_validation_rules): - schema_valid_vals = self.get_array_schema( - node_range_d, node_display_name, blank=False - ) - else: - schema_valid_vals = self.get_non_blank_schema( - node_display_name - ) - - json_schema["properties"].update(schema_valid_vals) - # add node to required fields - json_schema["required"] += [node_display_name] - - elif process_node in root_dependencies: - # node doesn't have conditionals and is not required; it belongs in the schema only if it is in root's dependencies - - if node_range: - schema_valid_vals = self.get_range_schema( - node_range_d, node_display_name, blank=True - ) - - if node_validation_rules: - if rule_in_rule_list("list", node_validation_rules): - schema_valid_vals = self.get_array_schema( - node_range_d, node_display_name, blank=True - ) - - else: - schema_valid_vals = {node_display_name: {}} - - json_schema["properties"].update(schema_valid_vals) - - else: - # node doesn't have conditionals and it is not required and it is not a root dependency - # the node doesn't belong in the schema - # do not add to processed nodes since its conditional may be traversed at a later iteration (though unlikely for most schemas we consider) - node_is_processed = False - - # add process node as a conditional to its dependencies - node_dependencies_d = self.get_nodes_display_names( - node_dependencies, mm_graph - ) - - for dep in node_dependencies_d: - if not dep in reverse_dependencies: - reverse_dependencies[dep] = [] - - reverse_dependencies[dep].append(node_display_name) - - # add nodes found as dependencies and range of this processed node - # to the list of nodes to be processed - nodes_to_process += node_range - nodes_to_process += node_dependencies - - # if the node is processed add it to the processed nodes set - if node_is_processed: - processed_nodes.append(process_node) - - # if the list of nodes to process is not empty - # set the process node the next remaining node to process - if 
nodes_to_process: - process_node = nodes_to_process.pop(0) - else: - # no more nodes to process - # exit the loop - break - - logger.info("JSON schema successfully generated from schema.org schema!") - - # if no conditional dependencies were added we can't have an empty 'AllOf' block in the schema, so remove it - if not json_schema["allOf"]: - del json_schema["allOf"] - - # If no config value and SchemaGenerator was initialized with - # a JSON-LD path, construct - if self.jsonld_path is not None: - prefix = self.jsonld_path_root - prefix_root, prefix_ext = os.path.splitext(prefix) - if prefix_ext == ".model": - prefix = prefix_root - json_schema_log_file = f"{prefix}.{source_node}.schema.json" - - logger.info( - "The JSON schema file can be inspected by setting the following " - "nested key in the configuration: (model > input > log_location)." - ) - - logger.info(f"JSON schema file log stored as {json_schema_log_file}") - - return json_schema From 7fd5dfcb1bfe54b44d8552f981179292a1192cae Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Sun, 27 Aug 2023 16:01:19 -0700 Subject: [PATCH 045/239] change the name of the old validator.py to json_schema_validator.py to be more clear as to the function --- schematic/schemas/{validator.py => json_schema_validator.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename schematic/schemas/{validator.py => json_schema_validator.py} (100%) diff --git a/schematic/schemas/validator.py b/schematic/schemas/json_schema_validator.py similarity index 100% rename from schematic/schemas/validator.py rename to schematic/schemas/json_schema_validator.py From 3161710b8ea24c0374d253da4ed0a4a2f46bcef4 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 28 Aug 2023 09:44:15 -0700 Subject: [PATCH 046/239] updatate inline docstrings in manifest/generator.py --- schematic/manifest/generator.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py 
index 0c883ed49..ec7dc614e 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -82,7 +82,7 @@ def __init__( "when there is no manifest file for the dataset in question." ) - # Data Model Explorer object + # Instantiate Data Model Explorer object self.DME = DataModelGraphExplorer(self.graph) # additional metadata to add to manifest @@ -363,10 +363,7 @@ def _get_json_schema(self, json_schema_filepath: str) -> Dict: TODO: Do we even allow people to provide a json_schema_filepath anyore? """ if not json_schema_filepath: - # if no json schema is provided; there must be - # schema explorer defined for schema.org schema - # o.w. this will throw an error - # TODO: catch error + # TODO Catch error if no JSONLD or JSON path provided. data_model_js = DataModelJSONSchema(jsonld_path=self.jsonld_path, graph=self.graph) json_schema = data_model_js.get_json_validation_schema(source_node=self.root, schema_name=self.title) else: From c60b0f01676d3fdcee7d5563c929c9b6fb23e404 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 6 Sep 2023 12:26:47 -0700 Subject: [PATCH 047/239] clean/document data_model_relationships --- schematic/schemas/data_model_relationships.py | 81 +++++++++---------- 1 file changed, 36 insertions(+), 45 deletions(-) diff --git a/schematic/schemas/data_model_relationships.py b/schematic/schemas/data_model_relationships.py index 4bf18f360..5f593e77e 100644 --- a/schematic/schemas/data_model_relationships.py +++ b/schematic/schemas/data_model_relationships.py @@ -5,53 +5,42 @@ class DataModelRelationships(): def __init__(self) -> None: self.relationships_dictionary = self.define_data_model_relationships() - #self.delimiters = ['@', ':'] - return def define_data_model_relationships(self) -> Dict: - """ Define the relationships in the model so they can be accessed in a central location. - If adding anew relationship make sure to follow the conventions closely. 
- key:{ - jsonld_key:, - - csv_header: - jsonld_default: if at the end of processing there is no value present, this is the value we want to fill. - can also fill with type to ensure the key does not get deleted. + """ Define the relationships and their attributes so they can be accessed through other classes. + The key is how it the relationship will be referenced througout Schematic. + Note: Though we could use other keys to determine which keys define nodes and edges, + edge_rel is used as an explicit definition, for easier code readablity. + key: + jsonld_key: Name for relationship in the JSONLD. + Include in all sub-dictionaries. + csv_header: Str, name for this relationshp in the CSV data model. + Enter None if not part of the CSV data model. + node_label: Name for relationship in the graph representation of the data model. + Do not include this key for edge relationships. + type: type, type of expected to be read into graph creation. edge_rel: True, if this relationship defines an edge - False, if is a value relationship + False, if is a value relationship + Include in all sub-dictionaries. required_header: True, if relationship header is required for the csv - node_dict: set default values for this relationship + jsonld_default: Defines default values to fill for JSONLD generation. + Used during func DataModelJsonLD.clean_template(), to fill value with a default, if not supplied in the data model. + node_attr_dict: This is used to add information to nodes in the model. Only include for nodes not edges. + set default values for this relationship key is the node relationship name, value is the default value. If want to set default as a function create a nested dictionary. 
{'default': default_function, 'standard': alternative function to call if relationship is present for a node} - } If adding new functions to node_dict will + } + If adding new functions to node_dict will need to modify data_model_nodes.generate_node_dict in - } - TODO: - Key: - jsonld_key: get_json_key_from_context - csv_header: - jsonld_default: if at the end of processing there is no - edge_rel: - required_header: - node_label: - node_attr_dict: + edge_dir: str, 'in'/'out' is the edge an in or out edge. Define for edge relationships + jsonld_dir: str, 'in'/out is the direction in or out in the JSONLD. - TODO: - - Functionally implement jsonld_edge key - - Add JSONLD Directionality: - Default Forward: - Reverse Domain Includes - - Add edge directionality: - Default in. - Out domainIncludes. - TODO: - - Use class inheritance to set up - - Check 'subClassOf' edge_dir + TODO: + - Use class inheritance to set up """ map_data_model_relationships = { - 'displayName': { 'jsonld_key': 'sms:displayName', 'csv_header': 'Attribute', @@ -118,6 +107,7 @@ def define_data_model_relationships(self) -> Dict: 'csv_header': 'Required', 'node_label': 'required', 'type': bool, + 'jsonld_default': 'sms:false', 'edge_rel': False, 'required_header': True, 'node_attr_dict':{'default': False, @@ -185,6 +175,10 @@ def define_data_model_relationships(self) -> Dict: return map_data_model_relationships def define_required_csv_headers(self): + """Helper function to retrieve required CSV headers, alert if required header was not provided. + Returns: + required_headers: lst, Required CSV headers. + """ required_headers = [] for k, v in self.relationships_dictionary.items(): try: @@ -196,6 +190,10 @@ def define_required_csv_headers(self): return required_headers def define_edge_relationships(self): + """Helper function to retrieve CSV headers for edge relationships. + Returns: + edge_relationships: dict, key: csv_header if the key represents an edge relationship. 
+ """ edge_relationships = {} for k, v in self.relationships_dictionary.items(): try: @@ -207,22 +205,15 @@ def define_edge_relationships(self): return edge_relationships def define_value_relationships(self): - """ - Think about changing outputs. - Change to node_relationships. - Use node_label to pull info. Save node_label instead? + """Helper function to retrieve CSV headers for non-edge (value) relationships. + Returns: + edge_relationships: dict, key: csv_header if the key represents a value relationship. """ value_relationships = {} for k, v in self.relationships_dictionary.items(): try: if not v['edge_rel']: value_relationships.update({k:v['csv_header']}) - ''' - if ':' in v['jsonld_key']: - value_relationships.update({k:v['jsonld_key'].split(':')[1]}) - elif '@' in v['jsonld_key']: - value_relationships.update({k:v['jsonld_key'].split('@')[1]}) - ''' except KeyError: print(f"Did not provide a 'edge_rel' for key {k}") From 95249e8a3cd4cf747a79edb29502b4c904c81d00 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 8 Sep 2023 16:06:30 -0700 Subject: [PATCH 048/239] clean/document schemas/commands.py --- schematic/schemas/commands.py | 28 ++++++++++++++++------------ 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/schematic/schemas/commands.py b/schematic/schemas/commands.py index a9f0198b3..bd400ed4b 100644 --- a/schematic/schemas/commands.py +++ b/schematic/schemas/commands.py @@ -40,32 +40,26 @@ def schema(): # use as `schematic model ...` @click.argument( "schema", type=click.Path(exists=True), metavar="", nargs=1 ) -@click.option( - "--base_schema", - "-b", - type=click.Path(exists=True), - metavar="", - help=query_dict(schema_commands, ("schema", "convert", "base_schema")), -) + @click.option( "--output_jsonld", "-o", metavar="", help=query_dict(schema_commands, ("schema", "convert", "output_jsonld")), ) -def convert(schema, base_schema, output_jsonld): +def convert(schema, output_jsonld): """ Running CLI to convert data model 
specification in CSV format to data model in JSON-LD format. - TODO: Throw actual errors in the future rather than just logging. + Note: Currently, not configured to build off of base model, so removing --base_schema argument for now """ # get the start time st = time.time() # Instantiate Parser - data_model_parser = DataModelParser(schema, base_schema) + data_model_parser = DataModelParser(schema) #Parse Model logger.info("Parsing data model.") @@ -82,7 +76,7 @@ def convert(schema, base_schema, output_jsonld): # Validate generated data model. logger.info("Validating the data model internally.") data_model_validator = DataModelValidator(graph=graph_data_model) - data_model_errors = data_model_validator.run_checks() + data_model_errors, data_model_warnings = data_model_validator.run_checks() # If there are errors log them. if data_model_errors: @@ -92,11 +86,21 @@ def convert(schema, base_schema, output_jsonld): elif isinstance(err, list): for e in err: logger.error(e) + + # If there are warnings log them. + if data_model_warnings: + for war in data_model_warnings: + if isinstance(war, str): + logger.warning(war) + elif isinstance(war, list): + for w in war: + logger.warning(w) + logger.info("Converting data model to JSON-LD") jsonld_data_model = convert_graph_to_jsonld(Graph=graph_data_model) - # output JSON-LD file alongside CSV file by default + # output JSON-LD file alongside CSV file by default, get path. 
if output_jsonld is None: csv_no_ext = re.sub("[.]csv$", "", schema) output_jsonld = csv_no_ext + ".jsonld" From 86b5d8aea0a484a46d71d4ba5e22aec97e90504f Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 8 Sep 2023 16:07:00 -0700 Subject: [PATCH 049/239] clean document schemas/data_model_edges --- schematic/schemas/data_model_edges.py | 50 +++++++++++++++++---------- 1 file changed, 31 insertions(+), 19 deletions(-) diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py index 351d2316a..5cdcaf9b4 100644 --- a/schematic/schemas/data_model_edges.py +++ b/schematic/schemas/data_model_edges.py @@ -1,3 +1,5 @@ +import networkx as nx + from schematic.schemas.data_model_relationships import ( DataModelRelationships ) @@ -7,47 +9,57 @@ def __init__(self): self.dmr = DataModelRelationships() self.data_model_relationships = self.dmr.relationships_dictionary - def generate_edge(self, G, node, all_node_dict, data_model, edge_relationships): - """ + def generate_edge(self, G: nx.MultiDiGraph, node: str, all_node_dict: dict, attr_rel_dict: dict, edge_relationships: dict) -> nx.MultiDiGraph: + """Generate an edge between a target node and relevant other nodes the data model Args: + G, nx.MultiDiGraph: networkx graph representation of the data model, that is in the process of being fully built. + node, str: target node to look for connecting edges + all_node_dict, dict: a dictionary containing information about all nodes in the model + key: node display name + value: node attribute dict, containing attributes to attach to each node. + attr_rel_dict, dict: + {Attribute Display Name: { + Relationships: { + CSV Header: Value}}} + edge_relationships: dict, key: csv_header if the key represents a value relationship. Returns: + G, nx.MultiDiGraph: networkx graph representation of the data model, that has had new edges attached. """ # For each attribute in the model. 
- for attribute_display_name, relationship in data_model.items(): - # Get the relationships for the current attribure + for attribute_display_name, relationship in attr_rel_dict.items(): + # Get the relationships associated with the current attribute relationships = relationship['Relationships'] - # For each edge relationship + # Add edge relationships one at a time for key, csv_header in edge_relationships.items(): - # For a given relationship in the model + # If the attribute has a relationship that matches the current edge being added if csv_header in relationships.keys(): # If the current node is part of that relationship and is not the current node # Connect node to attribute as an edge. if node in relationships[csv_header] and node != attribute_display_name: - # Find position of node in the list, this is the weight - # This will help us ensure things like valid values, or depends on are preserved in the proper order. - - # TODO: Move adding weights to its own helper. - # TODO: create a new attribute in the rel dictionary looking for directionality. Save as out for domainIncludes, save as in for others. + # Generate weights based on relationship type. + # Weights will allow us to preserve the order of entries order in the data model in later steps. if key == 'domainIncludes': - # Get weight from the order of the attributes. - weight = list(data_model.keys()).index(attribute_display_name) + # For 'domainIncludes'/properties relationship, users do not explicitly provide a list order (like for valid values, or dependsOn) + # so we pull the order/weight from the order of the attributes. 
+ weight = list(attr_rel_dict.keys()).index(attribute_display_name) elif type(relationships[csv_header]) == list: + # For other relationships that pull in lists of values, we can explicilty pull the weight by their order in the provided list weight = relationships[csv_header].index(node) else: + # For single (non list) entries, add weight of 0 weight = 0 - # Here the first added node to the edge is the value that would be the valid value to the second node which is the attribute. + # Get the edge_key for the edge relationship we are adding at this step edge_key = self.data_model_relationships[key]['edge_key'] + + # Add edges, in a manner that preserves directionality + # TODO: rewrite to use edge_dir if key in ['subClassOf', 'domainIncludes']: G.add_edge(all_node_dict[node]['label'], all_node_dict[attribute_display_name]['label'], key=edge_key, weight=weight) else: G.add_edge(all_node_dict[attribute_display_name]['label'], all_node_dict[node]['label'], key=edge_key, weight=weight) - # Add additional valid value edges + # Add add rangeIncludes/valid value relationships in reverse as well, making the attribute the parent of the valid value. if key == 'rangeIncludes': - # Add this relationships for classes. 
G.add_edge(all_node_dict[attribute_display_name]['label'], all_node_dict[node]['label'], key='parentOf', weight=weight) return G - - def edit_edge(): - return \ No newline at end of file From 4a1c519a48a98f9f47673a0079ab07a4144524a0 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 11 Sep 2023 14:09:59 -0700 Subject: [PATCH 050/239] clean/document data_model_graph.py --- schematic/schemas/data_model_graph.py | 265 ++++++++++++++++---------- 1 file changed, 165 insertions(+), 100 deletions(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index 42839670b..d0bc19dcf 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -1,3 +1,4 @@ +import graphviz import os import string import json @@ -61,16 +62,22 @@ def __call__(cls, *args, **kwargs): class DataModelGraph(): ''' Generate graph network (networkx) from the attributes and relationships returned - fromt he data model parser. + fromt the data model parser. Create a singleton. ''' __metaclass__ = DataModelGraphMeta - def __init__(self, attribute_relationships_dict): + def __init__(self, attribute_relationships_dict: dict) -> None: '''Load parsed data model. + Args: + attributes_relationship_dict, dict: generated in data_model_parser + {Attribute Display Name: { + Relationships: { + CSV Header: Value}}} + Raises: + ValueError, attribute_relationship_dict not loaded. 
''' - self.attribute_relationships_dict = attribute_relationships_dict self.dmn = DataModelNodes(self.attribute_relationships_dict) self.dme = DataModelEdges() @@ -83,29 +90,34 @@ def __init__(self, attribute_relationships_dict): self.graph = self.generate_data_model_graph() - def generate_data_model_graph(self): + def generate_data_model_graph(self) -> nx.MultiDiGraph: '''Generate NetworkX Graph from the Relationships/attributes dictionary - + Returns: + G: nx.MultiDiGraph, networkx graph representation of the data model ''' # Get all relationships with edges edge_relationships = self.dmr.define_edge_relationships() + # Find all nodes + all_nodes = self.dmn.gather_all_nodes(attr_rel_dict=self.attribute_relationships_dict) + # Instantiate NetworkX MultiDigraph G = nx.MultiDiGraph() - - # Find all nodes - all_nodes = self.dmn.gather_all_nodes(self.attribute_relationships_dict) + all_node_dict = {} - ## Fill in MultiDigraph with nodes and edges + + ## Fill in MultiDigraph with nodes for node in all_nodes: - # Gather information for each node node_dict = self.dmn.generate_node_dict(node, self.attribute_relationships_dict) + # Add each node to the all_node_dict to be used for generating edges all_node_dict[node] = node_dict + # Generate node and attach information G = self.dmn.generate_node(G, node_dict) + ## Connect nodes via edges for node in all_nodes: # Generate edges G = self.dme.generate_edge(G, node, all_node_dict, self.attribute_relationships_dict, edge_relationships) @@ -114,62 +126,62 @@ def generate_data_model_graph(self): class DataModelGraphExplorer(): def __init__(self, G,): - ''' - Load data model graph as a singleton. + ''' Load data model graph as a singleton. + Args: + G: nx.MultiDiGraph, networkx graph representation of the data model ''' self.graph = G self.dmr = DataModelRelationships() self.rel_dict = self.dmr.relationships_dictionary - # TODO: Clean up to create variables within a loop. 
- # Creating variables here so its cleaner to know all the references at the top of the class - # Get node labels and edge keys for all referenced relationships - # Edge Keys - self.domainIncludes_ek = self.rel_dict['domainIncludes']['edge_key'] - self.reqComp_ek = self.rel_dict['requiresComponent']['edge_key'] - self.reqDep_ek = self.rel_dict['requiresDependency']['edge_key'] - self.subClassOf_ek = self.rel_dict['subClassOf']['edge_key'] - self.rangeIncludes_ek = self.rel_dict['rangeIncludes']['edge_key'] - - # Node Labels - self.displayName_nl = self.rel_dict['displayName']['node_label'] - self.comment_nl = self.rel_dict['comment']['node_label'] - self.validationRules_nl = self.rel_dict['validationRules']['node_label'] - - def find_properties(self): - """ + def find_properties(self) -> set: + """Identify all properties, as defined by the first node in a pair, connected with 'domainIncludes' edge type + Returns: + properties, set: All properties defined in the data model, each property name is defined by its label. """ - properties=[] for node_1, node_2, rel in self.graph.edges: - if rel == self.domainIncludes_ek: + if rel == self.rel_dict['domainIncludes']['edge_key']: properties.append(node_1) properties = set(properties) return properties - def find_classes(self): - #checked + def find_classes(self) -> set: + """Identify all classes, as defined but all nodes, minus all properties (which are explicitly defined) + Returns: + classes, set: All classes defined in the data model, each class name is defined by its label. 
+ """ nodes = self.graph.nodes properties = self.find_properties() classes = nodes - properties return classes - def find_node_range(self, attribute): + def find_node_range(self, node_label:Optional[bool], node_display_name:Optional[bool]) -> list: + """Get valid values for the given node (attribute) + Args: + node_label, str, Optional[bool]: label of the node for which to retrieve valid values + node_display_name, str, Optional[bool]: Display Name of the node for which to retrieve valid values + Returns: + valid_values, list: List of valid values associated with the provided node. + """ + if not node_label: + node_label = self.get_node_label(display_name) + valid_values=[] for node_1, node_2, rel in self.graph.edges: - if node_1 == attribute and rel == self.rangeIncludes_ek: + if node_1 == node_label and rel == self.rel_dict['rangeIncludes']['edge_key']: valid_values.append(node_2) valid_values = list(set(valid_values)) return valid_values def get_adjacent_nodes_by_relationship(self, - node: str, + node_label: str, relationship: str) -> List[str]: """Get a list of nodes that is / are adjacent to a given node, based on a relationship type. Args: - node: the node whose edges we need to look at. + node_label: label of the the node whose edges we need to look at. relationship: the type of link(s) that the above node and its immediate neighbors share. Returns: @@ -178,7 +190,7 @@ def get_adjacent_nodes_by_relationship(self, """ nodes = set() - for (u, v, key, c) in self.graph.out_edges(node, data=True, keys=True): + for (u, v, key, c) in self.graph.out_edges(node=node_label, data=True, keys=True): if key == relationship: nodes.add(v) @@ -194,13 +206,12 @@ def get_component_requirements(self, Returns: List of nodes that are descendants from the source component are are related to the source through a specific component relationship. 
- # Tested """ req_components = list( reversed( self.get_descendants_by_edge_type( - source_component, self.reqComp_ek, ordered=True + source_component, self.rel_dict['requiresComponent']['edge_key'], ordered=True ) ) ) @@ -213,7 +224,7 @@ def get_component_requirements_graph(self, """Get all components that are associated with a given source component and are required by it; return the components as a dependency graph (i.e. a DAG). Args: - source_component: source component for which we need to find all required downstream components. + source_component, str: source component for which we need to find all required downstream components. Returns: A subgraph of the schema graph induced on nodes that are descendants from the source component and are related to the source through a specific component relationship. @@ -224,7 +235,7 @@ def get_component_requirements_graph(self, # get the subgraph induced on required component nodes req_components_graph = self.get_subgraph_by_edge_type( - self.reqComp_ek, + self.rel_dict['requiresComponent']['edge_key'], ).subgraph(req_components) return req_components_graph @@ -248,7 +259,6 @@ def get_descendants_by_edge_type(self, Returns: List of nodes that are descendants from a particular node (sorted / unsorted) - # Tested """ root_descendants = nx.descendants(self.graph, source_node) @@ -295,16 +305,17 @@ def get_descendants_by_edge_type(self, return list(descendants) - def get_digraph_by_edge_type(self, edge_type): - ''' - TODO: rename to get_digraph, since edge type parameter is not used, will take it now for legacy. + def get_digraph_by_edge_type(self, edge_type:str) -> nx.DiGraph: + '''Get a networkx digraph of the nodes connected via a given edge_type. 
+ Args: + edge_type: + Edge type to search for, possible types are defined by 'edge_key' in relationship class + Returns: ''' - digraph = nx.DiGraph() for (u, v, key, c) in self.graph.edges(data=True, keys=True): if key == edge_type: digraph.add_edge(u, v) - return digraph def get_edges_by_relationship(self, @@ -314,7 +325,6 @@ def get_edges_by_relationship(self, """Get a list of out-edges of a node where the edges match a specifc type of relationship. i.e., the edges connecting a node to its neighbors are of relationship type -- "parentOf" (set of edges to children / sub-class nodes). - Note: possible edge relationships are -- parentOf, rangeValue, requiresDependency. Args: node: the node whose edges we need to look at. @@ -331,9 +341,22 @@ def get_edges_by_relationship(self, return edges - def get_ordered_entry(self, key: str, source_node_label:str): - + def get_ordered_entry(self, key: str, source_node_label:str) -> list: + """Order the values associated with a particular node and edge_key to match original ordering in schema. + Args: + key: a key representing and edge relationship in DataModelRelationships.relationships_dictionary + source_node_label, str: node to look for edges of and order + Returns: + sorted_nodes, list: list of sorted nodes, that share the specified relationship with the source node + Example: + For the example data model, for key='rangeIncludes', source_node_label='CancerType' the return would be ['Breast, 'Colorectal', 'Lung', 'Prostate', 'Skin'] in that exact order. + Raises: + KeyError, cannot find source node in graph + """ # Check if node is in the graph, if not throw an error. 
+ if not self.is_class_in_schema(node_label=source_node_label): + raise KeyError(f"Cannot find node: {source_node_label} in the graph, please check entry.") + edge_key = self.rel_dict[key]['edge_key'] if self.rel_dict[key]['jsonld_direction'] == 'out': #use outedges @@ -349,20 +372,19 @@ def get_ordered_entry(self, key: str, source_node_label:str): if edge_key in self.graph[attached_node][source_node] } - sorted_edges = list(dict(sorted(original_edge_weights_dict.items(), key=lambda item: item[1])).keys()) + sorted_nodes = list(dict(sorted(original_edge_weights_dict.items(), key=lambda item: item[1])).keys()) - return sorted_edges + return sorted_nodes # Get values associated with a node - # TODO: make sure all these gets follow the same pattern for clarity - - def get_nodes_ancestors(self, graph, component): - """ - Return a list of nodes reachable from source in graph - graph: networkx graph object - component: any given node + def get_nodes_ancestors(self, node_label:str) -> list: + """Get a list of nodes reachable from source component in graph + Args: + node_labe, str: label of node to find ancestors for + Returns: + all_ancestors, list: nodes reachable from source in graph """ - all_ancestors = list(nx.ancestors(graph, component)) + all_ancestors = list(nx.ancestors(self.graph, component)) return all_ancestors @@ -370,11 +392,10 @@ def get_node_comment(self, node_display_name: str = None, node_label: str= None) """Get the node definition, i.e., the "comment" associated with a given node display name. Args: - node_display_name: Display name of the node which you want to get the label for. - + node_display_name, str: Display name of the node which you want to get the comment for. + node_label, str: Label of the node you would want to get the comment for. Returns: Comment associated with node, as a string. 
- TODO: add to args """ if not node_label: node_label = self.get_node_label(node_display_name) @@ -382,7 +403,7 @@ def get_node_comment(self, node_display_name: str = None, node_label: str= None) if not node_label: return "" - node_definition = self.graph.nodes[node_label][self.comment_nl] + node_definition = self.graph.nodes[node_label][self.rel_dict['comment']['node_label']] return node_definition @@ -409,24 +430,25 @@ def get_node_dependencies(self, required_dependencies = self.get_ordered_entry(key=self.reqDep_ek, source_node_label=source_node) else: required_dependencies = self.get_adjacent_nodes_by_relationship( - node = source_node, relationship = self.reqDep_ek) + node_label = source_node, relationship = self.reqDep_ek) if display_names: # get display names of dependencies dependencies_display_names = [] for req in required_dependencies: - dependencies_display_names.append(self.graph.nodes[req][self.displayName_nl]) + dependencies_display_names.append(self.graph.nodes[req][self.rel_dict['displayName']['node_label']]) return dependencies_display_names return required_dependencies - def get_nodes_descendants(self, component): - """ - Return a list of nodes reachable from source in graph - graph: networkx graph object - component: any given node + def get_nodes_descendants(self, node_label:str) -> list: + """Return a list of nodes reachable from source in graph + Args: + node_label, str: any given node + Return: + all_descendants, list: nodes reachable from source in graph """ all_descendants = list(nx.descendants(self.graph, component)) @@ -444,7 +466,7 @@ def get_nodes_display_names( List of display names. 
""" node_list_display_names = [ - self.graph.nodes[node][self.displayName_nl] for node in node_list + self.graph.nodes[node][self.rel_dict['displayName']['node_label']] for node in node_list ] return node_list_display_names @@ -454,10 +476,8 @@ def get_node_label(self, node_display_name: str) -> str: Args: node_display_name: Display name of the node which you want to get the label for. - Returns: Node label associated with given node. - Raises: KeyError: If the node cannot be found in the graph. """ @@ -472,23 +492,29 @@ def get_node_label(self, node_display_name: str) -> str: elif node_property_label in self.graph.nodes: node_label = node_property_label else: - node_label = "" + raise KeyError(f"Cannot find node: {node_display_name} in the graph, please check entry.") return node_label - def get_node_range(self, node_label: str, display_names: bool = True) -> List[str]: + def get_node_range(self, node_label: Optional[bool], node_display_name: Optional[bool]) -> List[str]: """Get the range, i.e., all the valid values that are associated with a node label. Args: - node_label: Node / termn for which you need to retrieve the range. - + node_label: Node for which you need to retrieve the range. + display_names, bool: True Returns: - List of display names of nodes associateed with the given node. + required_range: Returned if display_names=False, list of valid values (labels) associated with a given node. + dependencies_display_name: Returned if display_names=True, + List of valid values (display names) associated with a given node + Raises: + ValueError: If the node cannot be found in the graph. 
""" + if not node_label: + node_label = self.get_node_label(node_display_name) + try: # get node range in the order defined in schema for given node - #required_range = self.graph.explore_class(node_label)["range"] - required_range = self.find_node_range(attribute = node_label) + required_range = self.find_node_range(node_label = node_label) except KeyError: raise ValueError( f"The source node {node_label} does not exist in the graph. " @@ -506,13 +532,14 @@ def get_node_range(self, node_label: str, display_names: bool = True) -> List[st return required_range - def get_node_required(self, node_display_name: str = None, node_label:str = None) -> bool: + def get_node_required(self, node_label:Optional[bool], node_display_name: Optional[bool]) -> bool: """Check if a given node is required or not. Note: The possible options that a node can be associated with -- "required" / "optional". Args: - node_display_name: Display name of the node which you want to get the label for. + node_display_name: Display name of the node for which you want look up. + node_label: Label of the node for which you need to look up. Returns: True: If the given node is a "required" node. @@ -525,7 +552,7 @@ def get_node_required(self, node_display_name: str = None, node_label:str = None node_required = self.graph.nodes[node_label][rel_node_label] return node_required - def get_node_validation_rules(self, node_display_name: str = None, node_label: str = None) -> str: + def get_node_validation_rules(self, node_label: Optional[bool], node_display_name: Optional[bool]) -> str: """Get validation rules associated with a node, Args: @@ -550,7 +577,6 @@ def get_subgraph_by_edge_type( """Get a subgraph containing all edges of a given type (aka relationship). Args: - graph: input multi digraph (aka hypergraph) relationship: edge / link relationship type with possible values same as in above docs. 
Returns: @@ -569,12 +595,23 @@ def get_subgraph_by_edge_type( return relationship_subgraph + def find_adjacent_child_classes(self, node_label: Optional[bool], node_display_name: Optional[bool])->List[str]: + '''Find child classes of a given node. + Args: + node_display_name: Display name of the node to look up. + node_label: Label of the node to look up. + Returns: + List of nodes that are adjacent to the given node, by SubclassOf relationship. + ''' + return self.get_adjacent_nodes_by_relationship(node_label = schema_class, relationship = self.rel_dict['subClassOf']['edge_key']) - def find_adjacent_child_classes(self, schema_class): - return self.get_adjacent_nodes_by_relationship(node = schema_class, relationship = self.subClassOf_ek) - - def find_child_classes(self, schema_class): - """Find schema classes that inherit from the given class""" + def find_child_classes(self, schema_class: str) -> list: + """Find schema classes that inherit from the given class + Args: + schema_class: node label for the class to from which to look for children. + Returns: + list of children to the schema_class. + """ return unlist(list(self.graph.successors(schema_class))) def find_class_specific_properties(self, schema_class): @@ -610,30 +647,58 @@ def find_class_specific_properties(self, schema_class): ''' return properties - def find_parent_classes(self, schema_class): - """Find all parents of the class""" - + def find_parent_classes(self, node_label:str) -> List[list]: + """Find all parents of the provided node + Args: + node_label: label of the node to find parents of + Returns: + List of list of Parents to the given node. + """ + # Get digraph of nodes with parents digraph = self.get_digraph_by_edge_type("parentOf") + # Get root node root_node = list(nx.topological_sort(digraph))[0] + # Get paths between root_node and the target node. 
paths = nx.all_simple_paths( - self.graph, source=root_node, target=schema_class + self.graph, source=root_node, target=node_label ) - # print(root_node) + return [_path[:-1] for _path in paths] - def full_schema_graph(self, size=None): + def full_schema_graph(self, size:Optional[bool])-> graphviz.Digraph: + """Create a graph of the data model. + Args: + size, float: max height and width of the graph, if one value provided it is used for both. + Returns: + schema graph viz + """ edges = self.graph.edges() return visualize(edges, size=size) - def is_class_in_schema(self, class_label): - if self.graph.nodes[class_label]: + def is_class_in_schema(self, node_label: str) -> bool: + """Determine if provided node_label is in the schema graph/data model. + Args: + class_label: label of node to search for in the + Returns: + True, if node is in the graph schema + False, if node is not in graph schema + """ + if node_label in self.graph.nodes(): return True else: return False - def sub_schema_graph(self, source, direction, size=None): + def sub_schema_graph(self, source, direction, size=None) -> graphviz.Digraph: + """Create a sub-schema graph + Args: + source, str: source node label to start graph + direction, str: direction to create the vizualization, choose from "up", "down", "both" + size, float: max height and width of the graph, if one value provided it is used for both. 
+ Returns: + Sub-schema graph viz + """ if direction == "down": edges = list(nx.edge_bfs(self.graph, [source])) return visualize(edges, size=size) From e8b4b9f6e8e884d8c67d310e8cfd5ae7b8e896a9 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 11 Sep 2023 14:10:30 -0700 Subject: [PATCH 051/239] clean document schematic/schemas/data_model_json_schema.py --- schematic/schemas/data_model_json_schema.py | 26 ++++----------------- 1 file changed, 5 insertions(+), 21 deletions(-) diff --git a/schematic/schemas/data_model_json_schema.py b/schematic/schemas/data_model_json_schema.py index 0699bb819..47c386c4f 100644 --- a/schematic/schemas/data_model_json_schema.py +++ b/schematic/schemas/data_model_json_schema.py @@ -17,15 +17,7 @@ def __init__(self, jsonld_path: str, graph:nx.MultiDiGraph, self.DME = DataModelGraphExplorer(self.graph) self.dmr = DataModelRelationships() self.rel_dict = self.dmr.relationships_dictionary - - # Edge Keys - self.reqDep_ek = self.rel_dict['requiresDependency']['edge_key'] - self.rangeIncludes_ek = self.rel_dict['rangeIncludes']['edge_key'] - self.reqComp_ek = self.rel_dict['requiresComponent']['edge_key'] - - # Node Labels - self.displayName_nl = self.rel_dict['displayName']['node_label'] - + def get_array_schema( self, node_range: List[str], node_name: str, blank=False ) -> Dict[str, Dict[str, List[str]]]: @@ -40,7 +32,6 @@ def get_array_schema( Returns: JSON object with array validation rule. - TODO: used? """ schema_node_range_array = { @@ -63,8 +54,6 @@ def get_non_blank_schema( Returns: Schema rule as a JSON object. - TODO: - Used? """ non_blank_schema = {node_name: {"not": {"type": "null"}, "minLength": 1}} @@ -83,8 +72,6 @@ def get_range_schema( Returns: JSON object with nodes. - TODO: - Used? 
""" if blank: schema_node_range = {node_name: {"enum": node_range + [""]}} @@ -120,9 +107,6 @@ def get_json_validation_schema(self, source_node: str, schema_name: str) -> Dict "allOf": [], } - # get graph corresponding to data model schema - #mm_graph = self.se.get_nx_schema() - nodes_to_process = ( [] ) # list of nodes to be checked for dependencies, starting with the source node @@ -138,7 +122,7 @@ def get_json_validation_schema(self, source_node: str, schema_name: str) -> Dict # the domain node is very likely the parentof ("parentOf" relationship) of the range node root_dependencies = self.DME.get_adjacent_nodes_by_relationship( - node = source_node, relationship=self.reqDep_ek, + node = source_node, relationship=self.rel_dict['requiresDependency']['edge_key'], ) # if root_dependencies is empty it means that a class with name 'source_node' exists @@ -157,7 +141,7 @@ def get_json_validation_schema(self, source_node: str, schema_name: str) -> Dict node_is_processed = True node_range = self.DME.get_adjacent_nodes_by_relationship( - node=process_node, relationship=self.rangeIncludes_ek, + node=process_node, relationship=self.rel_dict['rangeIncludes']['edge_key'], ) @@ -165,11 +149,11 @@ def get_json_validation_schema(self, source_node: str, schema_name: str) -> Dict node_range_d = self.DME.get_nodes_display_names(node_list=node_range) node_dependencies = self.DME.get_adjacent_nodes_by_relationship( - node=process_node, relationship=self.reqDep_ek, + node=process_node, relationship=self.rel_dict['requiresDependency']['edge_key'], ) # get process node display name - node_display_name = self.graph.nodes[process_node][self.displayName_nl] + node_display_name = self.graph.nodes[process_node][self.rel_dict['displayName']['node_label']] # updating map between node and node's valid values for n in node_range_d: From 153fac0cb2c7da76f7c1f518085034d1ba42fbda Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 11 Sep 2023 14:10:55 -0700 Subject: [PATCH 052/239] 
clean/document schematic/schemas/data_model_jsonld.py --- schematic/schemas/data_model_jsonld.py | 210 ++++++++++--------------- 1 file changed, 79 insertions(+), 131 deletions(-) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 78f8ad64f..f30798273 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -17,18 +17,14 @@ def __init__(self, Graph: nx.MultiDiGraph, output_path:str = ''): self.graph = Graph self.dmr = DataModelRelationships() self.rel_dict = self.dmr.relationships_dictionary - ''' - self.jsonld_object = JSONLD_object(DataModelJsonLD) - self.jsonld_class = JSONLD_class(self.jsonld_object) - self.jsonld_property = JSONLD_property(self.jsonld_object) - ''' self.DME = DataModelGraphExplorer(self.graph) self.output_path = output_path - def base_jsonld_template(self): - """ - #Base starter template, to be filled out with model. For entire file. + def base_jsonld_template(self) -> dict: + """Base starter JSONLD template, to be filled out with model. For entire file. + Returns: + base_template, dict: base JSONLD template TODO: when done adding contexts fill out this section here. """ base_template = { @@ -44,9 +40,15 @@ def base_jsonld_template(self): } return base_template - def create_object(self, template, node): + def create_object(self, template:dict, node:str)->dict: + """ Fill in a blank JSONLD template with information for each node. All relationships are filled from the graph, based on the type of information (node or edge) + Args: + template, dict: empty class or property template to be filled with information for the given node. + node, str: target node to fill the template out for. + Returns: + template, dict: filled class or property template, that has been processed and cleaned up. 
+ """ data_model_relationships = self.dmr.relationships_dictionary - #edge_to_jsonld_keys = {rel_vals['edge_key']: rel_vals['jsonld_key'] for rel, rel_vals in data_model_relationships.items() if rel_vals['edge_rel']} # For each field in template fill out with information from the graph for rel, rel_vals in data_model_relationships.items(): @@ -59,21 +61,23 @@ def create_object(self, template, node): node_edges = list(self.graph.in_edges(node, data=True)) node_edges.extend(list(self.graph.out_edges(node,data=True))) + # Get node pairs and weights for each edge for node_1, node_2, weight in node_edges: - # Get 'AtlasView'('relationship':{weight:value}) of edge - # need to convert this relationship back to the JSONLD key_rel + + # Retrieve the relationship(s) and related info between the two nodes node_edge_relationships = self.graph[node_1][node_2] - edge_rel = rel_vals['edge_key'] - # Check if key_rel is even one of the relationships for this node pair. - #if key_rel in node_edge_relationships: - if edge_rel in node_edge_relationships: - + # Get the relationship edge key + edge_key = rel_vals['edge_key'] + + # Check if edge_key is even one of the relationships for this node pair. + if edge_key in node_edge_relationships: + # for each relationship between the given nodes for relationship, weight_dict in node_edge_relationships.items(): - #if relationship == key_rel: - if relationship == edge_rel: - - if edge_rel in ['domainIncludes', 'parentOf']: + # If the relationship defined and edge_key + if relationship == edge_key: + # TODO: rewrite to use edge_dir + if edge_key in ['domainIncludes', 'parentOf']: if node_2 == node: # Make sure the key is in the template (differs between properties and classes) if rel_vals['jsonld_key'] in template.keys(): @@ -92,19 +96,19 @@ def create_object(self, template, node): # TODO Move this to a helper function to clear up. 
if (isinstance(template[rel_vals['jsonld_key']], list) and node_2_id not in template[rel_vals['jsonld_key']]): - # could possibly keep track of weights here but that might slow things down template[rel_vals['jsonld_key']].append(node_2_id) else: template[rel_vals['jsonld_key']] == node_2 + # Fill in node value information else: - # attribute here refers to node attibutes (come up with better name.) - node_attribute_name = rel_vals['node_label'] + node_label = rel_vals['node_label'] # Get recorded info for current node, and the attribute type - node_info = nx.get_node_attributes(self.graph, node_attribute_name)[node] + node_info = nx.get_node_attributes(self.graph, node_label)[node] # Add this information to the template template[rel_vals['jsonld_key']] = node_info + # Clean up template template = self.clean_template(template=template, data_model_relationships=data_model_relationships, @@ -112,33 +116,48 @@ def create_object(self, template, node): # Reorder lists based on weights: template = self.reorder_template_entries(template=template,) - # Add contexts back + # Add contexts to certain values template = self.add_contexts_to_entries(template=template,) return template - def add_contexts_to_entries(self, template): + def add_contexts_to_entries(self, template:dict) -> dict: + """ + Args: + template, dict: JSONLD template that has been filled up to the current node, with information + Returns: + template, dict: JSONLD template where contexts have been added back to certain values. 
+ Note: This will likely need to be modified when Contexts are truly added to the model + """ for jsonld_key, entry in template.items(): try: + # Retrieve the relationships key using the jsonld_key key= [k for k, v in self.rel_dict.items() if jsonld_key == v['jsonld_key']][0] except: continue + # If the current relationship can be defined with a 'node_attr_dict' if 'node_attr_dict' in self.rel_dict[key].keys(): - # Changes to data_model_relationships may mean this part will need to be updated. try: + # if possible pull standard function to get node information rel_func = self.rel_dict[key]['node_attr_dict']['standard'] except: + # if not pull default function to get node information rel_func = self.rel_dict[key]['node_attr_dict']['default'] + + # Add appropritae contexts that have been removed in previous steps (for JSONLD) or did not exist to begin with (csv) if key == 'id' and rel_func == get_label_from_display_name: template[jsonld_key] = 'bts:' + template[jsonld_key] elif key == 'required' and rel_func == convert_bool: - #clean up use of convert bool here. template[jsonld_key] = 'sms:' + str(template[jsonld_key]).lower() return template - def clean_template(self, template, data_model_relationships): - ''' - Get rid of empty k:v pairs. Fill with a default if specified in the relationships dictionary. + def clean_template(self, template: dict, data_model_relationships: dict) -> dict: + '''Get rid of empty k:v pairs. Fill with a default if specified in the relationships dictionary. + Args: + template, dict: JSONLD template for a single entry, keys specified in property and class templates. + data_model_relationships, dict: dictionary containing information for each relationship type supported. + Returns: + template: JSONLD template where unfilled entries have been removed, or filled with default depending on specifications in the relationships dictionary. 
''' for rels in data_model_relationships.values(): if rels['jsonld_key'] in template.keys() and not template[rels['jsonld_key']]: @@ -148,23 +167,30 @@ def clean_template(self, template, data_model_relationships): del template[rels['jsonld_key']] return template - def strip_context(self, context_value): + def strip_context(self, context_value: str) -> tuple[str]: + """Strip contexts from str entry. + Args: + context_value, str: string from which to strip context from + Returns: + context, str: the original context + v, str: value separated from context + """ if ':' in context_value: context, v = context_value.split(':') elif '@' in context_value: context, v = context_value.split('@') return context, v - def reorder_template_entries(self, template): + def reorder_template_entries(self, template:dict) -> dict: '''In JSONLD some classes or property keys have list values. We want to make sure these lists are ordered according to the order supplied by the user. This will look specically in lists and reorder those. Args: - template (dict): + template, dict: JSONLD template for a single entry, keys specified in property and class templates. Returns: - template (dict): list entries re-ordered to match user supplied order. - + template, dict: list entries re-ordered to match user supplied order. 
+ Note: + User order only matters for nodes that are also attributes ''' - # user order only matters for nodes that are also attributes template_label = template['rdfs:label'] for jsonld_key, entry in template.items(): @@ -175,6 +201,7 @@ def reorder_template_entries(self, template): if is_edge and isinstance(entry, list) and len(entry)>1: # Get edge key from data_model_relationships using the jsonld_key: key, edge_key = [(k, v['edge_key']) for k, v in self.rel_dict.items() if jsonld_key == v['jsonld_key']][0] + # Order edges sorted_edges = self.DME.get_ordered_entry(key=key, source_node_label=template_label) edge_weights_dict={edge:i for i, edge in enumerate(sorted_edges)} @@ -189,7 +216,9 @@ def reorder_template_entries(self, template): return template def property_template(self): - ''' + '''Generate a template for schema property + Returns: + property_template, dict: template for property schema ''' property_template = { "@id": "", @@ -206,7 +235,9 @@ def property_template(self): return property_template def class_template(self): - """ + """Generate a template for schema class + Returns: + class_template, dict: template for class schema """ class_template = { "@id": "", @@ -226,111 +257,28 @@ def class_template(self): def generate_jsonld_object(self): - ''' + '''Create the JSONLD object. + Returns: + jsonld_object, dict: JSONLD object containing all nodes and related information ''' # Get properties. 
properties = self.DME.find_properties() + # Get JSONLD Template - self.json_ld_object = self.base_jsonld_template() + json_ld_object = self.base_jsonld_template() - # Iterativly add graph nodes to json_ld_object as properties and classes + # Iterativly add graph nodes to json_ld_object as properties or classes for node in self.graph.nodes: if node in properties: obj = self.create_object(template = self.property_template(), node = node) else: obj = self.create_object(template = self.class_template(), node = node) - self.json_ld_object['@graph'].append(obj) - return self.json_ld_object + json_ld_object['@graph'].append(obj) + return json_ld_object -""" -class DataModelJsonLD(object): - ''' - #Interface to JSONLD_object - ''' - - def __init__(self, Graph: nx.MultiDiGraph): - # Setup - self.graph = Graph - self.jsonld_object = JSONLD_object(DataModelJsonLD) - self.jsonld_class = JSONLD_class(self.jsonld_object) - self.jsonld_property = JSONLD_property(self.jsonld_object) - self.DME = DataModelGraphExplorer(self.graph) - - def generate_jsonld_object(self): - ''' - #Will call JSONLD_object class to create properties and classes in the process. - ''' - - # Get properties and classes. - properties = self.DME.find_properties() - classes = self.DME.find_classes() - - # Get JSONLD Template - template = JSONLD_object - base - - # Generate properties and classes and add to the template. - - return - - def base_jsonld_template(self): - ''' - #Base starter template, to be filled out with model. - ''' - return - -class JSONLD_object(DataModelJsonLD): - ''' - #Decorator class design - #Base decorator class. 
- ''' - _DataModelJsonLD: DataModelJsonLD = None - - def __init__(self, DataModelJsonLD) -> None: - self.dataModelJsonLD = DataModelJsonLD - - def _create_template(self) -> DataModelJsonLD: - ''' - Returns jsonld_class_template or jsonld_property_template - ''' - return self._DataModelJsonLD - - @property - def to_template(self): - return self._DataModelJsonLD.to_template() - - - -class JSONLD_property(JSONLD_object): - ''' - Property Decorator - ''' - def to_template(self): - return JSONLD_property(self._DataModelJsonLD.to_template()) - - def explore_property(): - return - - def edit_property(): - return - -class JSONLD_class(JSONLD_object): - ''' - Class Decorator - ''' - def to_template(self): - return JSONLD_class(self._DataModelJsonLD.to_template()) - - def explore_class(): - return - - def edit_class(): - return -""" def convert_graph_to_jsonld(Graph): # Make the JSONLD object data_model_jsonld_converter = DataModelJsonLD(Graph=Graph) jsonld_dm = data_model_jsonld_converter.generate_jsonld_object() - return jsonld_dm From 4358abeb3536d790d1127e823cfb88ee46f1cb88 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 11 Sep 2023 14:11:17 -0700 Subject: [PATCH 053/239] clean/document schematic/schemas/data_model_nodes.py --- schematic/schemas/data_model_nodes.py | 181 ++++++++++++++++---------- 1 file changed, 110 insertions(+), 71 deletions(-) diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index 104133b95..8e50401ae 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -1,11 +1,13 @@ from inspect import isfunction +import networkx as nx from rdflib import Namespace +from typing import Any, Dict, Optional, Text, List, Callable from schematic.schemas.data_model_relationships import ( DataModelRelationships ) -from schematic.utils.schema_utils import get_label_from_display_name, get_display_name_from_label, convert_bool, parse_validation_rules +from 
schematic.utils.schema_utils import get_label_from_display_name, get_attribute_display_name_from_label, convert_bool_to_str, parse_validation_rules from schematic.utils.validate_rules_utils import validate_schema_rules from schematic.schemas.curie import uri2curie, curie2uri @@ -16,30 +18,28 @@ def __init__(self, attribute_relationships_dict): self.data_model_relationships = DataModelRelationships() self.value_relationships = self.data_model_relationships.define_value_relationships() self.edge_relationships_dictionary = self.data_model_relationships.define_edge_relationships() - self.ar_dict = attribute_relationships_dict - # Identify all properties - self.properties = self.get_data_model_properties(ar_dict=self.ar_dict) - - - return + self.properties = self.get_data_model_properties(attr_rel_dict=attribute_relationships_dict) + ''' def node_present(self, G, node_name): if node_name in G.nodes(): return True else: return False - - def gather_nodes(self, attr_info): + ''' + def gather_nodes(self, attr_info: tuple) -> list: """Take in a tuple containing attriute name and relationship dictionary, and find all nodes defined in attribute information. Args: - + attr_info, tuple: (Display Name, Relationships Dictionary portion of attribute_relationships dictionary) Returns: - list, nodes defined by attribute_info as being related to that attribute. + nodes, list: nodes related to the given node (specified in attr_info). + Note: + Extracting nodes in this fashion ensures order is preserved. """ # retrieve a list of relationship types that will produce nodes. 
self.node_relationships =list(self.edge_relationships_dictionary.values()) - # Extract attribure and relationship dictionary + # Extract attribute and relationship dictionary attribute, relationship = attr_info relationships = relationship['Relationships'] @@ -52,94 +52,129 @@ def gather_nodes(self, attr_info): for node in relationships[rel]]) return nodes - def gather_all_nodes(self, data_model): - """ + def gather_all_nodes(self, attr_rel_dict: dict): + """Gather all nodes in the data model, in order. Args: - + attr_rel_dict, dict: generated in data_model_parser + {Attribute Display Name: { + Relationships: { + CSV Header: Value}}} Returns: + all_nodes, list: List of all node display names in the data model preserving order entered. + Note: + Gathering nodes in this fashion ensures order is preserved. """ all_nodes = [] - for attr_info in data_model.items(): + for attr_info in attr_rel_dict.items(): nodes = self.gather_nodes(attr_info=attr_info) all_nodes.extend(nodes) + # Remove any duplicates preserving order all_nodes = list(dict.fromkeys(all_nodes).keys()) return all_nodes - def get_rel_default_info(self, relationship): - """ - For each display name fill out defaults. Maybe skip default. + def get_rel_node_dict_info(self, relationship: str) -> tuple: + """For each display name get defaults for nodes. + Args: + relationship, str: relationship key to match. + Returns: + rel_key, str: relationship node label + rel_node_dict, dict: node_attr_dict, from relationships dictionary for a given relationship + """ for k,v in self.data_model_relationships.relationships_dictionary.items(): if k == relationship: if 'node_attr_dict' in v.keys(): rel_key = v['node_label'] - rel_default = v['node_attr_dict'] - return rel_key, rel_default + rel_node_dict = v['node_attr_dict'] + return rel_key, rel_node_dict - def get_data_model_properties(self, ar_dict): + def get_data_model_properties(self, attr_rel_dict: dict) -> list: + """Identify all properties defined in the data model. 
+ Args: + attr_rel_dict, dict: + {Attribute Display Name: { + Relationships: { + CSV Header: Value}}} + Returns: + properties,list: properties defined in the data model + """ properties=[] - for attribute, relationships in ar_dict.items(): + for attribute, relationships in attr_rel_dict.items(): if 'Properties' in relationships['Relationships'].keys(): properties.extend(relationships['Relationships']['Properties']) properties = list(set(properties)) return properties - def get_entry_type(self, node_display_name): + def get_entry_type(self, node_display_name:str) -> str: + """Get the entry type of the node, property or class. + Args: + node_display_name, str: display name of target node. + Returns: + entry_type, str: returns 'property' or 'class' based on data model specifications. + """ if node_display_name in self.properties: - entry_type = 'property' + entry_type = 'property' else: entry_type = 'class' return entry_type - def run_rel_functions(self, rel_func, node_display_name='', key='', attr_relationships='', csv_header='', entry_type=''): - ''' This function exists to centralzie handling of functions for filling out node information. - TODO: and an ending else statement to alert to no func being caught. - - Implement using a factory pattern. - elif key == 'id' and rel_func == get_property_label_from_display_name: - func_output = 'bts:' + get_property_label_from_display_name(node_display_name) - - elif rel_func == get_class_label_from_display_name: - func_output = get_class_label_from_display_name(node_display_name) - ''' + def run_rel_functions(self, rel_func:callable, node_display_name:str='', key:str='', attr_relationships={}, csv_header='', entry_type=''): + ''' This function exists to centralzie handling of functions for filling out node information, makes sure all the proper parameters are passed to each function. 
+ Args: + rel_func, callable: Function to call to get information to attach to the node + node_display_name, str: node display name + key, str: relationship key + attr_relationships, dict: relationships portion of attributes_relationships dictionary + csv_header, str: csv header + entry_type, str: 'class' or 'property' defines how - func_output = '' - if rel_func == get_display_name_from_label: - func_output = get_display_name_from_label(node_display_name, attr_relationships) - elif rel_func == parse_validation_rules: - func_output = parse_validation_rules(attr_relationships[csv_header]) + Returns: + Outputs of specified rel_func (relationship function) + + For legacy: elif key == 'id' and rel_func == get_label_from_display_name: - #func_output = 'bts:' + get_label_from_display_name(display_name =node_display_name, entry_type=entry_type) func_output = get_label_from_display_name(display_name =node_display_name, entry_type=entry_type) + ''' + if rel_func == get_attribute_display_name_from_label: + return get_attribute_display_name_from_label(node_display_name, attr_relationships) + + elif rel_func == parse_validation_rules: + return parse_validation_rules(attr_relationships[csv_header]) + elif rel_func == get_label_from_display_name: - func_output = get_label_from_display_name(display_name =node_display_name, entry_type=entry_type) - elif rel_func == convert_bool: - #func_output = 'sms:' + convert_bool(attr_relationships[csv_header]).lower() + return get_label_from_display_name(display_name =node_display_name, entry_type=entry_type) + + elif rel_func == convert_bool_to_str: if type(attr_relationships[csv_header]) == str: if attr_relationships[csv_header].lower() == 'true': - func_output = True + return True elif attr_relationships[csv_header].lower() == 'false': - func_output = False + return False + elif type(attr_relationships[csv_header]) == bool: - func_output = attr_relationships[csv_header] + return attr_relationships[csv_header] + else: - # raise error here 
to catch non valid function. - breakpoint() - return func_output - - def generate_node_dict(self, node_display_name, data_model): + # Raise Error if the rel_func provided is not captured. + raise KeyError(f"The function provided ({rel_func}) to define the relationship {key} is not captured in the function run_rel_functions, please update.") + return + def generate_node_dict(self, node_display_name: str, attr_rel_dict: dict) -> dict: """Gather information to be attached to each node. Args: - node_display_name: display name for current node - data_model: + node_display_name, str: display name for current node + attr_rel_dict, dict: generated in data_model_parser + {Attribute Display Name: { + Relationships: { + CSV Header: Value}}} Returns: - node_dict + node_dict, dict: dictionary of relationship information about the current node + {'displayName': '', 'label': '', 'comment': 'TBD', 'required': None, 'validationRules': [], 'isPartOf': '', 'uri': ''} Note: If the default calls function, call that function for the default or alternate implementation. May need to update this logic for varying function calls. (for example the current function takes in the node display name - ould need to update if new function took in something else.) + would need to update if new function took in something else.) """ - # Strip whitespace from node display name node_display_name = node_display_name.strip() @@ -148,8 +183,8 @@ def generate_node_dict(self, node_display_name, data_model): # If the node is an attribute, find its relationships. 
attr_relationships = {} - if node_display_name in data_model.keys(): - attr_relationships = data_model[node_display_name]['Relationships'] + if node_display_name in attr_rel_dict.keys(): + attr_relationships = attr_rel_dict[node_display_name]['Relationships'] # Initialize node_dict node_dict = {} @@ -158,36 +193,40 @@ def generate_node_dict(self, node_display_name, data_model): for key, csv_header in self.value_relationships.items(): # Get key and defalt values current relationship type. - rel_key, rel_default = self.get_rel_default_info(key) + rel_key, rel_node_dict = self.get_rel_node_dict_info(key) - # If we have information to add about this particular node + # If we have information to add about this particular node, get it if csv_header in attr_relationships.keys(): - # Check if the default specifies calling a function. - if 'standard' in rel_default.keys() and isfunction(rel_default['standard']): + # Check if the 'standard' specifies calling a function. + if 'standard' in rel_node_dict.keys() and isfunction(rel_node_dict['standard']): # Add to node_dict The value comes from the standard function call. - node_dict.update({rel_key: self.run_rel_functions(rel_default['standard'], node_display_name=node_display_name, key=key, attr_relationships=attr_relationships, csv_header=csv_header, entry_type=entry_type)}) + node_dict.update({rel_key: self.run_rel_functions(rel_node_dict['standard'], node_display_name=node_display_name, key=key, attr_relationships=attr_relationships, csv_header=csv_header, entry_type=entry_type)}) else: # For standard entries, get information from attr_relationship dictionary node_dict.update({rel_key: attr_relationships[csv_header]}) # else, add default values else: # Check if the default specifies calling a function. 
- if 'default' in rel_default.keys() and isfunction(rel_default['default']): - node_dict.update({rel_key: self.run_rel_functions(rel_default['default'], node_display_name=node_display_name, key=key, attr_relationships=attr_relationships, csv_header=csv_header, entry_type=entry_type)}) + if 'default' in rel_node_dict.keys() and isfunction(rel_node_dict['default']): + node_dict.update({rel_key: self.run_rel_functions(rel_node_dict['default'], node_display_name=node_display_name, key=key, attr_relationships=attr_relationships, csv_header=csv_header, entry_type=entry_type)}) else: # Set value to defaults. - node_dict.update({rel_key: rel_default['default']}) + node_dict.update({rel_key: rel_node_dict['default']}) return node_dict - def generate_node(self, G, node_dict): - """ + def generate_node(self, G: nx.MultiDiGraph, node_dict: dict) -> nx.MultiDiGraph: + """Create a node and add it to the networkx multidigraph being built Args: - + G, nx.MultiDigraph: networkx multidigraph object, that is in the process of being fully built. + node_dict, dict: dictionary of relationship information about the current node Returns: + G, nx.MultiDigraph: networkx multidigraph object, that has had an additional node added to it. """ G.add_node(node_dict['label'], **node_dict) return G - def edit_node(): + def edit_node(self): + """Stub for future node editor. 
+ """ return \ No newline at end of file From 965b0b37f01dc81b28796b27ce61e17f25a50b21 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 11 Sep 2023 14:18:39 -0700 Subject: [PATCH 054/239] clean/document schematic/schemas/data_model_parser.py --- schematic/schemas/data_model_parser.py | 321 +++++++++++-------------- 1 file changed, 146 insertions(+), 175 deletions(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index f6c26ff06..9d4bf9d84 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -1,5 +1,3 @@ -#import numpy as np -import json import logging import pandas as pd import pathlib @@ -14,31 +12,26 @@ from schematic import LOADER -logger = logging.getLogger(__name__) - +logger = logging.getLogger("Synapse storage") class DataModelParser(): ''' - This class takes in a path to a data model (either CSV for JSONLD for now) - and will convert it to attributes and relationships that can then - be further converted into a graph data model. Other data model types - may be added in the future. - - TODO: - Change all naming to fit what we will be using with the graph later. Dictionary in data_model_edges. - - Make sure to build with namespace contexts in mind! - + This class takes in a path to a data model and will convert it to an + attributes:relationship dictionarythat can then be further converted into a graph data model. + Other data model types may be added in the future. ''' def __init__( self, path_to_data_model: str, - base_schema_path: str = None, ) -> None: + """ + Args: + path_to_data_model, str: path to data model. + """ self.path_to_data_model = path_to_data_model self.model_type = self.get_model_type(path_to_data_model) - self.base_schema_path = base_schema_path + self.base_schema_path = None def _get_base_schema_path(self, base_schema: str = None) -> str: """Evaluate path to base schema. 
@@ -50,59 +43,61 @@ def _get_base_schema_path(self, base_schema: str = None) -> str: base_schema_path: Path to base schema based on provided argument. """ biothings_schema_path = LOADER.filename("data_models/biothings.model.jsonld") - base_schema_path = biothings_schema_path if base_schema is None else base_schema + self.base_schema_path = biothings_schema_path if base_schema is None else base_schema - return base_schema_path + return self.base_schema_path - def get_model_type(self, path_to_data_model): - ''' - Parses the path to the data model to extract the extension and determine the data model type. + def get_model_type(self, path_to_data_model: str) -> str: + '''Parses the path to the data model to extract the extension and determine the data model type. + Args: + path_to_data_model, str: path to data model + Returns: + str: uppercase, data model file extension. ''' - model_type = pathlib.Path(path_to_data_model).suffix.replace('.', '').upper() - return model_type + return pathlib.Path(path_to_data_model).suffix.replace('.', '').upper() def parse_base_model(self): - ''' - Add biothings to both models for consistency. - - Do separately from both parsers for clarity. - - Should this be its own class? - - Input: Base model path, if None do not add base model. - + '''Parse base data model that new model could be built upon. + Returns: + base_model, dict: + {Attribute Display Name: { + Relationships: { + CSV Header: Value}}} + Note: Not configured yet to successfully parse biothings. 
''' - if not self.base_schema_path: - return - else: - # determine base schema path - base_model_path = self._get_base_schema_path(self.base_schema_path) + # Determine base schema path + base_model_path = self._get_base_schema_path(self.base_schema_path) - # parse - jsonld_parser = DataModelJSONLDParser() - base_model = jsonld_parser.parse_jsonld_model(base_model_path) - return base_model + # Parse + jsonld_parser = DataModelJSONLDParser() + base_model = jsonld_parser.parse_jsonld_model(base_model_path) + return base_model def parse_model(self): + '''Given a data model type, instantiate and call the appropriate data model parser. + Returns: + model_dict, dict: + {Attribute Display Name: { + Relationships: { + CSV Header: Value}}} + Note: in future will add base model parsing in this step too and extend new model off base model. ''' - Given a data model type, instantiate and call the appropriate data model parser. - ''' + #base_model = self.parse_base_model() + + # Call appropriate data model parser and return parsed model. if self.model_type == 'CSV': csv_parser = DataModelCSVParser() model_dict = csv_parser.parse_csv_model(self.path_to_data_model) elif self.model_type == 'JSONLD': jsonld_parser = DataModelJSONLDParser() model_dict = jsonld_parser.parse_jsonld_model(self.path_to_data_model) + else: + raise ValueError(f"Schematic only accepts models of type CSV or JSONLD, you provided a model type {self.model_type}, please resubmit in the proper format.") - base_model = self.parse_base_model() return model_dict class DataModelCSVParser(): - ''' - - ''' - def __init__( self ): @@ -110,19 +105,13 @@ def __init__( self.dmr = DataModelRelationships() # Load relationships dictionary. 
self.rel_dict = self.dmr.define_data_model_relationships() + # Get edge relationships self.edge_relationships_dictionary = self.dmr.define_edge_relationships() # Load required csv headers self.required_headers = self.dmr.define_required_csv_headers() - - def check_schema_definition(self, model_df: pd.DataFrame) -> bool: - """Checks if a schema definition data frame contains the right required headers. - - See schema definition guide for more details - TODO: post and link schema definition guide - Args: schema_definition: a pandas dataframe containing schema definition; see example here: https://docs.google.com/spreadsheets/d/1J2brhqO4kpeHIkNytzlqrdIiRanXDr6KD2hqjOTC9hs/edit#gid=0 Raises: Exception @@ -147,66 +136,51 @@ def check_schema_definition(self, model_df: pd.DataFrame) -> bool: return - def gather_csv_attributes_relationships(self, model_df): - ''' - Note: Modeled after the current df_parser.create_nx_schema_objects but without reliance - on the SE. Will just try to gather all the attributes and their relationships to one another. - They will be loaded into a graph at a later stage. + def gather_csv_attributes_relationships(self, model_df: pd.DataFrame) -> Dict: + '''Parse csv into a attributes:relationshps dictionary to be used in downstream efforts. + Args: + model_df: pd.DataFrame, data model that has been loaded into pandas DataFrame. + Returns: + attr_rel_dictionary: dict, + {Attribute Display Name: { + Relationships: { + CSV Header: Value}}} ''' - # Check csv schema follows expectations. self.check_schema_definition(model_df) - # Get the type for each value that needs to be submitted. 
# using csv_headers as keys to match required_headers/relationship_types self.rel_val_types = {v['csv_header']:v['type']for k, v in self.rel_dict.items() if 'type' in v.keys()} - - #load into format that can be read by validator.py # get attributes from Attribute column attributes = model_df[list(self.required_headers)].to_dict("records") # Build attribute/relationship dictionary relationship_types = self.required_headers - #relationship_types.remove("Attribute") - - # TODO: using an attr_rel_dictionary will strip the order that attributes were submitted from - # the user. Will need to account for ordering later so the JSONLD fields are in the correct order. - # This will ensure the manifest dependencies are in the correct order. - # For now, just record order with a counter. - position = 0 attr_rel_dictionary = {} + for attr in attributes: - # For each attribute, record its position in the data model and its relationships. - - attr_rel_dictionary.update({ - attr['Attribute']: { - 'Position': position, - 'Relationships': {}, + # Add attribute to dictionary + attr_rel_dictionary.update({attr['Attribute']: {'Relationships': {}, }, - } ) + # Fill in relationship info for each attribute. for relationship in relationship_types: rel_val_type = self.rel_val_types[relationship] if not pd.isnull(attr[relationship]): # Fill in relationships based on type: if rel_val_type == bool and type(attr[relationship]) == bool: - rels = attr[relationship] - # Add other value types and adjust as needed. + parsed_rel_entry = attr[relationship] + # Move strings to list if they are comma separated. Schema order is preserved. elif rel_val_type == list: - # Move strings to list if they are comma separated. - # Order from CSV is preserved here. - rels = attr[relationship].strip().split(',') - rels = [r.strip() for r in rels] - # Extract string from list if necessary. - # TODO Catch situation where len does not equal 1. Throw error. 
+ parsed_rel_entry = attr[relationship].strip().split(',') + parsed_rel_entry = [r.strip() for r in parsed_rel_entry] + # Extract string from list if necessary. elif rel_val_type == str: - rels = str(attr[relationship]).strip() - #rels = attr[relationship].strip() - attr_rel_dictionary[attr['Attribute']]['Relationships'].update({relationship:rels}) - position += 1 + parsed_rel_entry = str(attr[relationship]).strip() + attr_rel_dictionary[attr['Attribute']]['Relationships'].update({relationship:parsed_rel_entry}) return attr_rel_dictionary @@ -214,19 +188,18 @@ def parse_csv_model( self, path_to_data_model: str, ): - - ''' - Note: - Leave out loading the base schema for now. Add it later at the - model graph stage. - + '''Load csv data model and parse into an attributes:relationships dictionary + Args: + path_to_data_model, str: path to data model + Returns: + model_dict, dict:{Attribute Display Name: { + Relationships: { + CSV Header: Value}}} ''' - # Load the csv data model to DF model_df = load_df(path_to_data_model, data_model=True) # Gather info from the model - model_dict = self.gather_csv_attributes_relationships(model_df) return model_dict @@ -235,10 +208,6 @@ class DataModelJSONLDParser(): def __init__( self, ): - ''' - - ''' - # Instantiate DataModelRelationships self.dmr = DataModelRelationships() # Load relationships dictionary. @@ -246,95 +215,97 @@ def __init__( def gather_jsonld_attributes_relationships( self, - model_jsonld): + model_jsonld: List[dict]) -> Dict: ''' - Note: unlike a CSV the JSONLD might already have the biothings schema attached to it. - So the output may not initially look identical. - TODO Check relationship attribute types like in CSV - - It is also just about impossible to extract attributes explicitly. Using a dictionary should avoid duplications. - - This is a close approximation to finding attributes and relationships but will not be convertable between csv and jsonld - since jsonld does not have the concept of attributes. 
- - TODO: Simplify or change this dictionary capture. + Args: + model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model + Returns: + attr_rel_dictionary: dict, + {Node Display Name: + {Relationships: { + CSV Header: Value}}} + Notes: + - Unlike a CSV the JSONLD might already have a base schema attached to it. + So the attributes:relationship dictionary for importing a CSV vs JSONLD may not match. + - It is also just about impossible to extract attributes explicitly. Using a dictionary should avoid duplications. + - This is a promiscuous capture and will create an attribute for each model entry. + - Currently only designed to capture the same information that would be encoded in CSV, + can be updated in the future. + TODO: + - Find a way to delete non-attribute keys, is there a way to reliable distinguish after the fact? + - Right now, here we are stripping contexts, will need to track them in the future. ''' - - # TODO: define this within the relationships class - jsonld_keys_to_extract = ['label', 'subClassOf', 'id'] - label_jsonld_key, subclassof_jsonld_key, id_jsonld_key = [self.rel_dict[key]['jsonld_key'] + # Retrieve relevant JSONLD keys. + jsonld_keys_to_extract = ['label', 'subClassOf', 'id', 'displayName'] + label_jsonld_key, subclassof_jsonld_key, id_jsonld_key, dn_jsonld_key = [self.rel_dict[key]['jsonld_key'] for key in jsonld_keys_to_extract ] - model_ids = [v[label_jsonld_key] for v in model_jsonld] + # Gather all labels from the model. + model_labels = [v[label_jsonld_key] for v in model_jsonld] + attr_rel_dictionary = {} - # For each entry in the jsonld model + # Move through each entry in the jsonld model for entry in model_jsonld: - # Check to see if it has been assigned as a subclass as an attribute or parent. - if subclassof_jsonld_key in entry.keys(): - - # Checking if subclass type is list, actually gets rid of Biothings. 
- # TODO: Allow biothings in future (would need to handle as a dictionary) - if type(entry[subclassof_jsonld_key]) == list and entry[subclassof_jsonld_key]: - - # Determine if the id the entry has been assigned as a sublcass of is also recoreded - # as a model id. If it is, then the entry is not an attribute itself, but a valid value. - subclass_id = entry[subclassof_jsonld_key][0][id_jsonld_key] + # Get the label of the entry + entry_display_name = entry[dn_jsonld_key] - if not subclass_id in model_ids: - - # Get the label of the entry - entry_id = entry[label_jsonld_key] - - # If the entry is an attribute that has not already been added to the dictionary, add it. - if entry_id not in attr_rel_dictionary.keys(): - attr_rel_dictionary.update({entry_id: {'Relationships': {}}}) - - # Add relationships for each attribute - # Right now, here we are stripping contexts, will need to track them in the future. - for key, val in self.rel_dict.items(): - if val['jsonld_key'] in entry.keys() and 'csv_header' in val.keys(): - rel_entry = entry[val['jsonld_key']] - if rel_entry != []: - try: - # add dictionary entry by itself. - if type(rel_entry) == dict: - rels = entry.get(val['jsonld_key'])['@id'] - # parse list of dictionaries to make a list of entries with context stripped (will update this section when contexts added.) - elif type(rel_entry[0]) == dict: - rels = [r[id_jsonld_key].split(':')[1] for r in rel_entry] - elif type(rel_entry) == str: - if ':' in rel_entry and 'http:' not in rel_entry: - rels = rel_entry.split(':')[1] - # Convert true/false strings to boolean - if rels.lower() =='true': - rels = True - elif rels.lower == 'false': - rels == False - else: - rels = rel_entry - else: - rels = rel_entry - except: - breakpoint() - - attr_rel_dictionary[ - entry_id]['Relationships'].update( - {self.rel_dict[key]['csv_header']: rels}) + # If the entry is an attribute that has not already been added to the dictionary, add it. 
+ if entry_display_name not in attr_rel_dictionary.keys(): + attr_rel_dictionary.update({entry_display_name: {'Relationships': {}}}) + + # Add relationships for each attribute + # + # Go through each defined relationship type (key) and its attributes (val) + for key, val in self.rel_dict.items(): + # Determine if current entry can be defined by the current reationship. + if val['jsonld_key'] in entry.keys() and 'csv_header' in val.keys(): + # Retrieve entry value associated with the given relationship + rel_entry = entry[val['jsonld_key']] + # if there is an entry treat it by type and add to the attr:relationships dictionary. + if rel_entry: + # Retrieve ID from dictionary single value dictionary + if type(rel_entry) == dict and len(rel_entry.keys()) == 1: + parsed_rel_entry = entry.get(val['jsonld_key'])['@id'] + # Parse list of dictionaries to make a list of entries with context stripped (will update this section when contexts added.) + elif type(rel_entry)==list and type(rel_entry[0]) == dict: + parsed_rel_entry = [r[id_jsonld_key].split(':')[1] for r in rel_entry] + # Strip context from string and convert true/false to bool + elif type(rel_entry) == str: + # Remove contexts and treat strings as appropriate. 
+ if ':' in rel_entry and 'http:' not in rel_entry: + parsed_rel_entry = rel_entry.split(':')[1] + # Convert true/false strings to boolean + if parsed_rel_entry.lower() =='true': + parsed_rel_entry = True + elif parsed_rel_entry.lower == 'false': + parsed_rel_entry == False + else: + parsed_rel_entry = rel_entry + # For anything else get that + else: + parsed_rel_entry = rel_entry + # Add relationships for each attribute and relationship to the dictionary + attr_rel_dictionary[ + entry_display_name]['Relationships'].update( + {self.rel_dict[key]['csv_header']: parsed_rel_entry}) return attr_rel_dictionary def parse_jsonld_model( self, path_to_data_model:str, ): - ''' - Note: Converting JSONLD to look *Exactly* like the csv output would get rid - of a lot of information. Will need to decide later if we want to - preserve this information in some way. - + '''Convert raw JSONLD data model to attributes relationship dictionary. + Args: + path_to_data_model: str, path to JSONLD data model + Returns: + model_dict: dict, + {Node Display Name: + {Relationships: { + CSV Header: Value}}} ''' # Load the json_ld model to df - json_load = load_json(path_to_data_model) + # Convert dataframe to attributes relationship dictionary. 
model_dict = self.gather_jsonld_attributes_relationships(json_load['@graph']) return model_dict From a169068e1d0a9da6ec37967555e1698849d14bc8 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 11 Sep 2023 14:19:26 -0700 Subject: [PATCH 055/239] update function names --- schematic/schemas/data_model_relationships.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/schematic/schemas/data_model_relationships.py b/schematic/schemas/data_model_relationships.py index 5f593e77e..133a292ea 100644 --- a/schematic/schemas/data_model_relationships.py +++ b/schematic/schemas/data_model_relationships.py @@ -1,5 +1,5 @@ from typing import Dict -from schematic.utils.schema_utils import get_label_from_display_name, get_display_name_from_label, convert_bool, parse_validation_rules +from schematic.utils.schema_utils import get_label_from_display_name, get_attribute_display_name_from_label, convert_bool_to_str, parse_validation_rules from schematic.schemas.curie import uri2curie, curie2uri class DataModelRelationships(): @@ -48,8 +48,8 @@ def define_data_model_relationships(self) -> Dict: 'type': str, 'edge_rel': False, 'required_header': True, - 'node_attr_dict':{'default': get_display_name_from_label, - 'standard': get_display_name_from_label, + 'node_attr_dict':{'default': get_attribute_display_name_from_label, + 'standard': get_attribute_display_name_from_label, }, }, 'label':{ @@ -111,7 +111,7 @@ def define_data_model_relationships(self) -> Dict: 'edge_rel': False, 'required_header': True, 'node_attr_dict':{'default': False, - 'standard': convert_bool, + 'standard': convert_bool_to_str, }, }, 'subClassOf': { From 45eff995a70f0f74c0498f5d481b5b0bb6799226 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 11 Sep 2023 14:20:02 -0700 Subject: [PATCH 056/239] clean/document schematic/schemas/data_model_validator.py --- schematic/schemas/data_model_validator.py | 56 ++++++++++++----------- 1 file changed, 30 insertions(+), 26 deletions(-) diff 
--git a/schematic/schemas/data_model_validator.py b/schematic/schemas/data_model_validator.py index a864c8814..a83944627 100644 --- a/schematic/schemas/data_model_validator.py +++ b/schematic/schemas/data_model_validator.py @@ -1,4 +1,5 @@ import networkx as nx +from typing import Any, Dict, Optional, Text, List,Tuple from schematic.schemas.data_model_relationships import ( DataModelRelationships @@ -10,18 +11,25 @@ class DataModelValidator(): ''' def __init__( self, - graph, + graph: nx.MultiDiGraph, ): ''' + Args: + graph, nx.MultiDiGraph: Graph representation of the data model. TODO: put blacklisted chars and reserved_names in some global space where they can be accessed centrally ''' self.graph = graph self.DMR = DataModelRelationships() - # Removed check for spaces in display name since we get rid of those. + # Define blacklisted characters, taken from store.synapse self.blacklisted_chars = ['(', ')', '.', '-'] + # Define reserved_names, taken from Documentation self.reserved_names = {'entityId'} - def run_checks(self): + def run_checks(self)->Tuple[list,list]: + """ Run all validation checks on the data model graph. + Returns, tuple(list, list): Returns a tuple of errors and warnings generated. + TODO: In future could design a way for groups to customize tests run for their groups, run additional tests, or move some to issuing only warnings, vice versa. + """ error_checks = [ self.check_graph_has_required_node_fields(), self.check_is_dag(), @@ -32,11 +40,12 @@ def run_checks(self): ] errors = [error for error in error_checks if error] warnings = [warning for warning in warning_checks if warning] - return errors + return errors, warnings - def check_graph_has_required_node_fields(self): - ''' - Checks that each node is assigned a label. + def check_graph_has_required_node_fields(self)-> List[str]: + '''Checks that the graph has the required node fields for all nodes. + Returns: + error, list: List of error messages for each missing field. 
''' # Get all the fields that should be recorded per node rel_dict = self.DMR.relationships_dictionary @@ -47,27 +56,19 @@ def check_graph_has_required_node_fields(self): error = [] missing_fields = [] - # Check that nodes have labels + # Check that required fields are present for each node. for node, node_dict in self.graph.nodes(data=True): missing_fields.extend([(node, f) for f in node_fields if f not in node_dict.keys()]) if missing_fields: - for nf in missing_fields: - error.append(f'For entry: {nf[0]}, the required field {nf[1]} is missing in the data model graph, please double check your model and generate the graph again.') + for mf in missing_fields: + error.append(f'For entry: {mf[0]}, the required field {mf[1]} is missing in the data model graph, please double check your model and generate the graph again.') return error - def check_is_dag(self): - ''' - TODO: - - Check with Milen. This might be too simple of a check. - - Try wit topological sort as well. Benchmark against current approach. - - Add unit test to verify this works properly. - - - if nx.number_of_selfloops(self.graph)!=0 and nx.is_directed(self.graph) == False: - error = f'Schematic requires that data models are Directed Acyclic Graphs (DAGs). ' \ - f'Model supplied is not a DAG, please check your model.' - return error + def check_is_dag(self) -> List[str]: + '''Check that generated graph is a directed acyclic graph + Returns: + error, list: List of error messages if graph is not a DAG. List will include a message for each cycle found, if not there is a more generic message for the graph as a whole. ''' error = [] if not nx.is_directed_acyclic_graph(self.graph): @@ -80,8 +81,10 @@ def check_is_dag(self): error.append(f'Schematic requires models be a directed acyclic graph (DAG). 
Your graph is not a DAG, we could not locate the sorce of the error, please inspect your model.') return error - def check_blacklisted_characters(self): + def check_blacklisted_characters(self) -> List[str]: """ We strip these characters in store, so not sure if it matter if we have them now, maybe add warning + Returns: + warning, list: list of warnings for each node in the graph, that has a Display name that contains blacklisted characters. """ warning = [] for node, node_dict in self.graph.nodes(data=True): @@ -92,9 +95,10 @@ def check_blacklisted_characters(self): warning.append(f'Node: {node_display_name} contains a blacklisted character(s): {blacklisted_characters_str}, they will be striped if used in Synapse annotations.') return warning - def check_reserved_names(self): - ''' - # TODO: the error message is odd, what are the actual names that should be used? Not attribute or componenet... + def check_reserved_names(self) -> List[str]: + '''Identify if any names nodes in the data model graph are the same as reserved name. + Returns: + error, list: List of erros for every node in the graph whose name overlaps with the reserved names. 
''' error = [] reserved_names_found = [(name, node) for node in self.graph.nodes From c2f6d51e1ce63c1e24af4b5b8c12d111bc0845f4 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 11 Sep 2023 14:20:36 -0700 Subject: [PATCH 057/239] remove unused import convert_size --- schematic/store/synapse.py | 1 - 1 file changed, 1 deletion(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 300882ade..2cf1b5477 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -48,7 +48,6 @@ from schematic.utils.validate_utils import comma_separated_list_regex, rule_in_rule_list from schematic.utils.general import (entity_type_mapping, get_dir_size, - convert_size, convert_gb_to_bytes, create_temp_folder, profile, From 5eceedffc03e0d200a9e4bdf561cbc81c02aa0d6 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 11 Sep 2023 14:21:42 -0700 Subject: [PATCH 058/239] clean/document rename functions in schematic/utils/schema_utils.py --- schematic/utils/schema_util.py | 71 --------------------------------- schematic/utils/schema_utils.py | 71 +++++++++++++++++++++++++-------- 2 files changed, 55 insertions(+), 87 deletions(-) delete mode 100644 schematic/utils/schema_util.py diff --git a/schematic/utils/schema_util.py b/schematic/utils/schema_util.py deleted file mode 100644 index b294094be..000000000 --- a/schematic/utils/schema_util.py +++ /dev/null @@ -1,71 +0,0 @@ -import json -import string -import inflection - -''' - -General methods. 
- -TODO: Type hinting - -''' - -def get_property_label_from_display_name(display_name, strict_camel_case = False): - """Convert a given display name string into a proper property label string""" - """ - label = ''.join(x.capitalize() or ' ' for x in display_name.split(' ')) - label = label[:1].lower() + label[1:] if label else '' - """ - # This is the newer more strict method - if strict_camel_case: - display_name = display_name.strip().translate({ord(c): "_" for c in string.whitespace}) - label = inflection.camelize(display_name, uppercase_first_letter=False) - - # This method remains for backwards compatibility - else: - display_name = display_name.translate({ord(c): None for c in string.whitespace}) - label = inflection.camelize(display_name.strip(), uppercase_first_letter=False) - - return label - -def get_class_label_from_display_name(display_name, strict_camel_case = False): - """Convert a given display name string into a proper class label string""" - """ - label = ''.join(x.capitalize() or ' ' for x in display_name.split(' '))""" - # This is the newer more strict method - if strict_camel_case: - display_name = display_name.strip().translate({ord(c): "_" for c in string.whitespace}) - label = inflection.camelize(display_name, uppercase_first_letter=True) - - # This method remains for backwards compatibility - else: - display_name = display_name.translate({ord(c): None for c in string.whitespace}) - label = inflection.camelize(display_name.strip(), uppercase_first_letter=True) - - return label - -def get_display_name_from_label(node_name, attr_relationships): - ''' - TODO: if not display name raise error. 
- ''' - if 'Attribute' in attr_relationships.keys(): - display_name = attr_relationships['Attribute'] - else: - display_name = node_name - return display_name - -def get_label_from_display_name(display_name, entry_type, strict_camel_case = False): - - if entry_type.lower()=='class': - label = get_class_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case) - - elif entry_type.lower()=='property': - label=get_property_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case) - return label - -def convert_bool(provided_bool): - return str(provided_bool) - -def export_schema(schema, file_path): - with open(file_path, "w") as f: - json.dump(schema, f, sort_keys=True, indent=4, ensure_ascii=False) diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index d7a26eb11..c6b0809ae 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -4,11 +4,13 @@ import string -def get_property_label_from_display_name(display_name, strict_camel_case = False): - """Convert a given display name string into a proper property label string""" - """ - label = ''.join(x.capitalize() or ' ' for x in display_name.split(' ')) - label = label[:1].lower() + label[1:] if label else '' +def get_property_label_from_display_name(display_name:str, strict_camel_case:bool = False) -> str: + """Convert a given display name string into a proper property label string + Args: + display_name, str: node display name + strict_camel_case, bool: Default, False; defines whether or not to use strict camel case or not for conversion. 
+ Returns: + label, str: property label of display name """ # This is the newer more strict method if strict_camel_case: @@ -22,10 +24,14 @@ def get_property_label_from_display_name(display_name, strict_camel_case = False return label -def get_class_label_from_display_name(display_name, strict_camel_case = False): - """Convert a given display name string into a proper class label string""" +def get_class_label_from_display_name(display_name:str, strict_camel_case:bool = False) -> str: + """Convert a given display name string into a proper class label string + Args: + display_name, str: node display name + strict_camel_case, bool: Default, False; defines whether or not to use strict camel case or not for conversion. + Returns: + label, str: class label of display name """ - label = ''.join(x.capitalize() or ' ' for x in display_name.split(' '))""" # This is the newer more strict method if strict_camel_case: display_name = display_name.strip().translate({ord(c): "_" for c in string.whitespace}) @@ -38,9 +44,13 @@ def get_class_label_from_display_name(display_name, strict_camel_case = False): return label -def get_display_name_from_label(node_name, attr_relationships): - ''' - TODO: if not display name raise error. +def get_attribute_display_name_from_label(node_name: str, attr_relationships: dict) -> str: + '''Get attribute display name for a node, using the node label, requires the attr_relationships dicitonary from the data model parser + Args: + node_name, str: node label + attr_relationships, dict: dictionary defining attributes and relationships, generated in data model parser. + Returns: + display_name, str: node display name, recorded in attr_relationships. 
''' if 'Attribute' in attr_relationships.keys(): display_name = attr_relationships['Attribute'] @@ -48,23 +58,52 @@ def get_display_name_from_label(node_name, attr_relationships): display_name = node_name return display_name -def get_label_from_display_name(display_name, entry_type, strict_camel_case = False): - +def get_label_from_display_name(display_name:str, entry_type:str, strict_camel_case:bool = False) -> str: + """Get node label from provided display name, based on whether the node is a class or property + Args: + display_name, str: node display name + entry_type, str: 'class' or 'property', defines what type the entry is. + strict_camel_case, bool: Default, False; defines whether or not to use strict camel case or not for conversion. + Returns: + label, str: class label of display name + Raises: + ValueError if entry_type.lower(), is not either 'class' or 'property' + + """ if entry_type.lower()=='class': label = get_class_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case) elif entry_type.lower()=='property': label=get_property_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case) + else: + raise ValueError(f"The entry type submitted: {entry_type}, is not one of the permitted types: 'class' or 'property'") return label -def convert_bool(provided_bool): +def convert_bool_to_str(provided_bool: bool) -> str: + """Convert bool to string. + Args: + provided_bool, str: true or false bool + Returns: + Boolean converted to 'true' or 'false' str as appropriate. 
+ """ return str(provided_bool) -def parse_validation_rules(validation_rules:list) -> list: +def parse_validation_rules(validation_rules:List[str]) -> List[str]: + """Split multiple validation rules based on :: delimiter + Args: + validation_rules, list: list containing a string validation rule + Returns: + validation_rules, list: if submitted List + """ if validation_rules and '::' in validation_rules[0]: validation_rules = validation_rules[0].split('::') return validation_rules -def export_schema(schema, file_path): +def export_schema(schema: dict, file_path: str) -> None: + """Export schema to given filepath. + Args: + schema, dict: JSONLD schema + filepath, str: path to store the schema + """ with open(file_path, "w") as f: json.dump(schema, f, sort_keys=True, indent=4, ensure_ascii=False) From 95c0b32ac380b68fb9424a5cce06f0053a1415b7 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 12 Sep 2023 12:05:17 -0700 Subject: [PATCH 059/239] import typing to schema_utils --- schematic/utils/schema_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index c6b0809ae..4c6c331a5 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -2,7 +2,7 @@ import json import networkx as nx import string - +from typing import List def get_property_label_from_display_name(display_name:str, strict_camel_case:bool = False) -> str: """Convert a given display name string into a proper property label string From b56b05f1fee6c543e6fe184806e6b1abfdbe5ddd Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 12 Sep 2023 12:06:23 -0700 Subject: [PATCH 060/239] add to docstring --- schematic/schemas/data_model_edges.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py index 5cdcaf9b4..9680e5e1b 100644 --- a/schematic/schemas/data_model_edges.py +++ 
b/schematic/schemas/data_model_edges.py @@ -10,7 +10,7 @@ def __init__(self): self.data_model_relationships = self.dmr.relationships_dictionary def generate_edge(self, G: nx.MultiDiGraph, node: str, all_node_dict: dict, attr_rel_dict: dict, edge_relationships: dict) -> nx.MultiDiGraph: - """Generate an edge between a target node and relevant other nodes the data model + """Generate an edge between a target node and relevant other nodes the data model. In short, does this current node belong to a recorded relationship in the attribute, relationshps dictionary. Go through each attribute and relationship to find where the node may be. Args: G, nx.MultiDiGraph: networkx graph representation of the data model, that is in the process of being fully built. node, str: target node to look for connecting edges @@ -51,7 +51,6 @@ def generate_edge(self, G: nx.MultiDiGraph, node: str, all_node_dict: dict, attr weight = 0 # Get the edge_key for the edge relationship we are adding at this step edge_key = self.data_model_relationships[key]['edge_key'] - # Add edges, in a manner that preserves directionality # TODO: rewrite to use edge_dir if key in ['subClassOf', 'domainIncludes']: From 87e3cdc6f608a48e854c586c2d5bc5396e1a822f Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 12 Sep 2023 13:46:29 -0700 Subject: [PATCH 061/239] pull label for entry if display name not recorded when parsing JSONLD --- schematic/schemas/data_model_parser.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 9d4bf9d84..41c193bf2 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -248,11 +248,16 @@ def gather_jsonld_attributes_relationships( # Move through each entry in the jsonld model for entry in model_jsonld: # Get the label of the entry - entry_display_name = entry[dn_jsonld_key] + try: + # Get the entry display name (if 
recorded) + entry_name = entry[dn_jsonld_key] + except: + # If no display name, get the label. + entry_name = entry[label_jsonld_key] # If the entry is an attribute that has not already been added to the dictionary, add it. - if entry_display_name not in attr_rel_dictionary.keys(): - attr_rel_dictionary.update({entry_display_name: {'Relationships': {}}}) + if entry_name not in attr_rel_dictionary.keys(): + attr_rel_dictionary.update({entry_name: {'Relationships': {}}}) # Add relationships for each attribute # @@ -287,7 +292,7 @@ def gather_jsonld_attributes_relationships( parsed_rel_entry = rel_entry # Add relationships for each attribute and relationship to the dictionary attr_rel_dictionary[ - entry_display_name]['Relationships'].update( + entry_name]['Relationships'].update( {self.rel_dict[key]['csv_header']: parsed_rel_entry}) return attr_rel_dictionary From 15c06e56738a079441736482cdace20f0eb28831 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 12 Sep 2023 13:47:11 -0700 Subject: [PATCH 062/239] update find_class_specific_properties to work from graph --- schematic/schemas/data_model_graph.py | 42 +++++++++------------------ 1 file changed, 14 insertions(+), 28 deletions(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index d0bc19dcf..4f9e8afaf 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -615,36 +615,22 @@ def find_child_classes(self, schema_class: str) -> list: return unlist(list(self.graph.successors(schema_class))) def find_class_specific_properties(self, schema_class): - """Find properties specifically associated with a given class""" + """Find properties specifically associated with a given class + Args: + schema_class, str: node/class label, to identify properties for. + Returns: + properties, list: List of properties associate with a given schema class. 
+ Raises: + KeyError: Key error is raised if the provded schema_class is not in the graph + """ - #This is called directly from the API - # Needs to be refactored no longer be JSONLD specific - breakpoint() - #schema_uri = self.graph.nodes[schema_class]["uri"] + if not self.is_class_in_schema(schema_class): + raise KeyError(f"Schema_class provided: {schema_class} is not in the data model, please check that you are providing the proper class/node label") + properties = [] - for k, v in self.graph[schema_class]: - if 'domainIncludes' in v.keys(): - properties.append(k) - ''' - - for record in self.schema["@graph"]: - if record["@type"] == "rdf:Property": - if ( - type(record["schema:domainIncludes"]) == dict - and record["schema:domainIncludes"]["@id"] == schema_uri - ): - properties.append(record["rdfs:label"]) - elif ( - type(record["schema:domainIncludes"]) == list - and [ - item - for item in record["schema:domainIncludes"] - if item["@id"] == schema_uri - ] - != [] - ): - properties.append(record["rdfs:label"]) - ''' + for n1, n2 in self.graph.edges(): + if n2==schema_class and 'domainValue' in self.graph[n1][schema_class]: + properties.append(n1) return properties def find_parent_classes(self, node_label:str) -> List[list]: From 7692253ca06e4343087e9994ebe8843e5188d07b Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 15 Sep 2023 15:14:17 -0700 Subject: [PATCH 063/239] add new line --- schematic/manifest/generator.py | 1 + 1 file changed, 1 insertion(+) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index e9b052827..fe478da64 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -58,6 +58,7 @@ def __init__( # Path to jsonld self.jsonld_path = path_to_json_ld + # Graph self.graph = graph From 6b24f66d87b9b59be90e8cf49f48a33b737bbd63 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 15 Sep 2023 15:16:55 -0700 Subject: [PATCH 064/239] remove unused code and explicitly call parameter when 
getting validation rules --- schematic/models/GE_Helpers.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/schematic/models/GE_Helpers.py b/schematic/models/GE_Helpers.py index 5cd70d05c..696b99ca7 100644 --- a/schematic/models/GE_Helpers.py +++ b/schematic/models/GE_Helpers.py @@ -159,14 +159,11 @@ def build_expectation_suite(self,): # remove trailing/leading whitespaces from manifest self.manifest.applymap(lambda x: x.strip() if isinstance(x, str) else x) - validation_rules = self.DME.get_node_validation_rules(col) + validation_rules = self.DME.get_node_validation_rules(node_display_name=col) #check if attribute has any rules associated with it if validation_rules: #iterate through all validation rules for an attribute - #TODO: Can remove when handling updated so split within graph - if '::' in validation_rules[0]: - validation_rules = validation_rules[0].split("::") for rule in validation_rules: base_rule = rule.split(" ")[0] From c4e9c5912414b9a32f8eed866f8712c778a7e0b6 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 15 Sep 2023 15:17:50 -0700 Subject: [PATCH 065/239] update data model handling in MetadataModel so can take in JSONLD or CSV, dont constrain to JSONLD --- schematic/models/metadata.py | 32 ++++++++++++++------------------ 1 file changed, 14 insertions(+), 18 deletions(-) diff --git a/schematic/models/metadata.py b/schematic/models/metadata.py index f7ddfaee3..bf71ced27 100644 --- a/schematic/models/metadata.py +++ b/schematic/models/metadata.py @@ -53,28 +53,24 @@ def __init__(self, inputMModelLocation: str, inputMModelLocationType: str,) -> N """ # extract extension of 'inputMModelLocation' # ensure that it is necessarily pointing to a '.jsonld' file - if inputMModelLocation.rpartition(".")[-1] == "jsonld": - logger.debug( - f"Initializing DataModelGraphExplorer object from {inputMModelLocation} schema." 
- ) - self.inputMModelLocation = inputMModelLocation - data_model_parser = DataModelParser(path_to_data_model = self.inputMModelLocation) - #Parse Model - parsed_data_model = data_model_parser.parse_model() + logger.debug( + f"Initializing DataModelGraphExplorer object from {inputMModelLocation} schema." + ) - # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model) + self.inputMModelLocation = inputMModelLocation - # Generate graph - self.graph_data_model = data_model_grapher.generate_data_model_graph() - - self.DME = DataModelGraphExplorer(self.graph_data_model) + data_model_parser = DataModelParser(path_to_data_model = self.inputMModelLocation) + #Parse Model + parsed_data_model = data_model_parser.parse_model() - else: - raise TypeError( - f"Please make sure {inputMModelLocation} is a .jsonld file." - ) + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + self.graph_data_model = data_model_grapher.generate_data_model_graph() + + self.DME = DataModelGraphExplorer(self.graph_data_model) # check if the type of MModel file is "local" # currently, the application only supports reading from local JSON-LD files From f1da61b2792b8b9719079b36349cc3dccc76a409 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 15 Sep 2023 15:18:27 -0700 Subject: [PATCH 066/239] remove spaces --- schematic/models/metadata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/models/metadata.py b/schematic/models/metadata.py index bf71ced27..6aa75821b 100644 --- a/schematic/models/metadata.py +++ b/schematic/models/metadata.py @@ -69,7 +69,7 @@ def __init__(self, inputMModelLocation: str, inputMModelLocationType: str,) -> N # Generate graph self.graph_data_model = data_model_grapher.generate_data_model_graph() - + self.DME = DataModelGraphExplorer(self.graph_data_model) # check if the type of MModel file is "local" From bdb6469d739e2c7444d44248c2591e59c3ce20fa Mon Sep 17 
00:00:00 2001 From: Mialy DeFelice Date: Fri, 15 Sep 2023 15:19:34 -0700 Subject: [PATCH 067/239] explicitly call node_display_name paramter rather than rely on position to get_node_validation_rules --- schematic/models/validate_attribute.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/models/validate_attribute.py b/schematic/models/validate_attribute.py index 9f8e6a31f..f5ffd0e4b 100644 --- a/schematic/models/validate_attribute.py +++ b/schematic/models/validate_attribute.py @@ -233,7 +233,7 @@ def generate_type_error( error_val = invalid_entry #TODO: not sure if this i needed (to split) - validation_rules=DME.get_node_validation_rules(attribute_name) + validation_rules=DME.get_node_validation_rules(node_display_name=attribute_name) #TODO: Can remove when handling updated so split within graph if validation_rules and '::' in validation_rules[0]: @@ -694,7 +694,7 @@ def regex_validation( errors = [] warnings = [] - validation_rules = DME.get_node_validation_rules(manifest_col.name) + validation_rules = DME.get_node_validation_rules(node_display_name=manifest_col.name) if validation_rules and '::' in validation_rules[0]: validation_rules = validation_rules[0].split("::") # Handle case where validating re's within a list. 
From da0c28d56857904613920b0c0280c3477f04092d Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 15 Sep 2023 15:21:10 -0700 Subject: [PATCH 068/239] explicitly call node_display_name paramter rather than rely on position to get_node_validation_rules --- schematic/models/validate_manifest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/models/validate_manifest.py b/schematic/models/validate_manifest.py index cd1a425e1..83cbf169e 100644 --- a/schematic/models/validate_manifest.py +++ b/schematic/models/validate_manifest.py @@ -175,7 +175,7 @@ def validate_manifest_rules( # remove trailing/leading whitespaces from manifest manifest.applymap(lambda x: x.strip() if isinstance(x, str) else x) - validation_rules = DME.get_node_validation_rules(col) + validation_rules = DME.get_node_validation_rules(node_display_name=col) #TODO: Can remove when handling updated so split within graph if validation_rules and '::' in validation_rules[0]: From 2b301acf89f0d6cfd87b924fdc94f924d9e02677 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 15 Sep 2023 15:23:51 -0700 Subject: [PATCH 069/239] clean/document data_model_graph --- schematic/schemas/data_model_graph.py | 37 +++++++++++++-------------- 1 file changed, 18 insertions(+), 19 deletions(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index 4f9e8afaf..0e5a9d408 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -156,11 +156,11 @@ def find_classes(self) -> set: classes = nodes - properties return classes - def find_node_range(self, node_label:Optional[bool], node_display_name:Optional[bool]) -> list: + def find_node_range(self, node_label:Optional[str]=None, node_display_name:Optional[str]=None) -> list: """Get valid values for the given node (attribute) Args: - node_label, str, Optional[bool]: label of the node for which to retrieve valid values - node_display_name, str, Optional[bool]: Display 
Name of the node for which to retrieve valid values + node_label, str, Optional[str]: label of the node for which to retrieve valid values + node_display_name, str, Optional[str]: Display Name of the node for which to retrieve valid values Returns: valid_values, list: List of valid values associated with the provided node. """ @@ -189,8 +189,7 @@ def get_adjacent_nodes_by_relationship(self, #checked """ nodes = set() - - for (u, v, key, c) in self.graph.out_edges(node=node_label, data=True, keys=True): + for (u, v, key, c) in self.graph.out_edges(node_label, data=True, keys=True): if key == relationship: nodes.add(v) @@ -377,14 +376,15 @@ def get_ordered_entry(self, key: str, source_node_label:str) -> list: return sorted_nodes # Get values associated with a node - def get_nodes_ancestors(self, node_label:str) -> list: + def get_nodes_ancestors(self, subgraph, node_label:str) -> list: """Get a list of nodes reachable from source component in graph Args: - node_labe, str: label of node to find ancestors for + subgraph: networkx graph object + node_label, str: label of node to find ancestors for Returns: all_ancestors, list: nodes reachable from source in graph """ - all_ancestors = list(nx.ancestors(self.graph, component)) + all_ancestors = list(nx.ancestors(subgraph, node_label)) return all_ancestors @@ -427,10 +427,10 @@ def get_node_dependencies(self, if schema_ordered: # get dependencies in the same order in which they are defined in the schema - required_dependencies = self.get_ordered_entry(key=self.reqDep_ek, source_node_label=source_node) + required_dependencies = self.get_ordered_entry(key=self.rel_dict['requiresDependency']['edge_key'], source_node_label=source_node) else: required_dependencies = self.get_adjacent_nodes_by_relationship( - node_label = source_node, relationship = self.reqDep_ek) + node_label = source_node, relationship = self.rel_dict['requiresDependency']['edge_key']) if display_names: # get display names of dependencies @@ -478,8 +478,7 @@ 
def get_node_label(self, node_display_name: str) -> str: node_display_name: Display name of the node which you want to get the label for. Returns: Node label associated with given node. - Raises: - KeyError: If the node cannot be found in the graph. + If display name not part of schema, return an empty string. """ node_class_label = get_class_label_from_display_name(display_name = node_display_name) @@ -492,11 +491,11 @@ def get_node_label(self, node_display_name: str) -> str: elif node_property_label in self.graph.nodes: node_label = node_property_label else: - raise KeyError(f"Cannot find node: {node_display_name} in the graph, please check entry.") - + node_label="" + return node_label - def get_node_range(self, node_label: Optional[bool], node_display_name: Optional[bool]) -> List[str]: + def get_node_range(self, node_label: Optional[str] = None, node_display_name: Optional[str] = None, display_names: bool=False) -> List[str]: """Get the range, i.e., all the valid values that are associated with a node label. Args: @@ -532,7 +531,7 @@ def get_node_range(self, node_label: Optional[bool], node_display_name: Optional return required_range - def get_node_required(self, node_label:Optional[bool], node_display_name: Optional[bool]) -> bool: + def get_node_required(self, node_label:Optional[str]=None, node_display_name: Optional[str]=None) -> bool: """Check if a given node is required or not. Note: The possible options that a node can be associated with -- "required" / "optional". 
@@ -552,7 +551,7 @@ def get_node_required(self, node_label:Optional[bool], node_display_name: Option node_required = self.graph.nodes[node_label][rel_node_label] return node_required - def get_node_validation_rules(self, node_label: Optional[bool], node_display_name: Optional[bool]) -> str: + def get_node_validation_rules(self, node_label: Optional[str]=None, node_display_name: Optional[str]=None) -> str: """Get validation rules associated with a node, Args: @@ -595,7 +594,7 @@ def get_subgraph_by_edge_type( return relationship_subgraph - def find_adjacent_child_classes(self, node_label: Optional[bool], node_display_name: Optional[bool])->List[str]: + def find_adjacent_child_classes(self, node_label: Optional[str]=None, node_display_name: Optional[str]=None)->List[str]: '''Find child classes of a given node. Args: node_display_name: Display name of the node to look up. @@ -653,7 +652,7 @@ def find_parent_classes(self, node_label:str) -> List[list]: return [_path[:-1] for _path in paths] - def full_schema_graph(self, size:Optional[bool])-> graphviz.Digraph: + def full_schema_graph(self, size:Optional[int]=None)-> graphviz.Digraph: """Create a graph of the data model. Args: size, float: max height and width of the graph, if one value provided it is used for both. 
From 7bf1733e71eefbd52391392d694bac22d3ea4b3f Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 15 Sep 2023 15:28:52 -0700 Subject: [PATCH 070/239] fix data model handling --- tests/test_manifest.py | 38 ++++++++++++++++++++++++++++++-------- 1 file changed, 30 insertions(+), 8 deletions(-) diff --git a/tests/test_manifest.py b/tests/test_manifest.py index b68e7f83f..ac4dd8641 100644 --- a/tests/test_manifest.py +++ b/tests/test_manifest.py @@ -60,7 +60,7 @@ def manifest_generator(helpers, request): use_annotations, data_type = request.param path_to_data_model = helpers.get_data_path("example.model.jsonld") - + # Get graph data model graph_data_model = generate_graph_data_model(helpers, path_to_data_model=path_to_data_model) @@ -113,7 +113,7 @@ class TestManifestGenerator: def test_init(self, helpers): path_to_data_model = helpers.get_data_path("example.model.jsonld") - + # Get graph data model graph_data_model = generate_graph_data_model(helpers, path_to_data_model=path_to_data_model) @@ -142,7 +142,6 @@ def test_get_manifest_first_time(self, manifest): return # Beyond this point, the output is assumed to be a data frame - # Update expectations based on whether the data type is file-based is_file_based = data_type in ["BulkRNA-seqAssay"] @@ -194,8 +193,9 @@ def test_get_manifest_excel(self, helpers, sheet_url, output_format, dataset_id) data_type = "Patient" + # Get path to data model path_to_data_model = helpers.get_data_path("example.model.jsonld") - + # Get graph data model graph_data_model = generate_graph_data_model(helpers, path_to_data_model=path_to_data_model) @@ -244,9 +244,17 @@ def test_get_manifest_no_annos(self, helpers, dataset_id): # Use a non-file based DataType data_type = "Patient" + # Get path to data model + path_to_data_model = helpers.get_data_path("example.model.jsonld") + + # Get graph data model + graph_data_model = generate_graph_data_model(helpers, path_to_data_model=path_to_data_model) + + # Instantiate object with 
use_annotations set to True generator = ManifestGenerator( - path_to_json_ld=helpers.get_data_path("example.model.jsonld"), + path_to_json_ld=path_to_data_model, + graph=graph_data_model, root=data_type, use_annotations=True, ) @@ -323,8 +331,15 @@ def test_gather_all_fields(self, simple_manifest_generator): # assume there is no existing additional metadata @pytest.mark.parametrize("data_type,required_metadata_fields,expected", [("Patient", {"Component": []}, {'Component': ['Patient']}), ("BulkRNA-seqAssay", {"Filename": [], "Component":[]}, {'Component': ['BulkRNA-seqAssay']})]) def test_add_root_to_component_without_additional_metadata(self, helpers, data_type, required_metadata_fields, expected): + # Get path to data model + path_to_data_model = helpers.get_data_path("example.model.jsonld") + + # Get graph data model + graph_data_model = generate_graph_data_model(helpers, path_to_data_model=path_to_data_model) + manifest_generator = ManifestGenerator( - path_to_json_ld=helpers.get_data_path("example.model.jsonld"), + path_to_json_ld=path_to_data_model, + graph=graph_data_model, root=data_type, ) manifest_generator._add_root_to_component(required_metadata_fields) @@ -336,8 +351,15 @@ def test_add_root_to_component_without_additional_metadata(self, helpers, data_t # assume there is additional metadata @pytest.mark.parametrize("additional_metadata", [{'author': ['test', '', ], 'Filename': ['test.txt', 'test2.txt'], 'Component': []}, {'Year of Birth': ['1988'], 'Filename': ['test.txt'], 'Component': []}]) def test_add_root_to_component_with_additional_metadata(self, helpers, additional_metadata): + # Get path to data model + path_to_data_model = helpers.get_data_path("example.model.jsonld") + + # Get graph data model + graph_data_model = generate_graph_data_model(helpers, path_to_data_model=path_to_data_model) + manifest_generator = ManifestGenerator( - path_to_json_ld=helpers.get_data_path("example.model.jsonld"), + path_to_json_ld=path_to_data_model, + 
graph=graph_data_model, root="BulkRNA-seqAssay" ) @@ -382,7 +404,7 @@ def test_update_dataframe_with_existing_df(self, helpers, existing_manifest): sheet_url = True path_to_data_model = helpers.get_data_path("example.model.jsonld") - + # Get graph data model graph_data_model = generate_graph_data_model(helpers, path_to_data_model=path_to_data_model) From bf90b8b38b93bc0e39cac56827e3a56386790e90 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 15 Sep 2023 15:29:31 -0700 Subject: [PATCH 071/239] clean/document schematic/schemas/data_model_json_schema.py --- schematic/schemas/data_model_json_schema.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/schematic/schemas/data_model_json_schema.py b/schematic/schemas/data_model_json_schema.py index 47c386c4f..5a65c40e4 100644 --- a/schematic/schemas/data_model_json_schema.py +++ b/schematic/schemas/data_model_json_schema.py @@ -122,7 +122,7 @@ def get_json_validation_schema(self, source_node: str, schema_name: str) -> Dict # the domain node is very likely the parentof ("parentOf" relationship) of the range node root_dependencies = self.DME.get_adjacent_nodes_by_relationship( - node = source_node, relationship=self.rel_dict['requiresDependency']['edge_key'], + node_label = source_node, relationship=self.rel_dict['requiresDependency']['edge_key'], ) # if root_dependencies is empty it means that a class with name 'source_node' exists @@ -141,7 +141,7 @@ def get_json_validation_schema(self, source_node: str, schema_name: str) -> Dict node_is_processed = True node_range = self.DME.get_adjacent_nodes_by_relationship( - node=process_node, relationship=self.rel_dict['rangeIncludes']['edge_key'], + node_label=process_node, relationship=self.rel_dict['rangeIncludes']['edge_key'], ) @@ -149,7 +149,7 @@ def get_json_validation_schema(self, source_node: str, schema_name: str) -> Dict node_range_d = self.DME.get_nodes_display_names(node_list=node_range) node_dependencies = 
self.DME.get_adjacent_nodes_by_relationship( - node=process_node, relationship=self.rel_dict['requiresDependency']['edge_key'], + node_label=process_node, relationship=self.rel_dict['requiresDependency']['edge_key'], ) # get process node display name From 120bf1b8caaa0d040bb6c5e10a0dfb9cd9c36387 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 15 Sep 2023 15:30:04 -0700 Subject: [PATCH 072/239] update function names --- schematic/schemas/data_model_jsonld.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index f30798273..2b4b2b19d 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -4,7 +4,7 @@ from schematic.schemas.data_model_graph import DataModelGraphExplorer from schematic.schemas.data_model_relationships import DataModelRelationships -from schematic.utils.schema_utils import get_label_from_display_name, get_display_name_from_label, convert_bool +from schematic.utils.schema_utils import get_label_from_display_name, convert_bool_to_str class DataModelJsonLD(object): @@ -147,7 +147,7 @@ def add_contexts_to_entries(self, template:dict) -> dict: # Add appropritae contexts that have been removed in previous steps (for JSONLD) or did not exist to begin with (csv) if key == 'id' and rel_func == get_label_from_display_name: template[jsonld_key] = 'bts:' + template[jsonld_key] - elif key == 'required' and rel_func == convert_bool: + elif key == 'required' and rel_func == convert_bool_to_str: template[jsonld_key] = 'sms:' + str(template[jsonld_key]).lower() return template From 7040714fc2c4752b40cd59744df25b856b92c401 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 15 Sep 2023 15:30:39 -0700 Subject: [PATCH 073/239] add logger warning about JSONLD and a docstring --- schematic/schemas/data_model_parser.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/schematic/schemas/data_model_parser.py 
b/schematic/schemas/data_model_parser.py index 41c193bf2..cac6f9321 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -53,6 +53,7 @@ def get_model_type(self, path_to_data_model: str) -> str: path_to_data_model, str: path to data model Returns: str: uppercase, data model file extension. + Note: Consider moving this to Utils. ''' return pathlib.Path(path_to_data_model).suffix.replace('.', '').upper() @@ -309,6 +310,8 @@ def parse_jsonld_model( {Relationships: { CSV Header: Value}}} ''' + # Log warning that JSONLD parsing is in beta mode. + logger.warning('JSONLD parsing is in Beta Mode. Please inspect outputs carefully and report any errors.') # Load the json_ld model to df json_load = load_json(path_to_data_model) # Convert dataframe to attributes relationship dictionary. From ba20d4c8f69c23bb7c576b076542add3ca27b9e6 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 15 Sep 2023 15:31:11 -0700 Subject: [PATCH 074/239] explicitly call variables --- schematic/visualization/tangled_tree.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/visualization/tangled_tree.py b/schematic/visualization/tangled_tree.py index 33c89fbd8..2a5970e05 100644 --- a/schematic/visualization/tangled_tree.py +++ b/schematic/visualization/tangled_tree.py @@ -738,7 +738,7 @@ def get_ancestors_nodes(self, subgraph, components): """ all_parent_children = {} for component in components: - all_ancestors = self.DME.get_nodes_ancestors(subgraph, component) + all_ancestors = self.DME.get_nodes_ancestors(subgraph=subgraph, node_label=component) all_parent_children[component] = all_ancestors return all_parent_children From eaa768a0f2eb0969ee89630a31b54c4790f7ea5b Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 15 Sep 2023 15:31:46 -0700 Subject: [PATCH 075/239] update routes to take CSV --- schematic_api/api/routes.py | 79 +++++++++++++++++++++++++------------ 1 file changed, 54 insertions(+), 25 deletions(-) 
diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index b978ee789..0f147d971 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -5,6 +5,7 @@ import shutil import urllib.request import logging +import pathlib import pickle import connexion @@ -184,9 +185,12 @@ def save_file(file_key="csv_file"): return temp_path def initalize_metadata_model(schema_url): - jsonld = get_temp_jsonld(schema_url) + # get path to temp data model file (csv or jsonld) as appropriate + data_model = get_temp_model_path(schema_url) + + metadata_model = MetadataModel( - inputMModelLocation=jsonld, inputMModelLocationType="local" + inputMModelLocation=data_model, inputMModelLocationType="local" ) return metadata_model @@ -199,11 +203,32 @@ def get_temp_jsonld(schema_url): # get path to temporary JSON-LD file return tmp_file.name +def get_temp_csv(schema_url): + # retrieve a CSV via URL and store it in a temporary location + with urllib.request.urlopen(schema_url) as response: + with tempfile.NamedTemporaryFile(delete=False, suffix=".model.csv") as tmp_file: + shutil.copyfileobj(response, tmp_file) + + # get path to temporary csv file + return tmp_file.name + +def get_temp_model_path(schema_url): + # Get model type: + model_extension = pathlib.Path(schema_url).suffix.replace('.', '').upper() + if model_extension == 'CSV': + temp_path = get_temp_csv(schema_url) + elif model_extension == 'JSONLD': + temp_path = get_temp_jsonld(schema_url) + else: + raise ValueError("Did not provide a valid model type CSV or JSONLD, please check submission and try again.") + return temp_path + + # @before_request def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, asset_view = None, output_format=None, title=None, access_token=None, strict_validation:bool=True): """Get the immediate dependencies that are related to a given source node. 
Args: - schema_url: link to data model in json ld format + schema_url: link to data model in json ld or csv format title: title of a given manifest. dataset_id: Synapse ID of the "dataset" entity on Synapse (for a given center/project). output_format: contains three option: "excel", "google_sheet", and "dataframe". if set to "excel", return an excel spreadsheet @@ -217,10 +242,7 @@ def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, # call config_handler() config_handler(asset_view = asset_view) - - # get path to temporary JSON-LD file - jsonld = get_temp_jsonld(schema_url) - + # Gather all data_types to make manifests for. all_args = connexion.request.args args_dict = dict(all_args.lists()) @@ -255,7 +277,7 @@ def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, f"Please check your submission and try again." ) - data_model_parser = DataModelParser(path_to_data_model = jsonld) + data_model_parser = DataModelParser(path_to_data_model = schema_url) #Parse Model parsed_data_model = data_model_parser.parse_model() @@ -270,7 +292,7 @@ def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, def create_single_manifest(data_type, title, dataset_id=None, output_format=None, access_token=None, strict=strict_validation): # create object of type ManifestGenerator manifest_generator = ManifestGenerator( - path_to_json_ld=jsonld, + path_to_json_ld=schema_url, graph=graph_data_model, title=title, root=data_type, @@ -359,11 +381,11 @@ def validate_manifest_route(schema_url, data_type, restrict_rules=None, json_str else: temp_path = jsc.convert_json_file_to_csv("file_name") - # get path to temporary JSON-LD file - jsonld = get_temp_jsonld(schema_url) + # get path to temp data model file (csv or jsonld) as appropriate + data_model = get_temp_model_path(schema_url) metadata_model = MetadataModel( - inputMModelLocation=jsonld, inputMModelLocationType="local" + inputMModelLocation=data_model, 
inputMModelLocationType="local" ) errors, warnings = metadata_model.validateModelManifest( @@ -412,9 +434,12 @@ def submit_manifest_route(schema_url, asset_view=None, manifest_record_type=None validate_component = None else: validate_component = data_type + + # get path to temp data model file (csv or jsonld) as appropriate + data_model = get_temp_model_path(schema_url) manifest_id = metadata_model.submit_metadata_manifest( - path_to_json_ld = schema_url, + path_to_json_ld = data_model, manifest_path=temp_path, dataset_id=dataset_id, validate_component=validate_component, @@ -431,14 +456,14 @@ def populate_manifest_route(schema_url, title=None, data_type=None, return_excel # call config_handler() config_handler() - # get path to temporary JSON-LD file - jsonld = get_temp_jsonld(schema_url) - # Get path to temp file where manifest file contents will be saved temp_path = save_file() + + # get path to temp data model file (csv or jsonld) as appropriate + data_model = get_temp_model_path(schema_url) #Initalize MetadataModel - metadata_model = MetadataModel(inputMModelLocation=jsonld, inputMModelLocationType='local') + metadata_model = MetadataModel(inputMModelLocation=data_model, inputMModelLocationType='local') #Call populateModelManifest class populated_manifest_link = metadata_model.populateModelManifest(title=title, manifestPath=temp_path, rootNode=data_type, return_excel=return_excel) @@ -517,9 +542,10 @@ def get_viz_attributes_explorer(schema_url): # call config_handler() config_handler() - temp_path_to_jsonld = get_temp_jsonld(schema_url) + # get path to temp data model file (csv or jsonld) as appropriate + data_model = get_temp_model_path(schema_url) - attributes_csv = AttributesExplorer(temp_path_to_jsonld).parse_attributes(save_file=False) + attributes_csv = AttributesExplorer(data_model).parse_attributes(save_file=False) return attributes_csv @@ -527,19 +553,21 @@ def get_viz_component_attributes_explorer(schema_url, component, include_index): # call 
config_handler() config_handler() - temp_path_to_jsonld = get_temp_jsonld(schema_url) + # get path to temp data model file (csv or jsonld) as appropriate + data_model = get_temp_model_path(schema_url) - attributes_csv = AttributesExplorer(temp_path_to_jsonld).parse_component_attributes(component, save_file=False, include_index=include_index) + attributes_csv = AttributesExplorer(data_model).parse_component_attributes(component, save_file=False, include_index=include_index) return attributes_csv @cross_origin(["http://localhost", "https://sage-bionetworks.github.io"]) def get_viz_tangled_tree_text(schema_url, figure_type, text_format): - temp_path_to_jsonld = get_temp_jsonld(schema_url) + # get path to temp data model file (csv or jsonld) as appropriate + data_model = get_temp_model_path(schema_url) # Initialize TangledTree - tangled_tree = TangledTree(temp_path_to_jsonld, figure_type) + tangled_tree = TangledTree(data_model, figure_type) # Get text for tangled tree. text_df = tangled_tree.get_text_for_tangled_tree(text_format, save_file=False) @@ -552,10 +580,11 @@ def get_viz_tangled_tree_layers(schema_url, figure_type): # call config_handler() config_handler() - temp_path_to_jsonld = get_temp_jsonld(schema_url) + # get path to temp data model file (csv or jsonld) as appropriate + data_model = get_temp_model_path(schema_url) # Initialize Tangled Tree - tangled_tree = TangledTree(temp_path_to_jsonld, figure_type) + tangled_tree = TangledTree(data_model, figure_type) # Get tangled trees layers JSON. 
layers = tangled_tree.get_tangled_tree_layers(save_file=False) From 89e83ce63738479a9088a94b58230c7ffb319713 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 15 Sep 2023 15:32:31 -0700 Subject: [PATCH 076/239] added TODO --- tests/test_utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_utils.py b/tests/test_utils.py index 52f0dd7d9..0fe92acf4 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -494,6 +494,7 @@ def test_csv_to_schemaorg(self, helpers, tmp_path): This test also ensures that the CSV and JSON-LD files for the example data model stay in sync. + TODO: This probably should be moved out of here and to test_schemas """ csv_path = helpers.get_data_path("example.model.csv") From 4716e4ee4b5c4bb44e3e95b7db5378e7f93a3232 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Sat, 16 Sep 2023 17:01:58 -0700 Subject: [PATCH 077/239] Add stubs for tests/test_schemas.py --- tests/test_schemas.py | 460 ++++++++++++++++++------------------------ 1 file changed, 193 insertions(+), 267 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index eee8d7058..fe35ebe93 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -6,291 +6,156 @@ #from schematic.schemas import df_parser from schematic.utils.df_utils import load_df -from schematic.schemas.generator import SchemaGenerator +from schematic.schemas.data_model_graph import DataModelGraph +from schematic.schemas.data_model_nodes import DataModelNodes +from schematic.schemas.data_model_edges import DataModelEdges +from schematic.schemas.data_model_graph import DataModelGraphExplorer +from schematic.schemas.data_model_relationships import DataModelRelationships +from schematic.schemas.data_model_jsonld import DataModelJsonLD +from schematic.schemas.data_model_json_schema import DataModelJSONSchema +from schematic.schemas.data_model_parser import DataModelParser logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) -class 
TestDataModelEdges(): - def test_generate_edge(self,helpers): - return - -class TestDataModelGraph(): +def generate_graph_data_model(helpers, path_to_data_model): + """ + Simple helper function to generate a networkx graph data model from a CSV or JSONLD data model + """ + # Instantiate Parser + data_model_parser = DataModelParser(path_to_data_model=path_to_data_model) -''' -@pytest.fixture -def extended_schema_path(helpers, tmp_path): - data_model_csv_path = helpers.get_data_path("example.model.csv") - - example_model_df = load_df(data_model_csv_path) - - # additional "Assay" attribute to be added to example schema - assay_attr_row = { - "Attribute": "Assay", - "Description": ( - "A planned process with the objective to produce information " - "about the material entity that is the evaluant, by physically " - "examining it or its proxies.[OBI_0000070]" - ), - "Valid Values": "", - "DependsOn": "", - "Properties": "", - "Required": False, - "Parent": "", - "DependsOn Component": "", - "Source": "http://purl.obolibrary.org/obo/OBI_0000070", - "Validation Rules": "", - } - - example_model_df = example_model_df.append(assay_attr_row, ignore_index=True) - - # create empty temporary file to write extended schema to - schemas_folder = tmp_path / "schemas" - schemas_folder.mkdir() - extended_schema_path = schemas_folder / "extended_example.model.csv" - - example_model_df.to_csv(extended_schema_path) - - yield extended_schema_path + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Convert parsed model to graph + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + return graph_data_model @pytest.fixture -def sg(helpers): - - inputModelLocation = helpers.get_data_path('example.model.jsonld') - sg = SchemaGenerator(inputModelLocation) - - yield sg - -class TestDfParser: - def test_get_class(self, helpers): - - se_obj = 
helpers.get_schema_explorer("example.model.jsonld") - - actual = df_parser.get_class( - se=se_obj, - class_display_name="Test", - description="This is a dummy test class", - subclass_of=["Thing"], - requires_dependencies=["Test_Dep_1", "Test_Dep_2"], - requires_range=["Test_Start", "Test_End"], - requires_components=["Test_Comp_1", "Test_Comp_2"], - required=True, - validation_rules=["Rule_1", "Rule_2"], - ) - - expected = { - "@id": "bts:Test", - "@type": "rdfs:Class", - "rdfs:comment": "This is a dummy test class", - "rdfs:label": "Test", - "rdfs:subClassOf": [{"@id": "bts:Thing"}], - "schema:isPartOf": {"@id": "http://schema.biothings.io"}, - "schema:rangeIncludes": [{"@id": "bts:TestStart"}, {"@id": "bts:TestEnd"}], - "sms:displayName": "Test", - "sms:required": "sms:true", - "sms:requiresComponent": [ - {"@id": "bts:Test_Comp_1"}, - {"@id": "bts:Test_Comp_2"}, - ], - "sms:requiresDependency": [ - {"@id": "bts:Test_Dep_1"}, - {"@id": "bts:Test_Dep_2"}, - ], - "sms:validationRules": ["Rule_1", "Rule_2"], - } +def DME(helpers, data_model_name='example.model.csv'): + path_to_data_model = helpers.get_data_path("example.model.jsonld") - assert expected == actual + graph_data_model = generate_graph_data_model(helpers, path_to_data_model=path_to_data_model) + DME = DataModelGraphExplorer(graph_data_model) + yield DME - def test_get_property(self, helpers): +class TestDataModelParser: + def test_get_base_schema_path(self, helpers): + return - se_obj = helpers.get_schema_explorer("example.model.jsonld") + def test_get_model_type(self): + return - actual = df_parser.get_property( - se=se_obj, - property_display_name="Test", - property_class_names=["Prop_Class"], - description="This is a dummy test property", - requires_range=["Test_Start", "Test_End"], - requires_dependencies=["Test_Dep_1", "Test_Dep_2"], - required=True, - validation_rules=["Rule_1", "Rule_2"], - ) + def test_parse_model(self): + return - expected = { - "@id": "bts:test", - "@type": "rdf:Property", - 
"rdfs:comment": "This is a dummy test property", - "rdfs:label": "test", - "schema:isPartOf": {"@id": "http://schema.biothings.io"}, - "schema:rangeIncludes": [{"@id": "bts:TestStart"}, {"@id": "bts:TestEnd"}], - "sms:displayName": "Test", - "sms:required": "sms:true", - "schema:domainIncludes": [{"@id": "bts:PropClass"}], - "sms:requiresDependency": [ - {"@id": "bts:Test_Dep_1"}, - {"@id": "bts:Test_Dep_2"}, - ], - "sms:validationRules": ["Rule_1", "Rule_2"], - } +class TestDataModelCsvParser: + def test_check_schema_definition(self): + return + def test_gather_csv_attributes_relationships(self): + return + def test_parse_csv_model(self ): + return - assert expected == actual +class TestDataModelJsonLdParser: + def test_gather_jsonld_attributes_relationships(self): + return + def test_parse_jsonld_model(self): + return + +class TestDataModelRelationships: + def test_define_data_model_relationships(self): + return + def test_define_required_csv_headers(self): + return + def test_define_edge_relationships(self): + return + def test_define_value_relationships(self): + return - def test_attribute_exists(self, helpers): +class TestDataModelGraph: + def test_generate_data_model_graph(self): + return - se_obj = helpers.get_schema_explorer("example.model.jsonld") +class TestDataModelGraphExplorer: + def test_find_properties(self): + return - # test when attribute is present in data model - attribute_present = df_parser.attribute_exists(se_obj, "Patient") - - # test when attribute is not present in data model - attribute_absent = df_parser.attribute_exists(se_obj, "RandomAttribute") + def test_find_classes(self): + return - assert attribute_present - assert not attribute_absent - - def test_check_schema_definition(self, helpers): - - data_model_csv_path = helpers.get_data_path("example.model.csv") - - example_model_df = load_df(data_model_csv_path) - - # when all required headers are provided in the CSV data model - actual_df = 
df_parser.check_schema_definition(example_model_df) - - assert actual_df is None - - # when either "Requires" or "Requires Component" is present - # in column headers, raise ValueError - if "DependsOn Component" in example_model_df.columns: - del example_model_df["DependsOn Component"] - - example_model_df["Requires Component"] = "" - - with pytest.raises(ValueError): - df_parser.check_schema_definition(example_model_df) - - def test_create_nx_schema_objects(self, helpers, extended_schema_path): - - se_obj = helpers.get_schema_explorer("example.model.jsonld") - - # path to extended CSV data model which has one additional attribute - # namely, "Assay" - extended_csv_model_path = helpers.get_data_path(extended_schema_path) - - extended_model_df = load_df(extended_csv_model_path, data_model=True) - - extended_csv_model_se = df_parser.create_nx_schema_objects( - extended_model_df, se_obj - ) - - # check if the "Assay" attribute has been added to the new SchemaExplorer - # object with attributes from the extended schema - result = df_parser.attribute_exists(extended_csv_model_se, "Assay") - - assert result - - def test_get_base_schema_path(self): - - base_schema_path = "/path/to/base_schema.jsonld" - - # path to base schema is returned when base_schema is passed - result_path = df_parser._get_base_schema_path(base_schema=base_schema_path) - - assert result_path == "/path/to/base_schema.jsonld" - - # path to default BioThings data model is returned when no - # base schema path is passed explicitly - biothings_path = df_parser._get_base_schema_path() - - assert os.path.basename(biothings_path) == "biothings.model.jsonld" - - def test_convert_csv_to_data_model(self, helpers, extended_schema_path): - - csv_path = helpers.get_data_path("example.model.jsonld") - - extended_csv_model_path = helpers.get_data_path(extended_schema_path) - - # convert extended CSV data model to JSON-LD using provided - # CSV data model as base schema - extended_csv_model_se = 
df_parser._convert_csv_to_data_model( - extended_csv_model_path, csv_path - ) - - # if new attribute can be found in extended_csv_model_se - # we know the conversion was successful - attribute_present = df_parser.attribute_exists(extended_csv_model_se, "Assay") - - assert attribute_present -<<<<<<< HEAD -''' -======= - - def test_get_property_label_from_display_name(self, helpers): - se_obj = helpers.get_schema_explorer("example.model.jsonld") + def test_find_node_range(self): + return + + def test_get_adjacent_nodes_by_relationship(self): + return + + def test_get_component_requirements(self): + return + + def test_get_component_requirements_graph(self): + return + + def get_descendants_by_edge_type(self): + return + + def test_get_digraph_by_edge_type(self): + return + + def test_get_edges_by_relationship(self): + return + + def test_get_ordered_entry(self): + return + + def test_get_nodes_ancestors(self): + return + + def test_get_node_comment(self): + return + + def test_get_node_dependencies(self): + return + + def test_get_nodes_descendants(self): + return + + def test_get_nodes_display_names(self): + return + + def test_get_node_label(self): + return + + def test_get_node_range(self): + return + + def test_get_node_required(self): + return + + def test_get_node_validation_rules(self): + return + + def test_get_subgraph_by_edge_type(self): + return + + def test_find_adjacent_child_classes(self): + return + + def test_find_parent_classes(self): + return + + def test_full_schema_graph(self): + return - # tests where strict_camel_case is the same - assert(se_obj.get_property_label_from_display_name("howToAcquire") == "howToAcquire") - assert(se_obj.get_property_label_from_display_name("howToAcquire", strict_camel_case = True) == "howToAcquire") - assert(se_obj.get_property_label_from_display_name("how_to_acquire") == "howToAcquire") - assert(se_obj.get_property_label_from_display_name("how_to_acquire", strict_camel_case = True) == "howToAcquire") - 
assert(se_obj.get_property_label_from_display_name("howtoAcquire") == "howtoAcquire") - assert(se_obj.get_property_label_from_display_name("howtoAcquire", strict_camel_case = True) == "howtoAcquire") - assert(se_obj.get_property_label_from_display_name("How To Acquire") == "howToAcquire") - assert(se_obj.get_property_label_from_display_name("How To Acquire", strict_camel_case = True) == "howToAcquire") - assert(se_obj.get_property_label_from_display_name("Model Of Manifestation") == "modelOfManifestation") - assert(se_obj.get_property_label_from_display_name("Model Of Manifestation", strict_camel_case = True) == "modelOfManifestation") - assert(se_obj.get_property_label_from_display_name("ModelOfManifestation") == "modelOfManifestation") - assert(se_obj.get_property_label_from_display_name("ModelOfManifestation", strict_camel_case = True) == "modelOfManifestation") - assert(se_obj.get_property_label_from_display_name("model Of Manifestation") == "modelOfManifestation") - assert(se_obj.get_property_label_from_display_name("model Of Manifestation", strict_camel_case = True) == "modelOfManifestation") - - # tests where strict_camel_case changes the result - assert(se_obj.get_property_label_from_display_name("how to Acquire") == "howtoAcquire") - assert(se_obj.get_property_label_from_display_name("how to Acquire", strict_camel_case = True) == "howToAcquire") - assert(se_obj.get_property_label_from_display_name("How to Acquire") == "howtoAcquire") - assert(se_obj.get_property_label_from_display_name("How to Acquire", strict_camel_case = True) == "howToAcquire") - assert(se_obj.get_property_label_from_display_name("how to acquire") == "howtoacquire") - assert(se_obj.get_property_label_from_display_name("how to acquire", strict_camel_case = True) == "howToAcquire") - assert(se_obj.get_property_label_from_display_name("model of manifestation") == "modelofmanifestation") - assert(se_obj.get_property_label_from_display_name("model of manifestation", strict_camel_case = True) 
== "modelOfManifestation") - assert(se_obj.get_property_label_from_display_name("model of manifestation") == "modelofmanifestation") - assert(se_obj.get_property_label_from_display_name("model of manifestation", strict_camel_case = True) == "modelOfManifestation") - - def test_get_class_label_from_display_name(self, helpers): - se_obj = helpers.get_schema_explorer("example.model.jsonld") - - # tests where strict_camel_case is the same - assert(se_obj.get_class_label_from_display_name("howToAcquire") == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("howToAcquire", strict_camel_case = True) == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("how_to_acquire") == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("how_to_acquire", strict_camel_case = True) == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("howtoAcquire") == "HowtoAcquire") - assert(se_obj.get_class_label_from_display_name("howtoAcquire", strict_camel_case = True) == "HowtoAcquire") - assert(se_obj.get_class_label_from_display_name("How To Acquire") == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("How To Acquire", strict_camel_case = True) == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("Model Of Manifestation") == "ModelOfManifestation") - assert(se_obj.get_class_label_from_display_name("Model Of Manifestation", strict_camel_case = True) == "ModelOfManifestation") - assert(se_obj.get_class_label_from_display_name("ModelOfManifestation") == "ModelOfManifestation") - assert(se_obj.get_class_label_from_display_name("ModelOfManifestation", strict_camel_case = True) == "ModelOfManifestation") - assert(se_obj.get_class_label_from_display_name("model Of Manifestation") == "ModelOfManifestation") - assert(se_obj.get_class_label_from_display_name("model Of Manifestation", strict_camel_case = True) == "ModelOfManifestation") - - # tests where strict_camel_case changes the result - 
assert(se_obj.get_class_label_from_display_name("how to Acquire") == "HowtoAcquire") - assert(se_obj.get_class_label_from_display_name("how to Acquire", strict_camel_case = True) == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("How to Acquire") == "HowtoAcquire") - assert(se_obj.get_class_label_from_display_name("How to Acquire", strict_camel_case = True) == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("how to acquire") == "Howtoacquire") - assert(se_obj.get_class_label_from_display_name("how to acquire", strict_camel_case = True) == "HowToAcquire") - assert(se_obj.get_class_label_from_display_name("model of manifestation") == "Modelofmanifestation") - assert(se_obj.get_class_label_from_display_name("model of manifestation", strict_camel_case = True) == "ModelOfManifestation") - assert(se_obj.get_class_label_from_display_name("model of manifestation") == "Modelofmanifestation") - assert(se_obj.get_class_label_from_display_name("model of manifestation", strict_camel_case = True) == "ModelOfManifestation") - -class TestSchemaExplorer: @pytest.mark.parametrize("class_name, expected_in_schema", [("Patient",True), ("ptaient",False), ("Biospecimen",True), ("InvalidComponent",False)]) - def test_is_class_in_schema(self, sg, class_name, expected_in_schema): + def test_is_class_in_schema(self, DME, class_name, expected_in_schema): """ Test to cover checking if a given class is in a schema. 
`is_class_in_schema` should return `True` if the class is in the schema @@ -298,8 +163,69 @@ def test_is_class_in_schema(self, sg, class_name, expected_in_schema): """ # Check if class is in schema - class_in_schema = sg.se.is_class_in_schema(class_name) + class_in_schema = DME.is_class_in_schema(class_name) # Assert value is as expected assert class_in_schema == expected_in_schema ->>>>>>> ca5f2938123180b2b3a3448ea2c4b86af9cbe453 + + def test_sub_schema_graph(self): + return + +class TestDataModelNodes: + def test_gather_nodes(self): + return + def test_gather_all_nodes(self): + return + def test_get_rel_node_dict_info(self): + return + def test_get_data_model_properties(self): + return + def test_get_entry_type(self): + return + def test_run_rel_functions(self): + return + def test_generate_node_dict(self): + return + def test_generate_node(self): + return + +class TestDataModelEdges: + def test_generate_edge(self,helpers): + return + + +class TestDataModelJsonSchema: + def test_get_array_schema(self): + return + def test_get_non_blank_schema(self): + return + def test_get_json_validation_schema(self): + return + +class TestDataModelJsonLd: + def test_base_jsonld_template(self): + return + def test_create_object(self): + return + def test_add_contexts_to_entries(self): + return + def test_clean_template(self): + return + def test_strip_context(self): + return + def test_reorder_template_entries(self): + return + def test_property_template(self): + return + def test_class_template(self): + return + def test_generate_jsonld_object(self): + return + def test_convert_graph_to_jsonld(self): + return +class TestSchemas: + def test_convert_csv_to_graph(self, helpers): + return + def test_convert_jsonld_to_graph(self, helpers): + return + From f7fb1aa563aa0d55825d2d27c304174514075d07 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Wed, 20 Sep 2023 08:55:18 -0700 Subject: [PATCH 078/239] added first set of tests for DataModelRelationships class --- tests/test_schemas.py 
| 37 +++++++++++++++++++++++++++++-------- 1 file changed, 29 insertions(+), 8 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index fe35ebe93..f40f6fd8a 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -46,6 +46,11 @@ def DME(helpers, data_model_name='example.model.csv'): DME = DataModelGraphExplorer(graph_data_model) yield DME +@pytest.fixture(name="dmr_object") +def dmr(): + """Yields a data model relationships object for testing""" + yield DataModelRelationships() + class TestDataModelParser: def test_get_base_schema_path(self, helpers): return @@ -71,14 +76,30 @@ def test_parse_jsonld_model(self): return class TestDataModelRelationships: - def test_define_data_model_relationships(self): - return - def test_define_required_csv_headers(self): - return - def test_define_edge_relationships(self): - return - def test_define_value_relationships(self): - return + """Tests for DataModelRelationships class""" + def test_define_data_model_relationships(self, dmr_object: DataModelRelationships): + """Tests relationships_dictionary attribute created""" + relationships = dmr_object.relationships_dictionary + assert isinstance(relationships, dict) + assert relationships + + def test_define_required_csv_headers(self, dmr_object: DataModelRelationships): + """Tests method returns a list""" + csv_headers = dmr_object.define_required_csv_headers() + assert isinstance(csv_headers, list) + assert csv_headers + + def test_define_edge_relationships(self, dmr_object: DataModelRelationships): + """Tests method returns a dict""" + edge_relationships = dmr_object.define_edge_relationships() + assert isinstance(edge_relationships, dict) + assert edge_relationships + + def test_define_value_relationships(self, dmr_object: DataModelRelationships): + """Tests method returns a dict""" + value_relationships = dmr_object.define_value_relationships() + assert isinstance(value_relationships, dict) + assert value_relationships class TestDataModelGraph: 
def test_generate_data_model_graph(self): From 9e7298b070283b2f86fa7043a19895927455c8da Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 20 Sep 2023 11:48:34 -0700 Subject: [PATCH 079/239] add test_get_base_schema_path test, update DME that is yielded to take from data_model_name --- tests/test_schemas.py | 27 ++++++++++++++++++++++++--- 1 file changed, 24 insertions(+), 3 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index fe35ebe93..c4cdb664d 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -40,15 +40,36 @@ def generate_graph_data_model(helpers, path_to_data_model): @pytest.fixture def DME(helpers, data_model_name='example.model.csv'): - path_to_data_model = helpers.get_data_path("example.model.jsonld") + ''' + In future could pull using helpers. + ''' + path_to_data_model = helpers.get_data_path(data_model_name) graph_data_model = generate_graph_data_model(helpers, path_to_data_model=path_to_data_model) DME = DataModelGraphExplorer(graph_data_model) yield DME - +''' +@pytest.fixture +def data_model_parser(helper, data_model_name:str='example.model.csv'): + path_to_data_model = helpers.get_data_path(data_model_name) + data_model_parser = DataModelParser(path_to_data_model=path_to_data_model) + yield data_model_parser +''' class TestDataModelParser: def test_get_base_schema_path(self, helpers): - return + '''Test that base schema path is returned properly. + Note: + data model parser class does not currently accept an new path to a base schema, + so just test that default BioThings data model path is returned. + ''' + # Instantiate DMP, Data model parser. + path_to_data_model = helpers.get_data_path('example.model.csv') + DMP = DataModelParser(path_to_data_model=path_to_data_model) + + # Get path to default biothings model. 
+ biothings_path = DMP._get_base_schema_path(base_schema=None) + + assert os.path.basename(biothings_path) == "biothings.model.jsonld" def test_get_model_type(self): return From 1e6a984bd20558e1f609d905c719bfa1162e2905 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Thu, 21 Sep 2023 09:11:53 -0700 Subject: [PATCH 080/239] improved tests --- tests/test_schemas.py | 87 ++++++++++++++++++++++++++++++------------- 1 file changed, 62 insertions(+), 25 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index f40f6fd8a..af3ed6047 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -46,8 +46,8 @@ def DME(helpers, data_model_name='example.model.csv'): DME = DataModelGraphExplorer(graph_data_model) yield DME -@pytest.fixture(name="dmr_object") -def dmr(): +@pytest.fixture(name="dmr") +def fixture_dmr(): """Yields a data model relationships object for testing""" yield DataModelRelationships() @@ -77,29 +77,66 @@ def test_parse_jsonld_model(self): class TestDataModelRelationships: """Tests for DataModelRelationships class""" - def test_define_data_model_relationships(self, dmr_object: DataModelRelationships): - """Tests relationships_dictionary attribute created""" - relationships = dmr_object.relationships_dictionary - assert isinstance(relationships, dict) - assert relationships - - def test_define_required_csv_headers(self, dmr_object: DataModelRelationships): - """Tests method returns a list""" - csv_headers = dmr_object.define_required_csv_headers() - assert isinstance(csv_headers, list) - assert csv_headers - - def test_define_edge_relationships(self, dmr_object: DataModelRelationships): - """Tests method returns a dict""" - edge_relationships = dmr_object.define_edge_relationships() - assert isinstance(edge_relationships, dict) - assert edge_relationships - - def test_define_value_relationships(self, dmr_object: DataModelRelationships): - """Tests method returns a dict""" - value_relationships = dmr_object.define_value_relationships() 
- assert isinstance(value_relationships, dict) - assert value_relationships + def test_define_data_model_relationships(self, dmr: DataModelRelationships): + """Tests relationships_dictionary created has correct keys""" + required_keys = [ + 'jsonld_key', + 'csv_header', + 'type', + 'edge_rel', + 'required_header' + ] + required_edge_keys = ['edge_key', 'edge_dir'] + required_node_keys = ['node_label', 'node_attr_dict'] + + relationships = dmr.relationships_dictionary + + for relationship in relationships.values(): + for key in required_keys: + assert key in relationship.keys() + if relationship['edge_rel']: + for key in required_edge_keys: + assert key in relationship.keys() + else: + for key in required_node_keys: + assert key in relationship.keys() + + def test_define_required_csv_headers(self, dmr: DataModelRelationships): + """Tests method returns correct values""" + assert dmr.define_required_csv_headers() == [ + 'Attribute', + 'Description', + 'Valid Values', + 'DependsOn', + 'DependsOn Component', + 'Required', 'Parent', + 'Validation Rules', + 'Properties', + 'Source' + ] + + def test_define_edge_relationships(self, dmr: DataModelRelationships): + """Tests method returns correct values""" + assert dmr.define_edge_relationships() == { + 'rangeIncludes': 'Valid Values', + 'requiresDependency': 'DependsOn', + 'requiresComponent': 'DependsOn Component', + 'subClassOf': 'Parent', + 'domainIncludes': 'Properties' + } + + def test_define_value_relationships(self, dmr: DataModelRelationships): + """Tests method returns correct values""" + assert dmr.define_value_relationships() == { + 'displayName': 'Attribute', + 'label': None, + 'comment': 'Description', + 'required': 'Required', + 'validationRules': 'Validation Rules', + 'isPartOf': None, + 'id': 'Source' + } + class TestDataModelGraph: def test_generate_data_model_graph(self): From 5d7864da026175bb09863ad335204a3a1aa37c9d Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 22 Sep 2023 11:19:16 -0700 
Subject: [PATCH 081/239] add test_get_model_type --- schematic/schemas/data_model_parser.py | 4 ++-- tests/test_schemas.py | 13 +++++++++++-- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index cac6f9321..1cd9a3b5c 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -47,7 +47,7 @@ def _get_base_schema_path(self, base_schema: str = None) -> str: return self.base_schema_path - def get_model_type(self, path_to_data_model: str) -> str: + def get_model_type(self) -> str: '''Parses the path to the data model to extract the extension and determine the data model type. Args: path_to_data_model, str: path to data model @@ -55,7 +55,7 @@ def get_model_type(self, path_to_data_model: str) -> str: str: uppercase, data model file extension. Note: Consider moving this to Utils. ''' - return pathlib.Path(path_to_data_model).suffix.replace('.', '').upper() + return pathlib.Path(self.path_to_data_model).suffix.replace('.', '').upper() def parse_base_model(self): '''Parse base data model that new model could be built upon. diff --git a/tests/test_schemas.py b/tests/test_schemas.py index c4cdb664d..a3a200760 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -71,10 +71,19 @@ def test_get_base_schema_path(self, helpers): assert os.path.basename(biothings_path) == "biothings.model.jsonld" - def test_get_model_type(self): - return + @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) + def test_get_model_type(self, helpers, data_model): + # Instantiate DMP, Data model parser. 
+ path_to_data_model = helpers.get_data_path(data_model) + DMP = DataModelParser(path_to_data_model=path_to_data_model) + + # Check the data model type + assert (data_model == 'example.model.csv') == (DMP.model_type == 'CSV') + assert (data_model == 'example.model.jsonld') == (DMP.model_type == 'JSONLD') def test_parse_model(self): + '''Test that the correct parser is called and that a dictionary is returned in the expected structure. + ''' return class TestDataModelCsvParser: From 17a54bf88226d14d80862c84872b066a52b9be4c Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 22 Sep 2023 11:21:04 -0700 Subject: [PATCH 082/239] dont take path to data model for get_model_type --- schematic/schemas/data_model_parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 1cd9a3b5c..293d18458 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -30,7 +30,7 @@ def __init__( """ self.path_to_data_model = path_to_data_model - self.model_type = self.get_model_type(path_to_data_model) + self.model_type = self.get_model_type() self.base_schema_path = None def _get_base_schema_path(self, base_schema: str = None) -> str: From 2cb11e2dd04d0ce81414260a15c6aa229fbbf4a2 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 22 Sep 2023 14:33:00 -0700 Subject: [PATCH 083/239] add test_parse_model and use helpers to get data_model_parser --- tests/test_schemas.py | 36 +++++++++++++++++++++--------------- 1 file changed, 21 insertions(+), 15 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index a3a200760..52c4c3315 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -48,13 +48,8 @@ def DME(helpers, data_model_name='example.model.csv'): graph_data_model = generate_graph_data_model(helpers, path_to_data_model=path_to_data_model) DME = DataModelGraphExplorer(graph_data_model) yield DME -''' 
-@pytest.fixture -def data_model_parser(helper, data_model_name:str='example.model.csv'): - path_to_data_model = helpers.get_data_path(data_model_name) - data_model_parser = DataModelParser(path_to_data_model=path_to_data_model) - yield data_model_parser -''' + + class TestDataModelParser: def test_get_base_schema_path(self, helpers): '''Test that base schema path is returned properly. @@ -63,8 +58,7 @@ def test_get_base_schema_path(self, helpers): so just test that default BioThings data model path is returned. ''' # Instantiate DMP, Data model parser. - path_to_data_model = helpers.get_data_path('example.model.csv') - DMP = DataModelParser(path_to_data_model=path_to_data_model) + data_model_parser = helpers.get_data_model_parser(path_to_data_model=path_to_data_model) # Get path to default biothings model. biothings_path = DMP._get_base_schema_path(base_schema=None) @@ -74,17 +68,29 @@ def test_get_base_schema_path(self, helpers): @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) def test_get_model_type(self, helpers, data_model): # Instantiate DMP, Data model parser. 
- path_to_data_model = helpers.get_data_path(data_model) - DMP = DataModelParser(path_to_data_model=path_to_data_model) + data_model_parser = helpers.get_data_model_parser(path_to_data_model=path_to_data_model) # Check the data model type - assert (data_model == 'example.model.csv') == (DMP.model_type == 'CSV') - assert (data_model == 'example.model.jsonld') == (DMP.model_type == 'JSONLD') + assert (data_model == 'example.model.csv') == (data_model_parser.model_type == 'CSV') + assert (data_model == 'example.model.jsonld') == (data_model_parser.model_type == 'JSONLD') - def test_parse_model(self): + @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) + def test_parse_model(self, helpers, data_model): '''Test that the correct parser is called and that a dictionary is returned in the expected structure. ''' - return + # Instantiate DMP, Data model parser. + data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) + # Parse Model + model_dict = data_model_parser.parse_model() + + # Get a key in the model + attribute_key = list(model_dict.keys())[0] + + # Check that the structure of the model dictionary conforms to expectations. 
+ assert True == (type(model_dict) == dict) + assert True == (attribute_key in model_dict.keys()) + assert True == ('Relationships' in model_dict[attribute_key]) + assert True == ('Attribute' in model_dict[attribute_key]['Relationships']) class TestDataModelCsvParser: def test_check_schema_definition(self): From b8a509d3ae0aa1bf498fdd22352fa20ec2cd5bf2 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 22 Sep 2023 14:33:36 -0700 Subject: [PATCH 084/239] add helper to get_data_model_parser --- tests/conftest.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 5d403185a..edb6afbe7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -77,6 +77,14 @@ def get_data_model_explorer(path=None, *paths): DME = DataModelGraphExplorer(graph_data_model) return DME + + @staticmethod + def get_data_model_parser(data_model_name:str=None, *paths): + # Get path to data model + fullpath = Helpers.get_data_path(path=data_model_name, *paths) + # Instantiate DataModelParser + data_model_parser = DataModelParser(path_to_data_model=fullpath) + return data_model_parser @staticmethod From 8152ddcc0e266199c0605c4850d7007506b7601e Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 22 Sep 2023 14:50:27 -0700 Subject: [PATCH 085/239] fix issues with changing how data_model_parser instantiated --- tests/test_schemas.py | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 52c4c3315..8beb9789e 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -18,13 +18,13 @@ logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) -def generate_graph_data_model(helpers, path_to_data_model): +def generate_graph_data_model(helpers, data_model_name): """ Simple helper function to generate a networkx graph data model from a CSV or JSONLD data model """ # Instantiate Parser - data_model_parser = 
DataModelParser(path_to_data_model=path_to_data_model) + data_model_parser = helpers.get_data_model_parser(data_model_name=data_model_name) #Parse Model parsed_data_model = data_model_parser.parse_model() @@ -43,9 +43,7 @@ def DME(helpers, data_model_name='example.model.csv'): ''' In future could pull using helpers. ''' - path_to_data_model = helpers.get_data_path(data_model_name) - - graph_data_model = generate_graph_data_model(helpers, path_to_data_model=path_to_data_model) + graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model_name) DME = DataModelGraphExplorer(graph_data_model) yield DME @@ -57,18 +55,18 @@ def test_get_base_schema_path(self, helpers): data model parser class does not currently accept an new path to a base schema, so just test that default BioThings data model path is returned. ''' - # Instantiate DMP, Data model parser. - data_model_parser = helpers.get_data_model_parser(path_to_data_model=path_to_data_model) + # Instantiate Data model parser. + data_model_parser = helpers.get_data_model_parser(data_model_name='example.model.csv') # Get path to default biothings model. - biothings_path = DMP._get_base_schema_path(base_schema=None) + biothings_path = data_model_parser._get_base_schema_path(base_schema=None) assert os.path.basename(biothings_path) == "biothings.model.jsonld" @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) def test_get_model_type(self, helpers, data_model): - # Instantiate DMP, Data model parser. - data_model_parser = helpers.get_data_model_parser(path_to_data_model=path_to_data_model) + # Instantiate Data model parser. 
+ data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) # Check the data model type assert (data_model == 'example.model.csv') == (data_model_parser.model_type == 'CSV') @@ -78,7 +76,7 @@ def test_get_model_type(self, helpers, data_model): def test_parse_model(self, helpers, data_model): '''Test that the correct parser is called and that a dictionary is returned in the expected structure. ''' - # Instantiate DMP, Data model parser. + # Instantiate Data model parser. data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) # Parse Model model_dict = data_model_parser.parse_model() From 6071920b5febef4ee13804880793a373c2adbbd0 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 22 Sep 2023 15:08:06 -0700 Subject: [PATCH 086/239] add data_model_parser helpers function, and update relevant parts of test_schemas --- tests/conftest.py | 9 +++++++++ tests/test_schemas.py | 6 +++--- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 5d403185a..cf7ec95cd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -77,6 +77,15 @@ def get_data_model_explorer(path=None, *paths): DME = DataModelGraphExplorer(graph_data_model) return DME + + @staticmethod + def get_data_model_parser(data_model_name:str=None, *paths): + # Get path to data model + fullpath = Helpers.get_data_path(path=data_model_name, *paths) + # Instantiate DataModelParser + data_model_parser = DataModelParser(path_to_data_model=fullpath) + return data_model_parser + @staticmethod diff --git a/tests/test_schemas.py b/tests/test_schemas.py index fe35ebe93..350a8c307 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -18,13 +18,13 @@ logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) -def generate_graph_data_model(helpers, path_to_data_model): +def generate_graph_data_model(helpers, data_model_name): """ Simple helper function to generate a networkx graph data model from 
a CSV or JSONLD data model """ # Instantiate Parser - data_model_parser = DataModelParser(path_to_data_model=path_to_data_model) + data_model_parser = helpers.get_data_model_parser(data_model_name=data_model_name) #Parse Model parsed_data_model = data_model_parser.parse_model() @@ -42,7 +42,7 @@ def generate_graph_data_model(helpers, path_to_data_model): def DME(helpers, data_model_name='example.model.csv'): path_to_data_model = helpers.get_data_path("example.model.jsonld") - graph_data_model = generate_graph_data_model(helpers, path_to_data_model=path_to_data_model) + graph_data_model = generate_graph_data_model(helpers, data_model_name=path_to_data_model) DME = DataModelGraphExplorer(graph_data_model) yield DME From be5181d180ef7a031e031cfda3d12ed657ca9fa3 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 25 Sep 2023 09:51:24 -0700 Subject: [PATCH 087/239] add TestDataModelCsvParser tests --- tests/test_schemas.py | 60 ++++++++++++++++++++++++++++++++++++++----- 1 file changed, 53 insertions(+), 7 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 350a8c307..032486bc0 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -13,7 +13,7 @@ from schematic.schemas.data_model_relationships import DataModelRelationships from schematic.schemas.data_model_jsonld import DataModelJsonLD from schematic.schemas.data_model_json_schema import DataModelJSONSchema -from schematic.schemas.data_model_parser import DataModelParser +from schematic.schemas.data_model_parser import DataModelParser, DataModelCSVParser, DataModelJSONLDParser logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) @@ -46,6 +46,11 @@ def DME(helpers, data_model_name='example.model.csv'): DME = DataModelGraphExplorer(graph_data_model) yield DME +@pytest.fixture(name='dmcsvp') +def fixture_dm_csv_parser(): + yield DataModelCSVParser() + + class TestDataModelParser: def test_get_base_schema_path(self, helpers): return @@ -57,12 +62,53 @@ def 
test_parse_model(self): return class TestDataModelCsvParser: - def test_check_schema_definition(self): - return - def test_gather_csv_attributes_relationships(self): - return - def test_parse_csv_model(self ): - return + @pytest.mark.parametrize("data_model", ['example.model.csv'], ids=["csv"]) + def test_check_schema_definition(self, helpers, data_model, dmcsvp:DataModelCSVParser): + """If the csv schema contains the required headers, then this function should not return anything. Check that this is so. + """ + path_to_data_model = helpers.get_data_path(path=data_model) + model_df = load_df(path_to_data_model, data_model=True) + assert None == (dmcsvp.check_schema_definition(model_df = model_df)) + + @pytest.mark.parametrize("data_model", ['example.model.csv'], ids=["csv"]) + def test_gather_csv_attributes_relationships(self, helpers, data_model, dmcsvp:DataModelCSVParser): + """The output of the function is a attributes relationship dictionary, check that it is formatted properly. + """ + path_to_data_model = helpers.get_data_path(path=data_model) + model_df = load_df(path_to_data_model, data_model=True) + + # Get output of the function: + attr_rel_dict = dmcsvp.gather_csv_attributes_relationships(model_df=model_df) + + # Test the attr_rel_dict is formatted as expected: + # Get a key in the model + attribute_key = list(attr_rel_dict.keys())[0] + + # Check that the structure of the model dictionary conforms to expectations. + assert True == (type(attr_rel_dict) == dict) + assert True == (attribute_key in attr_rel_dict.keys()) + assert True == ('Relationships' in attr_rel_dict[attribute_key]) + assert True == ('Attribute' in attr_rel_dict[attribute_key]['Relationships']) + + @pytest.mark.parametrize("data_model", ['example.model.csv'], ids=["csv"]) + def test_parse_csv_model(self, helpers, data_model, dmcsvp:DataModelCSVParser): + """The output of the function is a attributes relationship dictionary, check that it is formatted properly. 
+ """ + path_to_data_model = helpers.get_data_path(path=data_model) + model_df = load_df(path_to_data_model, data_model=True) + + # Get output of the function: + model_dict = dmcsvp.parse_csv_model(path_to_data_model=path_to_data_model) + + # Test the model_dict is formatted as expected: + # Get a key in the model + attribute_key = list(model_dict.keys())[0] + + # Check that the structure of the model dictionary conforms to expectations. + assert True == (type(model_dict) == dict) + assert True == (attribute_key in model_dict.keys()) + assert True == ('Relationships' in model_dict[attribute_key]) + assert True == ('Attribute' in model_dict[attribute_key]['Relationships']) class TestDataModelJsonLdParser: def test_gather_jsonld_attributes_relationships(self): From 695ce49f31ff28e99e18e13842a92e6a80a11fca Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 25 Sep 2023 15:39:18 -0700 Subject: [PATCH 088/239] add helper to get_data_model_json_schema --- tests/conftest.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index cf7ec95cd..77fa71d92 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,6 +10,8 @@ from schematic.schemas.data_model_parser import DataModelParser from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer +from schematic.schemas.data_model_json_schema import DataModelJSONSchema + from schematic.configuration.configuration import CONFIG from schematic.utils.df_utils import load_df @@ -85,6 +87,30 @@ def get_data_model_parser(data_model_name:str=None, *paths): # Instantiate DataModelParser data_model_parser = DataModelParser(path_to_data_model=fullpath) return data_model_parser + + @staticmethod + def get_data_model_json_schema(data_model_name:str=None, *paths): + # Get path to data model + fullpath = Helpers.get_data_path(path=data_model_name, *paths) + + # Instantiate DataModelParser + data_model_parser = DataModelParser(path_to_data_model 
= fullpath) + + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + # Instantiate DataModelGraph + data_model_grapher = DataModelGraph(parsed_data_model) + + # Generate graph + graph_data_model = data_model_grapher.generate_data_model_graph() + + #Instantiate DataModelGraphExplorer + DME = DataModelGraphExplorer(graph_data_model) + + # Instantiate DataModelJsonSchema + dmjs = DataModelJSONSchema(fullpath, graph=graph_data_model) + return dmjs From 47efa565ea1b42ead505da384139a9feeda3491e Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 25 Sep 2023 15:39:49 -0700 Subject: [PATCH 089/239] add test TestDataModelJsonSchema::test_get_array_schema --- tests/test_schemas.py | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 350a8c307..c203018b0 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -195,8 +195,28 @@ def test_generate_edge(self,helpers): class TestDataModelJsonSchema: - def test_get_array_schema(self): - return + @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) + @pytest.mark.parametrize("node_range", [[], ['healthy'], ['healthy', 'cancer']], ids=['empty_range', "single_range", "multi_range"]) + @pytest.mark.parametrize("node_name", ['', 'Diagnosis'], ids=['empty_node_name', "Diagnosis_node_name"]) + @pytest.mark.parametrize("blank", [True, False], ids=["True_blank", "False_blank"]) + def test_get_array_schema(self, helpers, data_model, node_range, node_name, blank): + dmjs = helpers.get_data_model_json_schema(data_model_name=data_model) + array_schema = dmjs.get_array_schema(node_range=node_range, node_name=node_name, blank=blank) + + # check node_name is recoreded as the key to the array schema + assert node_name in array_schema + + # Check maxItems is the lenghth of node_range + assert len(node_range) == array_schema[node_name]['maxItems'] + + # Check that blank 
value is added at the end of node_range, if true + if blank: + assert True == (array_schema[node_name]['items']['enum'][-1]== '') + assert True == (len(array_schema[node_name]['items']['enum'])==len(node_range)+1) + else: + assert True == (array_schema[node_name]['items']['enum']== node_range) + assert True == (len(array_schema[node_name]['items']['enum'])==len(node_range)) + def test_get_non_blank_schema(self): return def test_get_json_validation_schema(self): From 1fc6fbcd9c76bf163f9825d9a2dfb2147c151cb4 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 25 Sep 2023 15:44:45 -0700 Subject: [PATCH 090/239] add test TestDataModelJsonSchema::test_get_non_blank_schema --- tests/test_schemas.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index c203018b0..89cf92fda 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -217,8 +217,15 @@ def test_get_array_schema(self, helpers, data_model, node_range, node_name, blan assert True == (array_schema[node_name]['items']['enum']== node_range) assert True == (len(array_schema[node_name]['items']['enum'])==len(node_range)) - def test_get_non_blank_schema(self): - return + @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) + @pytest.mark.parametrize("node_name", ['', 'Diagnosis'], ids=['empty_node_name', "Diagnosis_node_name"]) + def test_get_non_blank_schema(self, helpers, data_model, node_name): + dmjs = helpers.get_data_model_json_schema(data_model_name=data_model) + non_blank_schema = dmjs.get_non_blank_schema(node_name=node_name) + # check node_name is recoreded as the key to the array schema + assert node_name in non_blank_schema + assert non_blank_schema[node_name] == {"not": {"type": "null"}, "minLength": 1} + def test_get_json_validation_schema(self): return From 27028381a1547784d7380de522e96d29470ed45f Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 25 Sep 
2023 15:45:53 -0700 Subject: [PATCH 091/239] remove true statements in test_get_array_schema --- tests/test_schemas.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 89cf92fda..003c32aae 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -211,11 +211,11 @@ def test_get_array_schema(self, helpers, data_model, node_range, node_name, blan # Check that blank value is added at the end of node_range, if true if blank: - assert True == (array_schema[node_name]['items']['enum'][-1]== '') - assert True == (len(array_schema[node_name]['items']['enum'])==len(node_range)+1) + assert array_schema[node_name]['items']['enum'][-1]== '' + assert len(array_schema[node_name]['items']['enum'])==len(node_range)+1 else: - assert True == (array_schema[node_name]['items']['enum']== node_range) - assert True == (len(array_schema[node_name]['items']['enum'])==len(node_range)) + assert array_schema[node_name]['items']['enum']== node_range + assert len(array_schema[node_name]['items']['enum'])==len(node_range) @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) @pytest.mark.parametrize("node_name", ['', 'Diagnosis'], ids=['empty_node_name', "Diagnosis_node_name"]) From b3188ec183c6a1443eb2b273dd874a303ad3f34b Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 25 Sep 2023 15:56:56 -0700 Subject: [PATCH 092/239] add todo --- schematic/schemas/data_model_json_schema.py | 1 + 1 file changed, 1 insertion(+) diff --git a/schematic/schemas/data_model_json_schema.py b/schematic/schemas/data_model_json_schema.py index 5a65c40e4..dcecc8f6f 100644 --- a/schematic/schemas/data_model_json_schema.py +++ b/schematic/schemas/data_model_json_schema.py @@ -12,6 +12,7 @@ class DataModelJSONSchema: def __init__(self, jsonld_path: str, graph:nx.MultiDiGraph, ): + # TODO: Change jsonld_path to data_model_path (can work with CSV too) self.jsonld_path = 
jsonld_path self.graph = graph self.DME = DataModelGraphExplorer(self.graph) From 11504870294438a88795ba3371945f6c14f9d430 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 25 Sep 2023 15:58:16 -0700 Subject: [PATCH 093/239] add tests TestDataModelJsonSchema::test_get_range_schema --- tests/test_schemas.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 003c32aae..0eac66936 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -226,6 +226,25 @@ def test_get_non_blank_schema(self, helpers, data_model, node_name): assert node_name in non_blank_schema assert non_blank_schema[node_name] == {"not": {"type": "null"}, "minLength": 1} + @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) + @pytest.mark.parametrize("node_range", [[], ['healthy'], ['healthy', 'cancer']], ids=['empty_range', "single_range", "multi_range"]) + @pytest.mark.parametrize("node_name", ['', 'Diagnosis'], ids=['empty_node_name', "Diagnosis_node_name"]) + @pytest.mark.parametrize("blank", [True, False], ids=["True_blank", "False_blank"]) + def test_get_range_schema(self, helpers, data_model, node_range, node_name, blank): + dmjs = helpers.get_data_model_json_schema(data_model_name=data_model) + range_schema = dmjs.get_range_schema(node_range=node_range, node_name=node_name, blank=blank) + + # check node_name is recoreded as the key to the array schema + assert node_name in range_schema + + # Check that blank value is added at the end of node_range, if true + if blank: + assert range_schema[node_name]['enum'][-1]== '' + assert len(range_schema[node_name]['enum'])==len(node_range)+1 + else: + assert range_schema[node_name]['enum']== node_range + assert len(range_schema[node_name]['enum'])==len(node_range) + def test_get_json_validation_schema(self): return From 16d8d15dc587cb6331c3169b5dc33a38184438d3 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: 
Tue, 26 Sep 2023 12:34:16 -0700 Subject: [PATCH 094/239] add tests TestDataModelJsonSchema::test_get_json_validation_schema --- tests/test_schemas.py | 26 ++++++++++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 0eac66936..afeca82bd 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -245,8 +245,30 @@ def test_get_range_schema(self, helpers, data_model, node_range, node_name, blan assert range_schema[node_name]['enum']== node_range assert len(range_schema[node_name]['enum'])==len(node_range) - def test_get_json_validation_schema(self): - return + @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) + @pytest.mark.parametrize("source_node", ['', 'Patient'], ids=['empty_node_name', "patient_source"]) + @pytest.mark.parametrize("schema_name", ['', 'Test_Schema_Name'], ids=['empty_schema_name', "schema_name"]) + def test_get_json_validation_schema(self, helpers, data_model, source_node, schema_name): + dmjs = helpers.get_data_model_json_schema(data_model_name=data_model) + + try: + # Get validation schema + json_validation_schema = dmjs.get_json_validation_schema(source_node=source_node, schema_name=schema_name) + + # Check Keys in Schema + expected_jvs_keys = ['$schema', '$id', 'title', 'type', 'properties', 'required', 'allOf'] + actual_jvs_keys = list( json_validation_schema.keys()) + assert expected_jvs_keys == actual_jvs_keys + + # Check title + assert schema_name == json_validation_schema['title'] + + # Check contents of validation schema + assert 'Diagnosis' in json_validation_schema['properties'] + assert json_validation_schema['properties']['Diagnosis'] == {'enum': ['Cancer', 'Healthy']} + except: + # Should only fail if no source node is provided. 
+ assert source_node == '' class TestDataModelJsonLd: def test_base_jsonld_template(self): From b273ac4e242f15e28bac4e2c816b242f2d33642f Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 26 Sep 2023 13:03:09 -0700 Subject: [PATCH 095/239] add test TestDataModelJsonLdParser::test_gather_jsonld_attributes_relationships --- tests/test_schemas.py | 34 +++++++++++++++++++++++++++++----- 1 file changed, 29 insertions(+), 5 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 350a8c307..e21b2cc1c 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -4,8 +4,9 @@ import pandas as pd import pytest -#from schematic.schemas import df_parser from schematic.utils.df_utils import load_df +from schematic.utils.io_utils import load_json + from schematic.schemas.data_model_graph import DataModelGraph from schematic.schemas.data_model_nodes import DataModelNodes from schematic.schemas.data_model_edges import DataModelEdges @@ -13,7 +14,7 @@ from schematic.schemas.data_model_relationships import DataModelRelationships from schematic.schemas.data_model_jsonld import DataModelJsonLD from schematic.schemas.data_model_json_schema import DataModelJSONSchema -from schematic.schemas.data_model_parser import DataModelParser +from schematic.schemas.data_model_parser import DataModelParser, DataModelCSVParser, DataModelJSONLDParser logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) @@ -38,6 +39,10 @@ def generate_graph_data_model(helpers, data_model_name): return graph_data_model +@pytest.fixture(name='dmjsonldp') +def fixture_dm_jsonld_parser(): + yield DataModelJSONLDParser() + @pytest.fixture def DME(helpers, data_model_name='example.model.csv'): path_to_data_model = helpers.get_data_path("example.model.jsonld") @@ -65,9 +70,28 @@ def test_parse_csv_model(self ): return class TestDataModelJsonLdParser: - def test_gather_jsonld_attributes_relationships(self): - return - def test_parse_jsonld_model(self): + 
@pytest.mark.parametrize("data_model", ['example.model.jsonld'], ids=["jsonld"]) + def test_gather_jsonld_attributes_relationships(self, helpers, data_model, dmjsonldp): + """The output of the function is a attributes relationship dictionary, check that it is formatted properly. + """ + path_to_data_model = helpers.get_data_path(path=data_model) + model_jsonld = load_json(path_to_data_model) + + # Get output of the function: + attr_rel_dict = dmjsonldp.gather_jsonld_attributes_relationships(model_jsonld=model_jsonld['@graph']) + + # Test the attr_rel_dict is formatted as expected: + # Get a key in the model + attribute_key = list(attr_rel_dict.keys())[0] + + # Check that the structure of the model dictionary conforms to expectations. + assert True == (type(attr_rel_dict) == dict) + assert True == (attribute_key in attr_rel_dict.keys()) + assert True == ('Relationships' in attr_rel_dict[attribute_key]) + assert True == ('Attribute' in attr_rel_dict[attribute_key]['Relationships']) + + @pytest.mark.parametrize("data_model", ['example.model.jsonld'], ids=["jsonld"]) + def test_parse_jsonld_model(self, data_model): return class TestDataModelRelationships: From 27b67ef52e05843b18352335d5921630e2315024 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 26 Sep 2023 13:06:17 -0700 Subject: [PATCH 096/239] add test TestDataModelJsonLdParser::test_parse_jsonld_model --- tests/test_schemas.py | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index e21b2cc1c..b0b2ce727 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -91,8 +91,24 @@ def test_gather_jsonld_attributes_relationships(self, helpers, data_model, dmjso assert True == ('Attribute' in attr_rel_dict[attribute_key]['Relationships']) @pytest.mark.parametrize("data_model", ['example.model.jsonld'], ids=["jsonld"]) - def test_parse_jsonld_model(self, data_model): - return + def test_parse_jsonld_model(self, helpers, 
data_model, dmjsonldp): + """The output of the function is a attributes relationship dictionary, check that it is formatted properly. + """ + path_to_data_model = helpers.get_data_path(path=data_model) + model_jsonld = load_json(path_to_data_model) + + # Get output of the function: + model_dict = dmjsonldp.parse_jsonld_model(path_to_data_model=path_to_data_model) + + # Test the model_dict is formatted as expected: + # Get a key in the model + attribute_key = list(model_dict.keys())[0] + + # Check that the structure of the model dictionary conforms to expectations. + assert True == (type(model_dict) == dict) + assert True == (attribute_key in model_dict.keys()) + assert True == ('Relationships' in model_dict[attribute_key]) + assert True == ('Attribute' in model_dict[attribute_key]['Relationships']) class TestDataModelRelationships: def test_define_data_model_relationships(self): From b4dc47a7f344b2ebeaf0c045235fa581005f5324 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 26 Sep 2023 13:46:32 -0700 Subject: [PATCH 097/239] add tests TestDataModelGraph::test_generate_data_model_graph --- tests/test_schemas.py | 30 ++++++++++++++++++++++++++++-- 1 file changed, 28 insertions(+), 2 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 350a8c307..13e31381f 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -81,8 +81,34 @@ def test_define_value_relationships(self): return class TestDataModelGraph: - def test_generate_data_model_graph(self): - return + @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) + def test_generate_data_model_graph(self, helpers, data_model): + '''Check that data model graph is constructed properly, requires calling various classes. + TODO: In another test, check conditional dependencies. 
+ ''' + graph = generate_graph_data_model(helpers=helpers, data_model_name=data_model) + + #Check that some edges are present as expected: + assert True == (('FamilyHistory', 'Breast') in graph.edges('FamilyHistory')) + assert True == (('BulkRNA-seqAssay', 'Biospecimen') in graph.edges('BulkRNA-seqAssay')) + assert ['Ab', 'Cd', 'Ef', 'Gh'] == [k for k,v in graph['CheckList'].items() for vk, vv in v.items() if vk == 'rangeValue'] + + # Check that all relationships recorded between 'CheckList' and 'Ab' are present + assert True == ('rangeValue' and 'parentOf' in graph['CheckList']['Ab']) + assert False == ('requiresDependency' in graph['CheckList']['Ab']) + + # Check nodes: + assert True == ('Patient' in graph.nodes) + assert True == ('GRCh38' in graph.nodes) + + + # Check weights + assert True == (graph['Sex']['Female']['rangeValue']['weight'] == 0) + assert True == (graph['MockComponent']['CheckRegexFormat']['requiresDependency']['weight'] == 4) + + # Check Edge directions + assert 4 == (len(graph.out_edges('TissueStatus'))) + assert 2 == (len(graph.in_edges('TissueStatus'))) class TestDataModelGraphExplorer: def test_find_properties(self): From 2516d4957c2b368ace6e2ee4ee3f034cac3e6d4c Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 2 Oct 2023 14:55:40 -0700 Subject: [PATCH 098/239] move get_data_model_nodes to test_schemas function --- tests/conftest.py | 3 +-- tests/test_schemas.py | 38 ++++++++++++++++++++++++++++++++++++-- 2 files changed, 37 insertions(+), 4 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index cf7ec95cd..6c263d38f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,6 +10,7 @@ from schematic.schemas.data_model_parser import DataModelParser from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer +from schematic.schemas.data_model_nodes import DataModelNodes from schematic.configuration.configuration import CONFIG from schematic.utils.df_utils import load_df @@ -85,8 +86,6 @@ def 
get_data_model_parser(data_model_name:str=None, *paths): # Instantiate DataModelParser data_model_parser = DataModelParser(path_to_data_model=fullpath) return data_model_parser - - @staticmethod def get_python_version(self): diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 350a8c307..46a2c1f3d 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -38,6 +38,15 @@ def generate_graph_data_model(helpers, data_model_name): return graph_data_model +def generate_data_model_nodes(helpers, data_model_name): + # Instantiate Parser + data_model_parser = helpers.get_data_model_parser(data_model_name=data_model_name) + # Parse Model + parsed_data_model = data_model_parser.parse_model() + # Instantiate DataModelNodes + data_model_nodes = DataModelNodes(attribute_relationships_dict=parsed_data_model) + return data_model_nodes + @pytest.fixture def DME(helpers, data_model_name='example.model.csv'): path_to_data_model = helpers.get_data_path("example.model.jsonld") @@ -172,8 +181,33 @@ def test_sub_schema_graph(self): return class TestDataModelNodes: - def test_gather_nodes(self): - return + @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) + def test_gather_nodes(self, helpers, data_model): + # Instantiate Parser + data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) + + # Parse Model + attr_rel_dictionary = data_model_parser.parse_model() + + # Instantiate DataModelNodes + data_model_nodes = generate_data_model_nodes(data_model_name=data_model) + + attr_info = ('Patient', attr_rel_dictionary['Patient']) + nodes = data_model_nodes.gather_nodes(attr_info=attr_info) + + # Make sure there are no repeat nodes + assert len(nodes) == len(set(nodes)) + + # Make sure the nodes returned conform to expectations (values and order) + expected_nodes = ['Patient', 'Patient ID', 'Sex', 'Year of Birth', 'Diagnosis', 'Component', 'DataType'] + assert nodes == expected_nodes + + # Ensure 
order is tested. + reordered_nodes = nodes.copy() + reordered_nodes.remove('Patient') + reordered_nodes.append('Patient') + assert reordered_nodes != expected_nodes + def test_gather_all_nodes(self): return def test_get_rel_node_dict_info(self): From 368dada55c9cc54edfd06b1535c5c0ad7a2f5ff2 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 3 Oct 2023 16:46:11 -0700 Subject: [PATCH 099/239] add test_gather_all_nodes --- tests/test_schemas.py | 42 +++++++++++++++++++++++++++++++++++++----- 1 file changed, 37 insertions(+), 5 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 46a2c1f3d..742f7c1bd 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -18,6 +18,11 @@ logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) +DATA_MODEL_DICT = { + 'example.model.csv': "CSV", + 'example.model.jsonld': "JSONLD" +} + def generate_graph_data_model(helpers, data_model_name): """ Simple helper function to generate a networkx graph data model from a CSV or JSONLD data model @@ -181,7 +186,7 @@ def test_sub_schema_graph(self): return class TestDataModelNodes: - @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) def test_gather_nodes(self, helpers, data_model): # Instantiate Parser data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) @@ -190,7 +195,7 @@ def test_gather_nodes(self, helpers, data_model): attr_rel_dictionary = data_model_parser.parse_model() # Instantiate DataModelNodes - data_model_nodes = generate_data_model_nodes(data_model_name=data_model) + data_model_nodes = generate_data_model_nodes(helpers, data_model_name=data_model) attr_info = ('Patient', attr_rel_dictionary['Patient']) nodes = data_model_nodes.gather_nodes(attr_info=attr_info) @@ -199,7 +204,12 @@ def test_gather_nodes(self, helpers, data_model): assert 
len(nodes) == len(set(nodes)) # Make sure the nodes returned conform to expectations (values and order) - expected_nodes = ['Patient', 'Patient ID', 'Sex', 'Year of Birth', 'Diagnosis', 'Component', 'DataType'] + ## The parsing records display names for relationships for CSV and labels for JSONLD, so the expectations are different between the two. + if DATA_MODEL_DICT[data_model]=='CSV': + expected_nodes = ['Patient', 'Patient ID', 'Sex', 'Year of Birth', 'Diagnosis', 'Component', 'DataType'] + elif DATA_MODEL_DICT[data_model] == 'JSONLD': + expected_nodes = ['Patient', 'PatientID', 'Sex', 'YearofBirth', 'Diagnosis', 'Component', 'DataType'] + assert nodes == expected_nodes # Ensure order is tested. @@ -208,8 +218,30 @@ def test_gather_nodes(self, helpers, data_model): reordered_nodes.append('Patient') assert reordered_nodes != expected_nodes - def test_gather_all_nodes(self): - return + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + def test_gather_all_nodes(self, helpers, data_model): + # Instantiate Parser + data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) + + # Parse Model + attr_rel_dictionary = data_model_parser.parse_model() + + # Instantiate DataModelNodes + data_model_nodes = generate_data_model_nodes(helpers, data_model_name=data_model) + + all_nodes = data_model_nodes.gather_all_nodes(attr_rel_dict=attr_rel_dictionary) + + # Make sure there are no repeat nodes + assert len(all_nodes) == len(set(all_nodes)) + + # Check that nodes from first entry, are recoreded in order in all_nodes + first_attribute = list(attr_rel_dictionary.keys())[0] + attr_info = (first_attribute, attr_rel_dictionary[first_attribute]) + expected_starter_nodes = data_model_nodes.gather_nodes(attr_info=attr_info) + actual_starter_nodes = all_nodes[0:len(expected_starter_nodes)] + + assert actual_starter_nodes == expected_starter_nodes + def test_get_rel_node_dict_info(self): return def 
test_get_data_model_properties(self): From f0d030be21e323c991f91359fe3a23160bd1f33a Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 4 Oct 2023 09:17:51 -0700 Subject: [PATCH 100/239] add tets for test_get_rel_node_dict and test_get_data_model_properties --- tests/test_schemas.py | 61 ++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 57 insertions(+), 4 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 742f7c1bd..8589e7402 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -3,6 +3,7 @@ import pandas as pd import pytest +import random #from schematic.schemas import df_parser from schematic.utils.df_utils import load_df @@ -60,6 +61,13 @@ def DME(helpers, data_model_name='example.model.csv'): DME = DataModelGraphExplorer(graph_data_model) yield DME +@pytest.fixture(name='relationships') +def get_relationships(helpers): + DMR = DataModelRelationships() + relationships_dict = DMR.relationships_dictionary + relationships = list(relationships_dict.keys()) + yield relationships + class TestDataModelParser: def test_get_base_schema_path(self, helpers): return @@ -235,6 +243,7 @@ def test_gather_all_nodes(self, helpers, data_model): assert len(all_nodes) == len(set(all_nodes)) # Check that nodes from first entry, are recoreded in order in all_nodes + # Only check first entry, bc subsequent ones might be in the same order as would be gathered with gather_nodes if it contained a node that was already recorded. 
first_attribute = list(attr_rel_dictionary.keys())[0] attr_info = (first_attribute, attr_rel_dictionary[first_attribute]) expected_starter_nodes = data_model_nodes.gather_nodes(attr_info=attr_info) @@ -242,10 +251,54 @@ def test_gather_all_nodes(self, helpers, data_model): assert actual_starter_nodes == expected_starter_nodes - def test_get_rel_node_dict_info(self): - return - def test_get_data_model_properties(self): - return + def test_get_rel_node_dict_info(self, helpers, relationships): + # Instantiate Parser + data_model_parser = helpers.get_data_model_parser(data_model_name='example.model.csv') + + # Parse Model + #attr_rel_dictionary = data_model_parser.parse_model() + + # Instantiate DataModelNodes + data_model_nodes = generate_data_model_nodes(helpers, data_model_name='example.model.csv') + + for relationship in relationships: + rel_dict_info = data_model_nodes.get_rel_node_dict_info(relationship) + if rel_dict_info: + assert type(rel_dict_info[0]) == str + assert type(rel_dict_info[1]) == dict + assert 'default' in rel_dict_info[1].keys() + + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + def test_get_data_model_properties(self, helpers, data_model): + # Instantiate Parser + data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) + + # Parse Model + attr_rel_dictionary = data_model_parser.parse_model() + + # Instantiate DataModelNodes + data_model_nodes = generate_data_model_nodes(helpers, data_model_name=data_model) + + # Get properties in the data model + data_model_properties = data_model_nodes.get_data_model_properties(attr_rel_dictionary) + + # In the current example model, there are no properties, would need to update this section if properties are added. + assert data_model_properties == [] + + # Update the attr_rel_dictionary to add a property, then see if its found. 
+ # Get a random relationship key from the attr_rel_dictionary: + all_keys = list(attr_rel_dictionary.keys()) + random_index = len(all_keys)-1 + rel_key = all_keys[random.randint(0, random_index)] + + # Modify the contents of that relationship + attr_rel_dictionary[rel_key]['Relationships']['Properties'] = ['TestProperty'] + + # Get properties in the modified data model + data_model_properties = data_model_nodes.get_data_model_properties(attr_rel_dictionary) + + assert data_model_properties == ['TestProperty'] + def test_get_entry_type(self): return def test_run_rel_functions(self): From ebe11b2809ea05d82c782199fa15a5c4c53f3bdb Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 9 Oct 2023 11:44:57 -0700 Subject: [PATCH 101/239] add test_run_rel_functions --- tests/test_schemas.py | 144 ++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 138 insertions(+), 6 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 8589e7402..2cc54a507 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -7,6 +7,8 @@ #from schematic.schemas import df_parser from schematic.utils.df_utils import load_df +from schematic.utils.schema_utils import get_label_from_display_name, get_attribute_display_name_from_label, convert_bool_to_str, parse_validation_rules + from schematic.schemas.data_model_graph import DataModelGraph from schematic.schemas.data_model_nodes import DataModelNodes from schematic.schemas.data_model_edges import DataModelEdges @@ -23,6 +25,22 @@ 'example.model.csv': "CSV", 'example.model.jsonld': "JSONLD" } +def test_fake_func(): + return + +REL_FUNC_DICT = { + 'get_attribute_display_name_from_label':get_attribute_display_name_from_label, + 'parse_validation_rules': parse_validation_rules, + 'get_label_from_display_name': get_label_from_display_name, + 'convert_bool_to_str': convert_bool_to_str, + 'test_fake_func': test_fake_func, +} +TEST_DN_DICT = {'Bio Things': {'class': 'BioThings', + 'property': 'bioThings'}, + 'bio 
things': {'class': 'Biothings', + 'property': 'biothings'}, + } + def generate_graph_data_model(helpers, data_model_name): """ @@ -53,6 +71,8 @@ def generate_data_model_nodes(helpers, data_model_name): data_model_nodes = DataModelNodes(attribute_relationships_dict=parsed_data_model) return data_model_nodes + + @pytest.fixture def DME(helpers, data_model_name='example.model.csv'): path_to_data_model = helpers.get_data_path("example.model.jsonld") @@ -255,9 +275,6 @@ def test_get_rel_node_dict_info(self, helpers, relationships): # Instantiate Parser data_model_parser = helpers.get_data_model_parser(data_model_name='example.model.csv') - # Parse Model - #attr_rel_dictionary = data_model_parser.parse_model() - # Instantiate DataModelNodes data_model_nodes = generate_data_model_nodes(helpers, data_model_name='example.model.csv') @@ -299,9 +316,124 @@ def test_get_data_model_properties(self, helpers, data_model): assert data_model_properties == ['TestProperty'] - def test_get_entry_type(self): - return - def test_run_rel_functions(self): + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + def test_get_entry_type(self, helpers, data_model): + + # Instantiate Parser + data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) + + # Parse Model + attr_rel_dictionary = data_model_parser.parse_model() + + # Update the attr_rel_dictionary to add a property, then see if it is assigned the correct entry type. 
+ # Get a random relationship key from the attr_rel_dictionary: + all_keys = list(attr_rel_dictionary.keys()) + random_index = len(all_keys)-1 + rel_key = all_keys[random.randint(0, random_index)] + + # Modify the contents of that relationship + attr_rel_dictionary[rel_key]['Relationships']['Properties'] = ['TestProperty'] + + # Instantiate DataModelNodes + # Note: Get entry type uses self, so I will have to instantiate DataModelNodes outside of the generate_data_model_nodes function + data_model_nodes = DataModelNodes(attribute_relationships_dict=attr_rel_dictionary) + + # In the example data model all attributes should be classes. + for attr in attr_rel_dictionary.keys(): + entry_type = data_model_nodes.get_entry_type(attr) + assert entry_type == 'class' + + # Check that the added property is properly loaded as a property + assert data_model_nodes.get_entry_type('TestProperty') == 'property' + + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + @pytest.mark.parametrize("rel_func", list(REL_FUNC_DICT.values()), ids=list(REL_FUNC_DICT.keys())) + @pytest.mark.parametrize("test_dn", list(TEST_DN_DICT.keys()), ids=list(TEST_DN_DICT.keys())) + @pytest.mark.parametrize("test_bool", ['True', 'False', True, False, 'kldjk'], ids=['True_str', 'False_str', 'True_bool', 'False_bool', 'Random_str']) + def test_run_rel_functions(self, helpers, data_model, rel_func, test_dn, test_bool): + # Call each relationship function to ensure that it is returning the desired result. + # Note all the called functions will also be tested in other unit tests. 
+ # Instantiate Parser + data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) + + # Parse Model + attr_rel_dictionary = data_model_parser.parse_model() + + # Instantiate DataModelNodes + data_model_nodes = generate_data_model_nodes(helpers, data_model_name=data_model) + + # Run functions the same way they are called in run_rel_functions: + if rel_func == get_attribute_display_name_from_label: + expected_display_names = list(attr_rel_dictionary.keys()) + returned_display_names = [data_model_nodes.run_rel_functions( + rel_func=get_attribute_display_name_from_label, + node_display_name=ndn, + attr_relationships=attr_rel_dictionary) + for ndn in expected_display_names] + + assert expected_display_names == returned_display_names + + elif rel_func == parse_validation_rules: + # Find attributes with validation rules + # Gather Validation Rules + vrs = [] + for k, v in attr_rel_dictionary.items(): + if 'Validation Rules' in v['Relationships'].keys(): + vrs.append(v['Relationships']['Validation Rules']) + parsed_vrs= [] + for attr in attr_rel_dictionary.keys(): + attr_relationships = attr_rel_dictionary[attr]['Relationships'] + if 'Validation Rules' in attr_relationships: + parsed_vrs.append(data_model_nodes.run_rel_functions( + rel_func=parse_validation_rules, + attr_relationships=attr_relationships, + csv_header='Validation Rules')) + + assert len(vrs) == len(parsed_vrs) + if DATA_MODEL_DICT[data_model]=='CSV': + assert vrs != parsed_vrs + elif DATA_MODEL_DICT[data_model]=='JSONLD': + # JSONLDs already contain parsed validaiton rules so the raw vrs will match the parsed_vrs + assert vrs == parsed_vrs + + # For all validation rules where there are multiple rules, make sure they have been split as expected. + for i, pvr in enumerate(parsed_vrs): + delim_count = vrs[i][0].count('::') + if delim_count: + assert len(pvr) == delim_count+1 + + elif rel_func == get_label_from_display_name: + # For a limited set check label is returned as expected. 
+ for entry_type, expected_value in TEST_DN_DICT[test_dn].items(): + actual_value = data_model_nodes.run_rel_functions( + rel_func=get_label_from_display_name, + node_display_name=test_dn, + entry_type=entry_type, + ) + assert actual_value == expected_value + elif rel_func == convert_bool_to_str: + # return nothing if random string provided. + csv_header='Required' + attr_relationships = {csv_header:test_bool} + actual_conversion = data_model_nodes.run_rel_functions( + rel_func=convert_bool_to_str, + csv_header=csv_header, + attr_relationships=attr_relationships, + ) + if 'true' in str(test_bool).lower(): + assert actual_conversion==True + elif 'false' in str(test_bool).lower(): + assert actual_conversion==False + else: + assert actual_conversion==None + else: + # If the function passed is not currently supported, should hit an error. + try: + data_model_nodes.run_rel_functions(rel_func=test_fake_func) + convert_worked = False + except: + convert_worked = True + assert convert_worked==True return def test_generate_node_dict(self): return From aab69f75956c67b9df35b31967f7a1eb4ae7875c Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 9 Oct 2023 11:45:20 -0700 Subject: [PATCH 102/239] add TODO to DataModelNodes --- schematic/schemas/data_model_nodes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index 8e50401ae..4e27bbcc1 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -79,7 +79,7 @@ def get_rel_node_dict_info(self, relationship: str) -> tuple: Returns: rel_key, str: relationship node label rel_node_dict, dict: node_attr_dict, from relationships dictionary for a given relationship - + TODO: Move to data_model_relationships. 
""" for k,v in self.data_model_relationships.relationships_dictionary.items(): if k == relationship: From 0036db0e5df480504a1380ea3a89ed7348ce618f Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Mon, 25 Sep 2023 15:02:45 -0700 Subject: [PATCH 103/239] add docstring for cases to test --- tests/test_schemas.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 0060fa687..13ca0e566 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -274,7 +274,20 @@ def test_generate_node(self): return class TestDataModelEdges: - def test_generate_edge(self,helpers): + """ + Cases to test + Where node == attribute_display_name + Weights + domain includes weights + list weights + single element weights + Edges + subClassOf/domainIncludes relationship edge + any other relationship edge + rangeIncludes relationship edge + + """ + def test_generate_edge(self, helpers): return From a374f9992a5eac9fade383c70f7ef6adcf56ce11 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 28 Sep 2023 11:22:10 -0700 Subject: [PATCH 104/239] add self loop to validation test model --- tests/data/validator_dag_test.model.csv | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/data/validator_dag_test.model.csv b/tests/data/validator_dag_test.model.csv index 10da28991..3184250ee 100644 --- a/tests/data/validator_dag_test.model.csv +++ b/tests/data/validator_dag_test.model.csv @@ -3,7 +3,7 @@ Patient,,,"Patient ID, Sex, Year of Birth, Diagnosis, Component",,FALSE,DataType Patient ID,,,Patient,,TRUE,DataProperty,,, Sex,,"Female, Male, Other",,,TRUE,DataProperty,,, Year of Birth,,,,,FALSE,DataProperty,,, -Diagnosis,,"Healthy, Cancer",,,TRUE,DataProperty,,, +Diagnosis,,"Healthy, Cancer, Diagnosis",,,TRUE,DataProperty,,, Cancer,,,"Cancer Type, Family History",,FALSE,ValidValue,,, Cancer 
Type,,"Breast, Colorectal, Lung, Prostate, Skin",,,TRUE,DataProperty,,, Family History,,"Breast, Colorectal, Lung, Prostate, Skin",Cancer Type,,TRUE,DataProperty,,,list strict @@ -41,4 +41,4 @@ Check Date,,,,,TRUE,DataProperty,,,date Check NA,,,,,TRUE,DataProperty,,,int::IsNA MockRDB,,,"Component, MockRDB_id, SourceManifest",,FALSE,DataType,,, MockRDB_id,,,,,TRUE,DataProperty,,,int -SourceManifest,,,,,TRUE,DataProperty,,, \ No newline at end of file +SourceManifest,,,,,TRUE,DataProperty,,, From 9c7bd0615707d2a5d8929b81ed42e876326c3af5 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 28 Sep 2023 11:23:12 -0700 Subject: [PATCH 105/239] add slots for other tests --- tests/test_schemas.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 13ca0e566..a4f8bb8fc 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -287,8 +287,16 @@ class TestDataModelEdges: rangeIncludes relationship edge """ + def test_skip_edge(self, helpers): + return + def test_generate_edge(self, helpers): return + + def test_generate_weights(self, helpers): + return + + class TestDataModelJsonSchema: From 247b63a330d29dffdf4cdd975abc5b9f55fc0243 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 28 Sep 2023 11:23:33 -0700 Subject: [PATCH 106/239] add test to ensure self loop edges are not added --- tests/test_schemas.py | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index a4f8bb8fc..2a4b0974c 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -3,6 +3,13 @@ import pandas as pd import pytest +import networkx as nx + +from schematic.schemas.data_model_edges import DataModelEdges +from schematic.schemas.data_model_nodes import DataModelNodes +from schematic.schemas.data_model_relationships import ( + DataModelRelationships + ) 
#from schematic.schemas import df_parser from schematic.utils.df_utils import load_df @@ -288,6 +295,35 @@ class TestDataModelEdges: """ def test_skip_edge(self, helpers): + G = nx.MultiDiGraph() + node = "Diagnosis" + + # Instantiate Parser + data_model_parser = DataModelParser(helpers.get_data_path("validator_dag_test.model.csv")) + + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + dmr = DataModelRelationships() + dmn = DataModelNodes(parsed_data_model) + dme = DataModelEdges() + + + edge_relationships = dmr.define_edge_relationships() + all_nodes = dmn.gather_all_nodes(attr_rel_dict=parsed_data_model) + + assert node in all_nodes + + node_dict = {} + + node_dict = dmn.generate_node_dict(node, parsed_data_model) + node_dict[node] = node_dict + G = dmn.generate_node(G, node_dict) + + new_G = dme.generate_edge(G, node, node_dict, {node:parsed_data_model[node]}, edge_relationships) + + assert G == new_G + return def test_generate_edge(self, helpers): From 269ca0f01b449dcf24a4c9344c3673cba4f8fdb7 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 28 Sep 2023 14:59:03 -0700 Subject: [PATCH 107/239] change edge checking --- tests/test_schemas.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 2a4b0974c..13c476d82 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -320,9 +320,11 @@ def test_skip_edge(self, helpers): node_dict[node] = node_dict G = dmn.generate_node(G, node_dict) - new_G = dme.generate_edge(G, node, node_dict, {node:parsed_data_model[node]}, edge_relationships) + before_edges = G.edges - assert G == new_G + G = dme.generate_edge(G, node, node_dict, {node:parsed_data_model[node]}, edge_relationships) + + assert before_edges == G.edges return From dcf4872c0576ba8b08f33eebc002dee2cd7581ce Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 28 Sep 
2023 15:01:01 -0700 Subject: [PATCH 108/239] make a deepcopy --- tests/test_schemas.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 13c476d82..1aa416f37 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -4,6 +4,7 @@ import pandas as pd import pytest import networkx as nx +from copy import deepcopy from schematic.schemas.data_model_edges import DataModelEdges from schematic.schemas.data_model_nodes import DataModelNodes @@ -320,7 +321,7 @@ def test_skip_edge(self, helpers): node_dict[node] = node_dict G = dmn.generate_node(G, node_dict) - before_edges = G.edges + before_edges = deepcopy(G.edges) G = dme.generate_edge(G, node, node_dict, {node:parsed_data_model[node]}, edge_relationships) From bedda9317df515b27d14dacce080b8bba134d310 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Fri, 29 Sep 2023 12:41:07 -0700 Subject: [PATCH 109/239] add test for adding edges --- tests/test_schemas.py | 44 ++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 43 insertions(+), 1 deletion(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 1aa416f37..632fd8b40 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -329,7 +329,49 @@ def test_skip_edge(self, helpers): return - def test_generate_edge(self, helpers): + @pytest.mark.parametrize("node_to_add, edge_relationship", + [("DataType", "parentOf"), + ("Female", "parentOf"), + ("Sex","requiresDependency")], + ids=["subClassOf", + "Valid Value", + "all others" + ]) + def test_generate_edge(self, helpers, node_to_add, edge_relationship): + G = nx.MultiDiGraph() + + # Instantiate Parser + data_model_parser = DataModelParser(helpers.get_data_path("validator_dag_test.model.csv")) + + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + dmr = DataModelRelationships() + dmn = DataModelNodes(parsed_data_model) + dme = DataModelEdges() + + + 
edge_relationships = dmr.define_edge_relationships() + all_nodes = dmn.gather_all_nodes(attr_rel_dict=parsed_data_model) + + assert node_to_add in all_nodes + + all_node_dict = {} + for node in all_nodes: + node_dict = dmn.generate_node_dict(node, parsed_data_model) + all_node_dict[node] = node_dict + G = dmn.generate_node(G, node_dict) + + before_edges = deepcopy(G.edges) + + G = dme.generate_edge(G, node_to_add, all_node_dict, parsed_data_model, edge_relationships) + + assert G.edges != before_edges + + relationship_df = pd.DataFrame(G.edges, columns= ['u', 'v', 'edge']) + + assert (relationship_df['edge'] == edge_relationship).any() + return def test_generate_weights(self, helpers): From 1be618d7dc12a999d3987c0ca45252f4cd058e76 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Fri, 29 Sep 2023 15:00:57 -0700 Subject: [PATCH 110/239] WIP add test for edge weights --- tests/test_schemas.py | 37 ++++++++++++++++++++++++++++++++++++- 1 file changed, 36 insertions(+), 1 deletion(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 632fd8b40..f7f4daf68 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -373,8 +373,43 @@ def test_generate_edge(self, helpers, node_to_add, edge_relationship): assert (relationship_df['edge'] == edge_relationship).any() return + + @pytest.mark.parametrize("node_to_add, expected_weight", + [("Patient ID", 1)], + ids=["list"]) + def test_generate_weights(self, helpers, node_to_add, expected_weight): + G = nx.MultiDiGraph() + + # Instantiate Parser + data_model_parser = DataModelParser(helpers.get_data_path("validator_dag_test.model.csv")) + + #Parse Model + parsed_data_model = data_model_parser.parse_model() + + dmr = DataModelRelationships() + dmn = DataModelNodes(parsed_data_model) + dme = DataModelEdges() + + + edge_relationships = dmr.define_edge_relationships() + all_nodes = dmn.gather_all_nodes(attr_rel_dict=parsed_data_model) + + assert node_to_add in 
all_nodes + + all_node_dict = {} + for node in all_nodes: + node_dict = dmn.generate_node_dict(node, parsed_data_model) + all_node_dict[node] = node_dict + G = dmn.generate_node(G, node_dict) + + before_edges = deepcopy(G.edges) + + G = dme.generate_edge(G, node_to_add, all_node_dict, parsed_data_model, edge_relationships) + + assert G.edges != before_edges + + print(G.edges.data()) - def test_generate_weights(self, helpers): return From 02b1c61c6d71dad7e8590790f593fbaf83b4d42b Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 10 Oct 2023 09:59:08 -0700 Subject: [PATCH 111/239] add comments --- tests/test_schemas.py | 37 ++++++++++++++++++++++++++++++------- 1 file changed, 30 insertions(+), 7 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index f7f4daf68..4ce845c46 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -296,35 +296,43 @@ class TestDataModelEdges: """ def test_skip_edge(self, helpers): + # Instantiate graph object and set node G = nx.MultiDiGraph() node = "Diagnosis" # Instantiate Parser data_model_parser = DataModelParser(helpers.get_data_path("validator_dag_test.model.csv")) - #Parse Model + # Parse Model parsed_data_model = data_model_parser.parse_model() + # Instantiate data model objects dmr = DataModelRelationships() dmn = DataModelNodes(parsed_data_model) dme = DataModelEdges() - + # Get edge relationships and all nodes from the parsed model edge_relationships = dmr.define_edge_relationships() all_nodes = dmn.gather_all_nodes(attr_rel_dict=parsed_data_model) + # Sanity check to ensure that the node we intend to test exists in the data model assert node in all_nodes + # Add a single node to the graph node_dict = {} - node_dict = dmn.generate_node_dict(node, parsed_data_model) node_dict[node] = node_dict G = dmn.generate_node(G, node_dict) + # Check the edges in the graph, there should be none before_edges = deepcopy(G.edges) + # Generate an edge in the 
graph with one node and a subset of the parsed data model + # We're attempting to add an edge for a node that is the only one in the graph, + # so `generate_edge` should skip adding edges and return the same graph G = dme.generate_edge(G, node, node_dict, {node:parsed_data_model[node]}, edge_relationships) + # Assert that no edges were added and that the current graph edges are the same as before the call to `generate_edge` assert before_edges == G.edges return @@ -338,6 +346,7 @@ def test_skip_edge(self, helpers): "all others" ]) def test_generate_edge(self, helpers, node_to_add, edge_relationship): + # Instantiate graph object G = nx.MultiDiGraph() # Instantiate Parser @@ -346,30 +355,36 @@ def test_generate_edge(self, helpers, node_to_add, edge_relationship): #Parse Model parsed_data_model = data_model_parser.parse_model() + # Instantiate data model objects dmr = DataModelRelationships() dmn = DataModelNodes(parsed_data_model) dme = DataModelEdges() - + # Get edge relationships and all nodes from the parsed model edge_relationships = dmr.define_edge_relationships() all_nodes = dmn.gather_all_nodes(attr_rel_dict=parsed_data_model) + # Sanity check to ensure that the node we intend to test exists in the data model assert node_to_add in all_nodes + # Add all nodes to the graph all_node_dict = {} for node in all_nodes: node_dict = dmn.generate_node_dict(node, parsed_data_model) all_node_dict[node] = node_dict G = dmn.generate_node(G, node_dict) + # Check the edges in the graph, there should be none before_edges = deepcopy(G.edges) + # Generate edges for whichever node we are testing G = dme.generate_edge(G, node_to_add, all_node_dict, parsed_data_model, edge_relationships) + # Assert that the current edges are different from the edges of the graph before assert G.edges != before_edges + # Assert that somewhere in the current edges for the node we added, that the correct relationship exists relationship_df = pd.DataFrame(G.edges, columns= ['u', 'v', 'edge']) - assert 
(relationship_df['edge'] == edge_relationship).any() return @@ -378,6 +393,7 @@ def test_generate_edge(self, helpers, node_to_add, edge_relationship): [("Patient ID", 1)], ids=["list"]) def test_generate_weights(self, helpers, node_to_add, expected_weight): + # Instantiate graph object G = nx.MultiDiGraph() # Instantiate Parser @@ -386,26 +402,33 @@ def test_generate_weights(self, helpers, node_to_add, expected_weight): #Parse Model parsed_data_model = data_model_parser.parse_model() + # Instantiate data model objects dmr = DataModelRelationships() dmn = DataModelNodes(parsed_data_model) dme = DataModelEdges() - + # Get edge relationships and all nodes from the parsed model edge_relationships = dmr.define_edge_relationships() all_nodes = dmn.gather_all_nodes(attr_rel_dict=parsed_data_model) - assert node_to_add in all_nodes + # Sanity check to ensure that the node we intend to test exists in the data model + assert node_to_add in all_nodes + + # Add all nodes to the graph all_node_dict = {} for node in all_nodes: node_dict = dmn.generate_node_dict(node, parsed_data_model) all_node_dict[node] = node_dict G = dmn.generate_node(G, node_dict) + # Check the edges in the graph, there should be none before_edges = deepcopy(G.edges) + # Generate edges for whichever node we are testing G = dme.generate_edge(G, node_to_add, all_node_dict, parsed_data_model, edge_relationships) + # Assert that the current edges are different from the edges of the graph before assert G.edges != before_edges print(G.edges.data()) From 8e21b6cc99e038b9ae5ed3baf79df5dc96845efd Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 10 Oct 2023 10:30:36 -0700 Subject: [PATCH 112/239] add weight checks in test --- tests/test_schemas.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 4ce845c46..85e1ee819 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ 
-389,10 +389,10 @@ def test_generate_edge(self, helpers, node_to_add, edge_relationship): return - @pytest.mark.parametrize("node_to_add, expected_weight", - [("Patient ID", 1)], + @pytest.mark.parametrize("node_to_add, other_node, expected_weight", + [("Patient ID", "Patient", 0)], ids=["list"]) - def test_generate_weights(self, helpers, node_to_add, expected_weight): + def test_generate_weights(self, helpers, node_to_add, other_node, expected_weight): # Instantiate graph object G = nx.MultiDiGraph() @@ -433,6 +433,12 @@ def test_generate_weights(self, helpers, node_to_add, expected_weight): print(G.edges.data()) + # Cast the edges and weights to a DataFrame for easier indexing + edges_and_weights = pd.DataFrame(G.edges.data(), columns= ['node1', 'node2', 'weights']).set_index('node1') + + # Assert that the weight added is what is expected + assert edges_and_weights.loc[other_node, 'weights']['weight'] == expected_weight + return From 368834a447ef5c2e2769125c12ae3180e77c4e32 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 10 Oct 2023 10:40:34 -0700 Subject: [PATCH 113/239] change test case --- tests/test_schemas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 85e1ee819..e9b92430b 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -390,7 +390,7 @@ def test_generate_edge(self, helpers, node_to_add, edge_relationship): return @pytest.mark.parametrize("node_to_add, other_node, expected_weight", - [("Patient ID", "Patient", 0)], + [("Patient ID", "Biospecimen", 1)], ids=["list"]) def test_generate_weights(self, helpers, node_to_add, other_node, expected_weight): # Instantiate graph object From 5cc5a2c250e804069c8f245ee211d6f39714b88a Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 10 Oct 2023 10:46:14 -0700 Subject: [PATCH 114/239] update expected schema validator errors --- 
tests/test_validator.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/test_validator.py b/tests/test_validator.py index c4e743c80..0278fabc6 100644 --- a/tests/test_validator.py +++ b/tests/test_validator.py @@ -101,6 +101,8 @@ def test_dag(self, helpers): # nodes could be in different order so need to account for that expected_errors = ['Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: Patient and PatientID, please remove this loop from your model and submit again.', - 'Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: PatientID and Patient, please remove this loop from your model and submit again.'] + 'Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: PatientID and Patient, please remove this loop from your model and submit again.', + 'Schematic requires models be a directed acyclic graph (DAG). 
Your graph is not a DAG, we found a loop between: Diagnosis and Diagnosis, please remove this loop from your model and submit again.'] + assert validator_errors[0] in expected_errors From 3f2070643c10b0d146384712afc931ed759933fb Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 10 Oct 2023 11:24:29 -0700 Subject: [PATCH 115/239] add tests for generate node dict and test_generate_nod --- tests/test_schemas.py | 53 +++++++++++++++++++++++++++++++++++++++---- 1 file changed, 49 insertions(+), 4 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 2cc54a507..997632642 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -40,6 +40,8 @@ def test_fake_func(): 'bio things': {'class': 'Biothings', 'property': 'biothings'}, } +NODE_DISPLAY_NAME_DICT = {'Patient':False, + 'Sex': True} def generate_graph_data_model(helpers, data_model_name): @@ -435,10 +437,53 @@ def test_run_rel_functions(self, helpers, data_model, rel_func, test_dn, test_bo convert_worked = True assert convert_worked==True return - def test_generate_node_dict(self): - return - def test_generate_node(self): - return + + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + @pytest.mark.parametrize("node_display_name", list(NODE_DISPLAY_NAME_DICT.keys()), ids=[str(v) for v in NODE_DISPLAY_NAME_DICT.values()]) + def test_generate_node_dict(self, helpers, data_model, node_display_name): + # Instantiate Parser + data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) + + # Parse Model + attr_rel_dictionary = data_model_parser.parse_model() + + # Instantiate DataModelNodes + data_model_nodes = generate_data_model_nodes(helpers, data_model_name=data_model) + + node_dict = data_model_nodes.generate_node_dict( + node_display_name=node_display_name, + attr_rel_dict=attr_rel_dictionary, + ) + + # Check that the output is as expected for the required key. 
+ if NODE_DISPLAY_NAME_DICT[node_display_name]: + assert node_dict['required'] == True + else: + #Looking up this way, in case we add empty defaults back to JSONLD it wont fail, but will only be absent in JSONLD not CSV. + if not node_dict['required'] == False: + assert DATA_MODEL_DICT[data_model] == 'JSONLD' + + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + def test_generate_node(self, helpers, data_model): + # Test adding a dummy node + node_dict = {'label': 'test_label'} + + path_to_data_model = helpers.get_data_path(data_model) + + # Get Graph + graph_data_model = generate_graph_data_model(helpers, data_model_name=path_to_data_model) + + # Instantiate DataModelNodes + data_model_nodes = generate_data_model_nodes(helpers, data_model_name=data_model) + + # Assert the test node is not already in the graph + assert False == (node_dict['label'] in graph_data_model.nodes) + + # Add test node + data_model_nodes.generate_node(graph_data_model, node_dict) + + # Check that the test node has been added + assert True == (node_dict['label'] in graph_data_model.nodes) class TestDataModelEdges: def test_generate_edge(self,helpers): From 50344b459436f1c32284841893d7e1ad2c8d29af Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 10 Oct 2023 13:46:43 -0700 Subject: [PATCH 116/239] update example and test model with new component --- tests/data/example.model.csv | 7 +- tests/data/example.model.jsonld | 135 ++++++++++++++++++++++++ tests/data/validator_dag_test.model.csv | 5 + 3 files changed, 146 insertions(+), 1 deletion(-) diff --git a/tests/data/example.model.csv b/tests/data/example.model.csv index f15db469c..aeca01b35 100644 --- a/tests/data/example.model.csv +++ b/tests/data/example.model.csv @@ -41,4 +41,9 @@ Check Date,,,,,TRUE,DataProperty,,,date Check NA,,,,,TRUE,DataProperty,,,int::IsNA MockRDB,,,"Component, MockRDB_id, SourceManifest",,FALSE,DataType,,, 
MockRDB_id,,,,,TRUE,DataProperty,,,int -SourceManifest,,,,,TRUE,DataProperty,,, \ No newline at end of file +SourceManifest,,,,,TRUE,DataProperty,,, +cohorts,,,"Component, dataset_id, cohort_tag_id, id","name, dataset_id, cohort_tag_id, id",FALSE,,,, +cohort_tag_id,,,,,FALSE,,,,matchAtLeastOne tags.id set error +name,,,,,FALSE,,,, +dataset_id,,,,,FALSE,,,, +id,,,,,FALSE,,,, diff --git a/tests/data/example.model.jsonld b/tests/data/example.model.jsonld index 1ebcee1d9..99adb240a 100644 --- a/tests/data/example.model.jsonld +++ b/tests/data/example.model.jsonld @@ -21,6 +21,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Patient", + "sms:required": "sms:sms:false", "sms:requiresDependency": [ { "@id": "bts:PatientID" @@ -99,6 +100,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Year of Birth", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -140,6 +142,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Component", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -156,6 +159,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "DataType", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -172,6 +176,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "DataProperty", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -188,6 +193,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Female", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -204,6 +210,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Male", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -220,6 +227,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Other", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -239,6 +247,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Healthy", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -258,6 
+267,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Cancer", + "sms:required": "sms:sms:false", "sms:requiresDependency": [ { "@id": "bts:CancerType" @@ -352,6 +362,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "ValidValue", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -371,6 +382,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Breast", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -390,6 +402,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Colorectal", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -409,6 +422,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Lung", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -428,6 +442,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Prostate", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -447,6 +462,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Skin", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -463,6 +479,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Biospecimen", + "sms:required": "sms:sms:false", "sms:requiresComponent": [ { "@id": "bts:Patient" @@ -540,6 +557,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Malignant", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -556,6 +574,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Bulk RNA-seq Assay", + "sms:required": "sms:sms:false", "sms:requiresComponent": [ { "@id": "bts:Biospecimen" @@ -639,6 +658,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "FASTQ", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -658,6 +678,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "BAM", + "sms:required": "sms:sms:false", "sms:requiresDependency": [ { "@id": "bts:GenomeBuild" @@ -682,6 +703,7 @@ "@id": 
"http://schema.biothings.io" }, "sms:displayName": "CRAM", + "sms:required": "sms:sms:false", "sms:requiresDependency": [ { "@id": "bts:GenomeBuild" @@ -709,6 +731,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "CSV/TSV", + "sms:required": "sms:sms:false", "sms:requiresDependency": [ { "@id": "bts:GenomeBuild" @@ -778,6 +801,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "GRCh37", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -794,6 +818,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "GRCh38", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -810,6 +835,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "GRCm38", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -826,6 +852,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "GRCm39", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -842,6 +869,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "MockComponent", + "sms:required": "sms:sms:false", "sms:requiresDependency": [ { "@id": "bts:Component" @@ -1204,6 +1232,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Check Recommended", + "sms:required": "sms:sms:false", "sms:validationRules": [ "recommended" ] @@ -1318,6 +1347,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "ab", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -1334,6 +1364,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "cd", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -1350,6 +1381,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "ef", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -1366,6 +1398,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "gh", + "sms:required": "sms:sms:false", "sms:validationRules": [] }, { @@ -1382,6 +1415,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": 
"MockRDB", + "sms:required": "sms:sms:false", "sms:requiresDependency": [ { "@id": "bts:Component" @@ -1430,6 +1464,107 @@ "sms:displayName": "SourceManifest", "sms:required": "sms:true", "sms:validationRules": [] + }, + { + "@id": "bts:Cohorts", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "Cohorts", + "rdfs:subClassOf": [ + { + "@id": "bts:Thing" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "cohorts", + "sms:required": "sms:sms:false", + "sms:requiresDependency": [ + { + "@id": "bts:Component" + }, + { + "@id": "bts:datasetId" + }, + { + "@id": "bts:cohortTagId" + }, + { + "@id": "bts:id" + } + ], + "sms:validationRules": [] + }, + { + "@id": "bts:datasetId", + "@type": "rdf:Property", + "rdfs:comment": "TBD", + "rdfs:label": "datasetId", + "schema:domainIncludes": [ + { + "@id": "bts:Cohorts" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "dataset_id", + "sms:required": "sms:sms:false", + "sms:validationRules": [] + }, + { + "@id": "bts:cohortTagId", + "@type": "rdf:Property", + "rdfs:comment": "TBD", + "rdfs:label": "cohortTagId", + "schema:domainIncludes": [ + { + "@id": "bts:Cohorts" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "cohort_tag_id", + "sms:required": "sms:sms:false", + "sms:validationRules": [ + "matchAtLeastOne tags.id set error" + ] + }, + { + "@id": "bts:id", + "@type": "rdf:Property", + "rdfs:comment": "TBD", + "rdfs:label": "id", + "schema:domainIncludes": [ + { + "@id": "bts:Cohorts" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "id", + "sms:required": "sms:sms:false", + "sms:validationRules": [] + }, + { + "@id": "bts:name", + "@type": "rdf:Property", + "rdfs:comment": "TBD", + "rdfs:label": "name", + "schema:domainIncludes": [ + { + "@id": "bts:Cohorts" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" 
+ }, + "sms:displayName": "name", + "sms:required": "sms:sms:false", + "sms:validationRules": [] } ], "@id": "http://schema.biothings.io/#0.1" diff --git a/tests/data/validator_dag_test.model.csv b/tests/data/validator_dag_test.model.csv index 3184250ee..f442d528c 100644 --- a/tests/data/validator_dag_test.model.csv +++ b/tests/data/validator_dag_test.model.csv @@ -42,3 +42,8 @@ Check NA,,,,,TRUE,DataProperty,,,int::IsNA MockRDB,,,"Component, MockRDB_id, SourceManifest",,FALSE,DataType,,, MockRDB_id,,,,,TRUE,DataProperty,,,int SourceManifest,,,,,TRUE,DataProperty,,, +cohorts,,,"Component, dataset_id, cohort_tag_id, id","name, dataset_id, cohort_tag_id, id",FALSE,,,, +cohort_tag_id,,,,,FALSE,,,,matchAtLeastOne tags.id set error +name,,,,,FALSE,,,, +dataset_id,,,,,FALSE,,,, +id,,,,,FALSE,,,, From 6af0009cb6f9e8fb656477bb1fd6da623398d799 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 10 Oct 2023 13:46:57 -0700 Subject: [PATCH 117/239] add test for property weights --- tests/test_schemas.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index e9b92430b..ef38c066f 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -390,14 +390,16 @@ def test_generate_edge(self, helpers, node_to_add, edge_relationship): return @pytest.mark.parametrize("node_to_add, other_node, expected_weight", - [("Patient ID", "Biospecimen", 1)], - ids=["list"]) + [("Patient ID", "Biospecimen", 1), + ('dataset_id', 'cohorts', -1)], + ids=["list", "domainIncludes"]) def test_generate_weights(self, helpers, node_to_add, other_node, expected_weight): # Instantiate graph object G = nx.MultiDiGraph() # Instantiate Parser - data_model_parser = DataModelParser(helpers.get_data_path("validator_dag_test.model.csv")) + data_model_path = "validator_dag_test.model.csv" + data_model_parser = DataModelParser(helpers.get_data_path(data_model_path)) #Parse Model 
parsed_data_model = data_model_parser.parse_model() @@ -436,9 +438,16 @@ def test_generate_weights(self, helpers, node_to_add, other_node, expected_weigh # Cast the edges and weights to a DataFrame for easier indexing edges_and_weights = pd.DataFrame(G.edges.data(), columns= ['node1', 'node2', 'weights']).set_index('node1') - # Assert that the weight added is what is expected - assert edges_and_weights.loc[other_node, 'weights']['weight'] == expected_weight + if expected_weight < 0: + schema = helpers.get_data_frame(path=helpers.get_data_path(data_model_path), data_model=True) + expected_weight = schema.index[schema['Attribute']==other_node][0] + logger.debug(f"Expected weight for nodes {node_to_add} and {other_node} is {expected_weight}.") + # Assert that the weight added is what is expected + if node_to_add in ['Patient ID']: + assert edges_and_weights.loc[other_node, 'weights']['weight'] == expected_weight + elif node_to_add in ['cohorts']: + assert edges_and_weights.loc[node_to_add, 'weights']['weight'] == expected_weight return From eb788e7dd3c29fcf0fbaec4b3100ad93bc5704b7 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 10 Oct 2023 16:11:02 -0700 Subject: [PATCH 118/239] fix ref to schema Generator, pulled in with merging dev --- schematic/store/synapse.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 10392862f..ee99e41e7 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -804,7 +804,7 @@ def getProjectManifests(self, projectId: str) -> List[str]: return manifests - def upload_project_manifests_to_synapse(self, sg: SchemaGenerator, projectId: str) -> List[str]: + def upload_project_manifests_to_synapse(self, DME: DataModelGraphExplorer, projectId: str) -> List[str]: """Upload all metadata manifest files across all datasets in a specified project as tables in Synapse. 
Returns: String of all the manifest_table_ids of all the manifests that have been loaded. @@ -826,7 +826,7 @@ def upload_project_manifests_to_synapse(self, sg: SchemaGenerator, projectId: st manifest_name = manifest_info["properties"]["name"] manifest_path = manifest_info["path"] manifest_df = load_df(manifest_path) - manifest_table_id = uploadDB(sg=sg, manifest=manifest, datasetId=datasetId, table_name=datasetName) + manifest_table_id = uploadDB(DME=DME, manifest=manifest, datasetId=datasetId, table_name=datasetName) manifest_loaded.append(datasetName) return manifest_loaded From 98e15b6aee94cffe44f0c9fd0ff04e5ca49cddff Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 10 Oct 2023 22:25:24 -0700 Subject: [PATCH 119/239] fix additional merge conflicts --- schematic/manifest/generator.py | 4 ++-- tests/test_manifest.py | 9 ++++++++- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index 0db4f35a8..84c13d30f 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -94,7 +94,7 @@ def __init__( self.additional_metadata = additional_metadata # Check if the class is in the schema - root_in_schema = self.sg.se.is_class_in_schema(self.root) + root_in_schema = self.DME.is_class_in_schema(self.root) # If the class could not be found, give a notification if not root_in_schema: @@ -103,7 +103,7 @@ def __init__( raise LookupError(exception_message) # Determine whether current data type is file-based - self.is_file_based = "Filename" in self.sg.get_node_dependencies(self.root) + self.is_file_based = "Filename" in self.DME.get_node_dependencies(self.root) def _attribute_to_letter(self, attribute, manifest_fields): """Map attribute to column letter in a google sheet""" diff --git a/tests/test_manifest.py b/tests/test_manifest.py index 225cad542..4196fd795 100644 --- a/tests/test_manifest.py +++ b/tests/test_manifest.py @@ -122,11 +122,12 @@ def test_init(self, 
helpers): graph=graph_data_model, title="mock_title", path_to_json_ld=path_to_data_model, + root = "Patient", ) assert type(generator.title) is str # assert generator.sheet_service == mock_creds["sheet_service"] - assert generator.root is None + assert generator.root is "Patient" assert type(generator.DME) is DataModelGraphExplorer @pytest.mark.parametrize("data_type, exc, exc_message", @@ -137,11 +138,17 @@ def test_missing_root_error(self, helpers, data_type, exc, exc_message): """ Test for errors when either no DataType is provided or when a DataType is provided but not found in the schema """ + path_to_data_model = helpers.get_data_path("example.model.jsonld") + + # Get graph data model + graph_data_model = generate_graph_data_model(helpers, path_to_data_model=path_to_data_model) + # A LookupError should be raised and include message when the component cannot be found with pytest.raises(exc) as e: generator = ManifestGenerator( path_to_json_ld=helpers.get_data_path("example.model.jsonld"), + graph=graph_data_model, root=data_type, use_annotations=False, ) From 1044c39aea674330ef6c3c7c27cfaaa675fda9a6 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 11 Oct 2023 09:33:55 -0700 Subject: [PATCH 120/239] remove `cohorts` component from test models --- tests/data/example.model.csv | 5 -- tests/data/example.model.jsonld | 101 ------------------------ tests/data/validator_dag_test.model.csv | 5 -- 3 files changed, 111 deletions(-) diff --git a/tests/data/example.model.csv b/tests/data/example.model.csv index aeca01b35..ced7631bc 100644 --- a/tests/data/example.model.csv +++ b/tests/data/example.model.csv @@ -42,8 +42,3 @@ Check NA,,,,,TRUE,DataProperty,,,int::IsNA MockRDB,,,"Component, MockRDB_id, SourceManifest",,FALSE,DataType,,, MockRDB_id,,,,,TRUE,DataProperty,,,int SourceManifest,,,,,TRUE,DataProperty,,, -cohorts,,,"Component, dataset_id, cohort_tag_id, id","name, dataset_id, cohort_tag_id, id",FALSE,,,, 
-cohort_tag_id,,,,,FALSE,,,,matchAtLeastOne tags.id set error -name,,,,,FALSE,,,, -dataset_id,,,,,FALSE,,,, -id,,,,,FALSE,,,, diff --git a/tests/data/example.model.jsonld b/tests/data/example.model.jsonld index 99adb240a..483ba54c5 100644 --- a/tests/data/example.model.jsonld +++ b/tests/data/example.model.jsonld @@ -1464,107 +1464,6 @@ "sms:displayName": "SourceManifest", "sms:required": "sms:true", "sms:validationRules": [] - }, - { - "@id": "bts:Cohorts", - "@type": "rdfs:Class", - "rdfs:comment": "TBD", - "rdfs:label": "Cohorts", - "rdfs:subClassOf": [ - { - "@id": "bts:Thing" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "sms:displayName": "cohorts", - "sms:required": "sms:sms:false", - "sms:requiresDependency": [ - { - "@id": "bts:Component" - }, - { - "@id": "bts:datasetId" - }, - { - "@id": "bts:cohortTagId" - }, - { - "@id": "bts:id" - } - ], - "sms:validationRules": [] - }, - { - "@id": "bts:datasetId", - "@type": "rdf:Property", - "rdfs:comment": "TBD", - "rdfs:label": "datasetId", - "schema:domainIncludes": [ - { - "@id": "bts:Cohorts" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "sms:displayName": "dataset_id", - "sms:required": "sms:sms:false", - "sms:validationRules": [] - }, - { - "@id": "bts:cohortTagId", - "@type": "rdf:Property", - "rdfs:comment": "TBD", - "rdfs:label": "cohortTagId", - "schema:domainIncludes": [ - { - "@id": "bts:Cohorts" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "sms:displayName": "cohort_tag_id", - "sms:required": "sms:sms:false", - "sms:validationRules": [ - "matchAtLeastOne tags.id set error" - ] - }, - { - "@id": "bts:id", - "@type": "rdf:Property", - "rdfs:comment": "TBD", - "rdfs:label": "id", - "schema:domainIncludes": [ - { - "@id": "bts:Cohorts" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "sms:displayName": "id", - "sms:required": "sms:sms:false", - "sms:validationRules": [] - }, - { - 
"@id": "bts:name", - "@type": "rdf:Property", - "rdfs:comment": "TBD", - "rdfs:label": "name", - "schema:domainIncludes": [ - { - "@id": "bts:Cohorts" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "sms:displayName": "name", - "sms:required": "sms:sms:false", - "sms:validationRules": [] } ], "@id": "http://schema.biothings.io/#0.1" diff --git a/tests/data/validator_dag_test.model.csv b/tests/data/validator_dag_test.model.csv index f442d528c..3184250ee 100644 --- a/tests/data/validator_dag_test.model.csv +++ b/tests/data/validator_dag_test.model.csv @@ -42,8 +42,3 @@ Check NA,,,,,TRUE,DataProperty,,,int::IsNA MockRDB,,,"Component, MockRDB_id, SourceManifest",,FALSE,DataType,,, MockRDB_id,,,,,TRUE,DataProperty,,,int SourceManifest,,,,,TRUE,DataProperty,,, -cohorts,,,"Component, dataset_id, cohort_tag_id, id","name, dataset_id, cohort_tag_id, id",FALSE,,,, -cohort_tag_id,,,,,FALSE,,,,matchAtLeastOne tags.id set error -name,,,,,FALSE,,,, -dataset_id,,,,,FALSE,,,, -id,,,,,FALSE,,,, From 026862e91845f7ac4c83e24392cad07251c44929 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 11 Oct 2023 09:34:13 -0700 Subject: [PATCH 121/239] create new model for component w/ properties --- tests/data/properties.test.model.csv | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 tests/data/properties.test.model.csv diff --git a/tests/data/properties.test.model.csv b/tests/data/properties.test.model.csv new file mode 100644 index 000000000..1f2121356 --- /dev/null +++ b/tests/data/properties.test.model.csv @@ -0,0 +1,6 @@ +Attribute,Description,Valid Values,DependsOn,Properties,Required,Parent,DependsOn Component,Source,Validation Rules +cohorts,,,"Component, dataset_id, cohort_tag_id, id","name, dataset_id, cohort_tag_id, id",FALSE,,,, +cohort_tag_id,,,,,FALSE,,,,matchAtLeastOne tags.id set error +name,,,,,FALSE,,,, +dataset_id,,,,,FALSE,,,, +id,,,,,FALSE,,,, From 
1c4b8809a078555c26fd09c439e8381990e47fa8 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 11 Oct 2023 09:37:44 -0700 Subject: [PATCH 122/239] parametrize data model path --- tests/test_schemas.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index ef38c066f..553bcb630 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -389,16 +389,15 @@ def test_generate_edge(self, helpers, node_to_add, edge_relationship): return - @pytest.mark.parametrize("node_to_add, other_node, expected_weight", - [("Patient ID", "Biospecimen", 1), - ('dataset_id', 'cohorts', -1)], + @pytest.mark.parametrize("node_to_add, other_node, expected_weight, data_model_path", + [("Patient ID", "Biospecimen", 1, "validator_dag_test.model.csv"), + ("dataset_id", "cohorts", -1, "properties.test.model.csv")], ids=["list", "domainIncludes"]) - def test_generate_weights(self, helpers, node_to_add, other_node, expected_weight): + def test_generate_weights(self, helpers, node_to_add, other_node, expected_weight, data_model_path): # Instantiate graph object G = nx.MultiDiGraph() # Instantiate Parser - data_model_path = "validator_dag_test.model.csv" data_model_parser = DataModelParser(helpers.get_data_path(data_model_path)) #Parse Model From 5c5991c3ba4d8c364b85a24d8a6d0503a181d7aa Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 11 Oct 2023 09:37:53 -0700 Subject: [PATCH 123/239] remove print statement --- tests/test_schemas.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 553bcb630..0211c3b28 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -431,8 +431,6 @@ def test_generate_weights(self, helpers, node_to_add, other_node, expected_weigh # Assert that the current edges are different from the edges of the graph before assert G.edges != 
before_edges - - print(G.edges.data()) # Cast the edges and weights to a DataFrame for easier indexing edges_and_weights = pd.DataFrame(G.edges.data(), columns= ['node1', 'node2', 'weights']).set_index('node1') From dfc5ca30da4233ce869f73a0a0f257fff7f33f49 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 11 Oct 2023 09:53:40 -0700 Subject: [PATCH 124/239] use DMR fixture, change name cap --- tests/test_schemas.py | 35 ++++++++++++++++------------------- 1 file changed, 16 insertions(+), 19 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 0211c3b28..ab9e7f312 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -54,8 +54,8 @@ def DME(helpers, data_model_name='example.model.csv'): DME = DataModelGraphExplorer(graph_data_model) yield DME -@pytest.fixture(name="dmr") -def fixture_dmr(): +@pytest.fixture(name="DMR") +def fixture_DMR(): """Yields a data model relationships object for testing""" yield DataModelRelationships() @@ -85,7 +85,7 @@ def test_parse_jsonld_model(self): class TestDataModelRelationships: """Tests for DataModelRelationships class""" - def test_define_data_model_relationships(self, dmr: DataModelRelationships): + def test_define_data_model_relationships(self, DMR: DataModelRelationships): """Tests relationships_dictionary created has correct keys""" required_keys = [ 'jsonld_key', @@ -97,7 +97,7 @@ def test_define_data_model_relationships(self, dmr: DataModelRelationships): required_edge_keys = ['edge_key', 'edge_dir'] required_node_keys = ['node_label', 'node_attr_dict'] - relationships = dmr.relationships_dictionary + relationships = DMR.relationships_dictionary for relationship in relationships.values(): for key in required_keys: @@ -109,9 +109,9 @@ def test_define_data_model_relationships(self, dmr: DataModelRelationships): for key in required_node_keys: assert key in relationship.keys() - def test_define_required_csv_headers(self, dmr: 
DataModelRelationships): + def test_define_required_csv_headers(self, DMR: DataModelRelationships): """Tests method returns correct values""" - assert dmr.define_required_csv_headers() == [ + assert DMR.define_required_csv_headers() == [ 'Attribute', 'Description', 'Valid Values', @@ -123,9 +123,9 @@ def test_define_required_csv_headers(self, dmr: DataModelRelationships): 'Source' ] - def test_define_edge_relationships(self, dmr: DataModelRelationships): + def test_define_edge_relationships(self, DMR: DataModelRelationships): """Tests method returns correct values""" - assert dmr.define_edge_relationships() == { + assert DMR.define_edge_relationships() == { 'rangeIncludes': 'Valid Values', 'requiresDependency': 'DependsOn', 'requiresComponent': 'DependsOn Component', @@ -133,9 +133,9 @@ def test_define_edge_relationships(self, dmr: DataModelRelationships): 'domainIncludes': 'Properties' } - def test_define_value_relationships(self, dmr: DataModelRelationships): + def test_define_value_relationships(self, DMR: DataModelRelationships): """Tests method returns correct values""" - assert dmr.define_value_relationships() == { + assert DMR.define_value_relationships() == { 'displayName': 'Attribute', 'label': None, 'comment': 'Description', @@ -295,7 +295,7 @@ class TestDataModelEdges: rangeIncludes relationship edge """ - def test_skip_edge(self, helpers): + def test_skip_edge(self, helpers, DMR): # Instantiate graph object and set node G = nx.MultiDiGraph() node = "Diagnosis" @@ -307,12 +307,11 @@ def test_skip_edge(self, helpers): parsed_data_model = data_model_parser.parse_model() # Instantiate data model objects - dmr = DataModelRelationships() dmn = DataModelNodes(parsed_data_model) dme = DataModelEdges() # Get edge relationships and all nodes from the parsed model - edge_relationships = dmr.define_edge_relationships() + edge_relationships = DMR.define_edge_relationships() all_nodes = dmn.gather_all_nodes(attr_rel_dict=parsed_data_model) # Sanity check to ensure 
that the node we intend to test exists in the data model @@ -345,7 +344,7 @@ def test_skip_edge(self, helpers): "Valid Value", "all others" ]) - def test_generate_edge(self, helpers, node_to_add, edge_relationship): + def test_generate_edge(self, helpers, node_to_add, edge_relationship, DMR): # Instantiate graph object G = nx.MultiDiGraph() @@ -356,12 +355,11 @@ def test_generate_edge(self, helpers, node_to_add, edge_relationship): parsed_data_model = data_model_parser.parse_model() # Instantiate data model objects - dmr = DataModelRelationships() dmn = DataModelNodes(parsed_data_model) dme = DataModelEdges() # Get edge relationships and all nodes from the parsed model - edge_relationships = dmr.define_edge_relationships() + edge_relationships = DMR.define_edge_relationships() all_nodes = dmn.gather_all_nodes(attr_rel_dict=parsed_data_model) # Sanity check to ensure that the node we intend to test exists in the data model @@ -393,7 +391,7 @@ def test_generate_edge(self, helpers, node_to_add, edge_relationship): [("Patient ID", "Biospecimen", 1, "validator_dag_test.model.csv"), ("dataset_id", "cohorts", -1, "properties.test.model.csv")], ids=["list", "domainIncludes"]) - def test_generate_weights(self, helpers, node_to_add, other_node, expected_weight, data_model_path): + def test_generate_weights(self, helpers, DMR, node_to_add, other_node, expected_weight, data_model_path): # Instantiate graph object G = nx.MultiDiGraph() @@ -404,12 +402,11 @@ def test_generate_weights(self, helpers, node_to_add, other_node, expected_weigh parsed_data_model = data_model_parser.parse_model() # Instantiate data model objects - dmr = DataModelRelationships() dmn = DataModelNodes(parsed_data_model) dme = DataModelEdges() # Get edge relationships and all nodes from the parsed model - edge_relationships = dmr.define_edge_relationships() + edge_relationships = DMR.define_edge_relationships() all_nodes = dmn.gather_all_nodes(attr_rel_dict=parsed_data_model) From 
2ab025e7dc66127b60b3d9ec50de5d01b8aabc2d Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 11 Oct 2023 09:57:31 -0700 Subject: [PATCH 125/239] add fixture for DataModelEdges object --- tests/test_schemas.py | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index ab9e7f312..14eb4014e 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -59,6 +59,14 @@ def fixture_DMR(): """Yields a data model relationships object for testing""" yield DataModelRelationships() +@pytest.fixture +def DMEdges(): + """ + Yields a Data Model Edges object for testing + TODO: Update naming for DataModelGraphExplorer and fixture to avoid overlapping namespace + """ + yield DataModelEdges() + class TestDataModelParser: def test_get_base_schema_path(self, helpers): return @@ -295,7 +303,7 @@ class TestDataModelEdges: rangeIncludes relationship edge """ - def test_skip_edge(self, helpers, DMR): + def test_skip_edge(self, helpers, DMR, DMEdges): # Instantiate graph object and set node G = nx.MultiDiGraph() node = "Diagnosis" @@ -308,7 +316,6 @@ def test_skip_edge(self, helpers, DMR): # Instantiate data model objects dmn = DataModelNodes(parsed_data_model) - dme = DataModelEdges() # Get edge relationships and all nodes from the parsed model edge_relationships = DMR.define_edge_relationships() @@ -329,7 +336,7 @@ def test_skip_edge(self, helpers, DMR): # Generate an edge in the graph with one node and a subset of the parsed data model # We're attempting to add an edge for a node that is the only one in the graph, # so `generate_edge` should skip adding edges and return the same graph - G = dme.generate_edge(G, node, node_dict, {node:parsed_data_model[node]}, edge_relationships) + G = DMEdges.generate_edge(G, node, node_dict, {node:parsed_data_model[node]}, edge_relationships) # Assert that no edges were added and that the current graph edges are the 
same as before the call to `generate_edge` assert before_edges == G.edges @@ -344,7 +351,7 @@ def test_skip_edge(self, helpers, DMR): "Valid Value", "all others" ]) - def test_generate_edge(self, helpers, node_to_add, edge_relationship, DMR): + def test_generate_edge(self, helpers, DMR, DMEdges, node_to_add, edge_relationship): # Instantiate graph object G = nx.MultiDiGraph() @@ -356,7 +363,6 @@ def test_generate_edge(self, helpers, node_to_add, edge_relationship, DMR): # Instantiate data model objects dmn = DataModelNodes(parsed_data_model) - dme = DataModelEdges() # Get edge relationships and all nodes from the parsed model edge_relationships = DMR.define_edge_relationships() @@ -376,7 +382,7 @@ def test_generate_edge(self, helpers, node_to_add, edge_relationship, DMR): before_edges = deepcopy(G.edges) # Generate edges for whichever node we are testing - G = dme.generate_edge(G, node_to_add, all_node_dict, parsed_data_model, edge_relationships) + G = DMEdges.generate_edge(G, node_to_add, all_node_dict, parsed_data_model, edge_relationships) # Assert that the current edges are different from the edges of the graph before assert G.edges != before_edges @@ -391,7 +397,7 @@ def test_generate_edge(self, helpers, node_to_add, edge_relationship, DMR): [("Patient ID", "Biospecimen", 1, "validator_dag_test.model.csv"), ("dataset_id", "cohorts", -1, "properties.test.model.csv")], ids=["list", "domainIncludes"]) - def test_generate_weights(self, helpers, DMR, node_to_add, other_node, expected_weight, data_model_path): + def test_generate_weights(self, helpers, DMR, DMEdges, node_to_add, other_node, expected_weight, data_model_path): # Instantiate graph object G = nx.MultiDiGraph() @@ -403,7 +409,6 @@ def test_generate_weights(self, helpers, DMR, node_to_add, other_node, expected_ # Instantiate data model objects dmn = DataModelNodes(parsed_data_model) - dme = DataModelEdges() # Get edge relationships and all nodes from the parsed model edge_relationships = 
DMR.define_edge_relationships() @@ -424,7 +429,7 @@ def test_generate_weights(self, helpers, DMR, node_to_add, other_node, expected_ before_edges = deepcopy(G.edges) # Generate edges for whichever node we are testing - G = dme.generate_edge(G, node_to_add, all_node_dict, parsed_data_model, edge_relationships) + G = DMEdges.generate_edge(G, node_to_add, all_node_dict, parsed_data_model, edge_relationships) # Assert that the current edges are different from the edges of the graph before assert G.edges != before_edges From af51f09d58660fc796f9d3270cd27f5330c911c4 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 11 Oct 2023 10:10:31 -0700 Subject: [PATCH 126/239] use dataModelParser helper fxn --- tests/test_schemas.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 14eb4014e..d4f9ac6d4 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -309,7 +309,7 @@ def test_skip_edge(self, helpers, DMR, DMEdges): node = "Diagnosis" # Instantiate Parser - data_model_parser = DataModelParser(helpers.get_data_path("validator_dag_test.model.csv")) + data_model_parser = helpers.get_data_model_parser("validator_dag_test.model.csv") # Parse Model parsed_data_model = data_model_parser.parse_model() @@ -356,7 +356,7 @@ def test_generate_edge(self, helpers, DMR, DMEdges, node_to_add, edge_relationsh G = nx.MultiDiGraph() # Instantiate Parser - data_model_parser = DataModelParser(helpers.get_data_path("validator_dag_test.model.csv")) + data_model_parser = helpers.get_data_model_parser("validator_dag_test.model.csv") #Parse Model parsed_data_model = data_model_parser.parse_model() @@ -402,7 +402,7 @@ def test_generate_weights(self, helpers, DMR, DMEdges, node_to_add, other_node, G = nx.MultiDiGraph() # Instantiate Parser - data_model_parser = DataModelParser(helpers.get_data_path(data_model_path)) + data_model_parser = 
helpers.get_data_model_parser(data_model_path) #Parse Model parsed_data_model = data_model_parser.parse_model() From a7a9e7cf9ad6d16a1bf585a4647f294504da3b30 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 11 Oct 2023 10:10:56 -0700 Subject: [PATCH 127/239] change df column names --- tests/test_schemas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index d4f9ac6d4..61efb8125 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -388,7 +388,7 @@ def test_generate_edge(self, helpers, DMR, DMEdges, node_to_add, edge_relationsh assert G.edges != before_edges # Assert that somewhere in the current edges for the node we added, that the correct relationship exists - relationship_df = pd.DataFrame(G.edges, columns= ['u', 'v', 'edge']) + relationship_df = pd.DataFrame(G.edges, columns= ['node1', 'node2', 'edge']) assert (relationship_df['edge'] == edge_relationship).any() return From 47a49fbb457d95169ccc2243ef14ac1bb163e67a Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 11 Oct 2023 10:14:14 -0700 Subject: [PATCH 128/239] change assertion logic to explicitly check that nodes were added --- tests/test_schemas.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 61efb8125..92525588a 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -385,7 +385,7 @@ def test_generate_edge(self, helpers, DMR, DMEdges, node_to_add, edge_relationsh G = DMEdges.generate_edge(G, node_to_add, all_node_dict, parsed_data_model, edge_relationships) # Assert that the current edges are different from the edges of the graph before - assert G.edges != before_edges + assert G.edges > before_edges # Assert that somewhere in the current edges for the node we added, that the correct relationship exists relationship_df = 
pd.DataFrame(G.edges, columns= ['node1', 'node2', 'edge']) @@ -432,7 +432,7 @@ def test_generate_weights(self, helpers, DMR, DMEdges, node_to_add, other_node, G = DMEdges.generate_edge(G, node_to_add, all_node_dict, parsed_data_model, edge_relationships) # Assert that the current edges are different from the edges of the graph before - assert G.edges != before_edges + assert G.edges > before_edges # Cast the edges and weights to a DataFrame for easier indexing edges_and_weights = pd.DataFrame(G.edges.data(), columns= ['node1', 'node2', 'weights']).set_index('node1') From f7f7e589394ea5299a55e8a6a12094a9d8c9806a Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 11 Oct 2023 10:16:16 -0700 Subject: [PATCH 129/239] update comments and log message --- tests/test_schemas.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 92525588a..8430ffa36 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -437,10 +437,13 @@ def test_generate_weights(self, helpers, DMR, DMEdges, node_to_add, other_node, # Cast the edges and weights to a DataFrame for easier indexing edges_and_weights = pd.DataFrame(G.edges.data(), columns= ['node1', 'node2', 'weights']).set_index('node1') + # Get the index of the property in the schema + # Weights for properties are determined by their order in the schema. 
+ # This would allow the tests to continue to function correctly in the case were other attributes were added to the schema if expected_weight < 0: schema = helpers.get_data_frame(path=helpers.get_data_path(data_model_path), data_model=True) expected_weight = schema.index[schema['Attribute']==other_node][0] - logger.debug(f"Expected weight for nodes {node_to_add} and {other_node} is {expected_weight}.") + logger.debug(f"Expected weight for the edge of nodes {node_to_add} and {other_node} is {expected_weight}.") # Assert that the weight added is what is expected if node_to_add in ['Patient ID']: From 641748fa66d1478cc8ee95090694299086485335 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 11 Oct 2023 10:17:26 -0700 Subject: [PATCH 130/239] clean spacing / update comments --- tests/test_schemas.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 8430ffa36..4cab48f8b 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -314,7 +314,7 @@ def test_skip_edge(self, helpers, DMR, DMEdges): # Parse Model parsed_data_model = data_model_parser.parse_model() - # Instantiate data model objects + # Instantiate data model Nodes object dmn = DataModelNodes(parsed_data_model) # Get edge relationships and all nodes from the parsed model @@ -361,7 +361,7 @@ def test_generate_edge(self, helpers, DMR, DMEdges, node_to_add, edge_relationsh #Parse Model parsed_data_model = data_model_parser.parse_model() - # Instantiate data model objects + # Instantiate data model Nodes object dmn = DataModelNodes(parsed_data_model) # Get edge relationships and all nodes from the parsed model @@ -407,14 +407,13 @@ def test_generate_weights(self, helpers, DMR, DMEdges, node_to_add, other_node, #Parse Model parsed_data_model = data_model_parser.parse_model() - # Instantiate data model objects + # Instantiate data model Nodes object dmn = 
DataModelNodes(parsed_data_model) # Get edge relationships and all nodes from the parsed model edge_relationships = DMR.define_edge_relationships() all_nodes = dmn.gather_all_nodes(attr_rel_dict=parsed_data_model) - # Sanity check to ensure that the node we intend to test exists in the data model assert node_to_add in all_nodes @@ -450,6 +449,7 @@ def test_generate_weights(self, helpers, DMR, DMEdges, node_to_add, other_node, assert edges_and_weights.loc[other_node, 'weights']['weight'] == expected_weight elif node_to_add in ['cohorts']: assert edges_and_weights.loc[node_to_add, 'weights']['weight'] == expected_weight + return From ec06372c4f67fe5e87f5da6de25e6feed52d5158 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 11 Oct 2023 10:26:18 -0700 Subject: [PATCH 131/239] Revert "update expected schema validator errors" This reverts commit 5cc5a2c250e804069c8f245ee211d6f39714b88a. --- tests/test_validator.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/test_validator.py b/tests/test_validator.py index 0278fabc6..c4e743c80 100644 --- a/tests/test_validator.py +++ b/tests/test_validator.py @@ -101,8 +101,6 @@ def test_dag(self, helpers): # nodes could be in different order so need to account for that expected_errors = ['Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: Patient and PatientID, please remove this loop from your model and submit again.', - 'Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: PatientID and Patient, please remove this loop from your model and submit again.', - 'Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: Diagnosis and Diagnosis, please remove this loop from your model and submit again.'] - + 'Schematic requires models be a directed acyclic graph (DAG). 
Your graph is not a DAG, we found a loop between: PatientID and Patient, please remove this loop from your model and submit again.'] assert validator_errors[0] in expected_errors From 19fafd374b83c72acff985a59162bff233f99eaf Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 11 Oct 2023 10:43:38 -0700 Subject: [PATCH 132/239] fix issue in test_get_json_validation__schema --- tests/test_schemas.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 0c6bc0ba1..37e4ac993 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -19,6 +19,11 @@ logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) +DATA_MODEL_DICT = { + 'example.model.csv': "CSV", + 'example.model.jsonld': "JSONLD" + } + def generate_graph_data_model(helpers, data_model_name): """ Simple helper function to generate a networkx graph data model from a CSV or JSONLD data model @@ -389,7 +394,7 @@ def test_get_json_validation_schema(self, helpers, data_model, source_node, sche # Check contents of validation schema assert 'Diagnosis' in json_validation_schema['properties'] - assert json_validation_schema['properties']['Diagnosis'] == {'enum': ['Cancer', 'Healthy']} + assert 'Cancer' in json_validation_schema['properties']['Diagnosis']['enum'] except: # Should only fail if no source node is provided. assert source_node == '' From 0271f7faebe5bf6db434b06f937698fd1d2f1385 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 11 Oct 2023 10:48:41 -0700 Subject: [PATCH 133/239] Revert "Revert "update expected schema validator errors"" This reverts commit ec06372c4f67fe5e87f5da6de25e6feed52d5158. 
--- tests/test_validator.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/test_validator.py b/tests/test_validator.py index c4e743c80..0278fabc6 100644 --- a/tests/test_validator.py +++ b/tests/test_validator.py @@ -101,6 +101,8 @@ def test_dag(self, helpers): # nodes could be in different order so need to account for that expected_errors = ['Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: Patient and PatientID, please remove this loop from your model and submit again.', - 'Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: PatientID and Patient, please remove this loop from your model and submit again.'] + 'Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: PatientID and Patient, please remove this loop from your model and submit again.', + 'Schematic requires models be a directed acyclic graph (DAG). 
Your graph is not a DAG, we found a loop between: Diagnosis and Diagnosis, please remove this loop from your model and submit again.'] + assert validator_errors[0] in expected_errors From 0b02a8e1b2c9399209508fa4ff2c8aa3998885f8 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 11 Oct 2023 11:44:54 -0700 Subject: [PATCH 134/239] add tests for TestDataModelJsonLd, test_init, and test_base_jsonld_template --- tests/test_schemas.py | 34 ++++++++++++++++++++++++++++++++-- 1 file changed, 32 insertions(+), 2 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 37e4ac993..80748ef6c 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -1,5 +1,6 @@ import os import logging +import networkx as nx import pandas as pd import pytest @@ -400,8 +401,37 @@ def test_get_json_validation_schema(self, helpers, data_model, source_node, sche assert source_node == '' class TestDataModelJsonLd: - def test_base_jsonld_template(self): - return + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + def test_init(self, helpers, data_model): + # Get Graph + graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + + # Instantiate DataModelJsonLD: + data_model_jsonld = DataModelJsonLD(Graph=graph_data_model) + + # Test that __init__ is being set up properly + assert type(data_model_jsonld.graph) == nx.MultiDiGraph + assert type(data_model_jsonld.rel_dict) == dict + assert 'required' in data_model_jsonld.rel_dict + assert type(data_model_jsonld.DME) == DataModelGraphExplorer + assert data_model_jsonld.output_path == '' + + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + def test_base_jsonld_template(self, helpers, data_model): + # Get Graph + graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + + # Instantiate DataModelJsonLD + data_model_jsonld = 
DataModelJsonLD(Graph=graph_data_model) + + # Get base jsonld template + base_template = data_model_jsonld.base_jsonld_template() + + # Test base template is constructed as expected + assert '@context' in base_template + assert '@graph' in base_template + assert '@id' in base_template + def test_create_object(self): return def test_add_contexts_to_entries(self): From b43abdda85d957ce8a3ff226b7705134ea56655d Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 11 Oct 2023 12:58:42 -0700 Subject: [PATCH 135/239] add tests for TestDataModelJsonLD, test_init, test_base_jsonld_template, test_create_object, test_add_contexts_to_entries --- tests/test_schemas.py | 94 ++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 88 insertions(+), 6 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 37e4ac993..2f73c2c33 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -1,6 +1,6 @@ import os import logging - +import numpy as np import pandas as pd import pytest @@ -400,11 +400,93 @@ def test_get_json_validation_schema(self, helpers, data_model, source_node, sche assert source_node == '' class TestDataModelJsonLd: - def test_base_jsonld_template(self): - return - def test_create_object(self): - return - def test_add_contexts_to_entries(self): + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + def test_init(self, helpers, data_model): + # Get Graph + graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + + # Instantiate DataModelJsonLD + data_model_jsonld = DataModelJsonLD(Graph=graph_data_model) + + # Test that __init__ is being set up properly + assert type(data_model_jsonld.graph) == nx.MultiDiGraph + assert type(data_model_jsonld.rel_dict) == dict + assert 'required' in data_model_jsonld.rel_dict + assert type(data_model_jsonld.DME) == DataModelGraphExplorer + assert data_model_jsonld.output_path == '' + + 
@pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + def test_base_jsonld_template(self, helpers, data_model): + # Get Graph + graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + + # Instantiate DataModelJsonLD + data_model_jsonld = DataModelJsonLD(Graph=graph_data_model) + + # Get base jsonld template + base_template = data_model_jsonld.base_jsonld_template() + + # Test base template is constructed as expected + assert '@context' in base_template + assert '@graph' in base_template + assert '@id' in base_template + + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + @pytest.mark.parametrize("template_type", ['property', 'class'], ids=['property', 'class']) + @pytest.mark.parametrize("node", ['', 'Patient'], ids=['no node', 'Patient']) + def test_create_object(self, helpers, data_model, template_type, node): + # Get Graph + graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + + # Instantiate DataModelJsonLD + data_model_jsonld = DataModelJsonLD(Graph=graph_data_model) + + # Get empty template + if template_type == 'property': + template = data_model_jsonld.property_template() + elif template_type == 'class': + template = data_model_jsonld.class_template() + + try: + # Fill out template for given node. 
+ object_template = data_model_jsonld.create_object(template=template, node=node) + # Ensure template keys are present (not all original keys will be present due to cleaning empty values): + except: + # Should only fail if no node is given + assert node == '' + + if 'object_template' in locals(): + # Check that object template keys match the expected keys + actual_keys = list(object_template.keys()) + if template_type == 'property': + expected_keys = ['@id', '@type', 'rdfs:comment', 'rdfs:label', 'schema:isPartOf', 'sms:displayName', 'sms:required', 'sms:validationRules'] + elif template_type == 'class': + expected_keys = ['@id', '@type', 'rdfs:comment', 'rdfs:label', 'rdfs:subClassOf', 'schema:isPartOf', 'sms:displayName', 'sms:required', 'sms:requiresDependency', 'sms:validationRules'] + assert (set(actual_keys) - set(expected_keys)) == (set(expected_keys) - set(actual_keys)) + + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + @pytest.mark.parametrize("template_type", ['property', 'class'], ids=['property', 'class']) + def test_add_contexts_to_entries(self, helpers, data_model, template_type): + # Will likely need to change when contexts added to model. + # Get Graph + graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + + # Instantiate DataModelJsonLD + data_model_jsonld = DataModelJsonLD(Graph=graph_data_model) + + # Get empty template + if template_type == 'property': + template = data_model_jsonld.property_template() + elif template_type == 'class': + template = data_model_jsonld.class_template() + + # Fill out template for given node. 
+ object_template = data_model_jsonld.create_object(template=template, node='Patient') + + if 'sms:required' in object_template: + assert 'sms' in object_template['sms:required'] + if '@id' in object_template: + assert 'bts' in object_template['@id'] return def test_clean_template(self): return From bd5b45051b806fa5b0ee2360652ab17e0e392bda Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 11 Oct 2023 14:07:15 -0700 Subject: [PATCH 136/239] add test for test_clean_template --- tests/test_schemas.py | 35 ++++++++++++++++++++++++++++++++--- 1 file changed, 32 insertions(+), 3 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 2f73c2c33..8d3019aa8 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -487,9 +487,38 @@ def test_add_contexts_to_entries(self, helpers, data_model, template_type): assert 'sms' in object_template['sms:required'] if '@id' in object_template: assert 'bts' in object_template['@id'] - return - def test_clean_template(self): - return + + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + #@pytest.mark.parametrize("node", ['Patient', 'CheckURL'], ids=['Patient', 'CheckURL']) + def test_clean_template(self, helpers, data_model): + # TODO: This will need to change with contexts bc they are hard coded here. 
+ # Get Graph + graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + + # Instantiate DataModelJsonLD + data_model_jsonld = DataModelJsonLD(Graph=graph_data_model) + + # Get empty template + template = data_model_jsonld.class_template() + assert 'sms:requiresDependency' in template + + # Fill out some mock entries in the template: + template['@id'] == 'bts:CheckURL' + template['rdfs:label'] == 'CheckURL' + data_model_relationships=data_model_jsonld.dmr.relationships_dictionary + + # Clean template + data_model_jsonld.clean_template(template=template, data_model_relationships=data_model_relationships) + + # Look for expected changes after cleaning + + # Check that expected JSONLD default is added + assert template['sms:required'] == 'sms:false' + assert template['sms:validationRules'] == [] + + # Check that non-required JSONLD keys are removed. + assert 'sms:requiresDependency' not in template + def test_strip_context(self): return def test_reorder_template_entries(self): From 5158b86f039a4c0086fe6b3d48914021ceeae33e Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 11 Oct 2023 14:19:27 -0700 Subject: [PATCH 137/239] add test for test_strip_context --- tests/test_schemas.py | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 8d3019aa8..5f15f3079 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -489,7 +489,6 @@ def test_add_contexts_to_entries(self, helpers, data_model, template_type): assert 'bts' in object_template['@id'] @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) - #@pytest.mark.parametrize("node", ['Patient', 'CheckURL'], ids=['Patient', 'CheckURL']) def test_clean_template(self, helpers, data_model): # TODO: This will need to change with contexts bc they are hard coded here. 
# Get Graph @@ -518,9 +517,22 @@ def test_clean_template(self, helpers, data_model): # Check that non-required JSONLD keys are removed. assert 'sms:requiresDependency' not in template - - def test_strip_context(self): - return + + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + @pytest.mark.parametrize("context_value", ['@id', 'sms:required'], ids=['remove_at', 'remove_sms']) + def test_strip_context(self, helpers, data_model, context_value): + # Get Graph + graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + + # Instantiate DataModelJsonLD + data_model_jsonld = DataModelJsonLD(Graph=graph_data_model) + + stripped_contex = data_model_jsonld.strip_context(context_value=context_value) + if '@id' == context_value: + assert stripped_contex == ('', 'id') + elif 'sms:required' == context_value: + assert stripped_contex == ('sms', 'required') + def test_reorder_template_entries(self): return def test_property_template(self): From 3f66bfc85cace40b2e84280b53369022d2781952 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 11 Oct 2023 15:07:13 -0700 Subject: [PATCH 138/239] add test for test_reorder_template_entries --- tests/test_schemas.py | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 5f15f3079..cc5a493c9 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -533,8 +533,33 @@ def test_strip_context(self, helpers, data_model, context_value): elif 'sms:required' == context_value: assert stripped_contex == ('sms', 'required') - def test_reorder_template_entries(self): - return + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + @pytest.mark.parametrize("valid_values", [[], ['Other', 'Female', 'Male'], ['A', 'Bad', 'Entry']], ids=['Empty List', 'Disordered List', 'Incorrect List']) + def 
test_reorder_template_entries(self, helpers, data_model, valid_values): + # Note the way test_reorder_template_entries works, is that as long as an entry has recordings in the template + # even if they are incorrect, they will be corrected within this function. + # Get Graph + graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + + # Instantiate DataModelJsonLD + data_model_jsonld = DataModelJsonLD(Graph=graph_data_model) + + # Get empty template + template = data_model_jsonld.class_template() + + # Fill out template with some 'Sex' information + template['@id'] = 'Sex' + template['rdfs:label'] = 'Sex' + template['sms:required'] = 'sms:false' + template['schema:rangeIncludes'] = valid_values + + # Now reorder: + data_model_jsonld.reorder_template_entries(template=template) + if valid_values: + assert template['schema:rangeIncludes'] == [{'@id': 'bts:Female'}, {'@id': 'bts:Male'}, {'@id': 'bts:Other'}] + else: + assert template['schema:rangeIncludes'] == [] + def test_property_template(self): return def test_class_template(self): From 77a38377705d2940cedf9db03d12c289f32175bd Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 11 Oct 2023 15:11:32 -0700 Subject: [PATCH 139/239] add test for test_property_template --- tests/test_schemas.py | 27 +++++++++++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index cc5a493c9..162e9a3ac 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -560,8 +560,31 @@ def test_reorder_template_entries(self, helpers, data_model, valid_values): else: assert template['schema:rangeIncludes'] == [] - def test_property_template(self): - return + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + def test_property_template(self, helpers, data_model): + # Get Graph + graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + + # Instantiate 
DataModelJsonLD + data_model_jsonld = DataModelJsonLD(Graph=graph_data_model) + + # Get Property Template + property_template = data_model_jsonld.property_template() + + expected_property_template = { + "@id": "", + "@type": "rdf:Property", + "rdfs:comment": "", + "rdfs:label": "", + "schema:domainIncludes": [], + "schema:rangeIncludes": [], + "schema:isPartOf": {}, + "sms:displayName": "", + "sms:required": "sms:false", + "sms:validationRules": [], + } + assert property_template == expected_property_template + def test_class_template(self): return def test_generate_jsonld_object(self): From 78cf6e78352600534b5b6ab518721b4fc7e30c91 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 11 Oct 2023 15:16:07 -0700 Subject: [PATCH 140/239] add tests for test_class_template --- tests/test_schemas.py | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 162e9a3ac..83077005b 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -585,8 +585,33 @@ def test_property_template(self, helpers, data_model): } assert property_template == expected_property_template - def test_class_template(self): - return + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + def test_class_template(self, helpers, data_model): + # Get Graph + graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + + # Instantiate DataModelJsonLD + data_model_jsonld = DataModelJsonLD(Graph=graph_data_model) + + # Get Class Template + class_template = data_model_jsonld.class_template() + + expected_class_template = { + "@id": "", + "@type": "rdfs:Class", + "rdfs:comment": "", + "rdfs:label": "", + "rdfs:subClassOf": [], + "schema:isPartOf": {}, + "schema:rangeIncludes": [], + "sms:displayName": "", + "sms:required": "sms:false", + "sms:requiresDependency": [], + "sms:requiresComponent": [], + "sms:validationRules": [], + } + 
assert class_template == expected_class_template + def test_generate_jsonld_object(self): return def test_convert_graph_to_jsonld(self): From 8cd9c60b4cf3d2f8e16c2948380116d19e1ac89b Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 11 Oct 2023 15:45:16 -0700 Subject: [PATCH 141/239] add tests for test_generate_jsonld_object --- tests/test_schemas.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 83077005b..4b06c5969 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -612,8 +612,20 @@ def test_class_template(self, helpers, data_model): } assert class_template == expected_class_template - def test_generate_jsonld_object(self): - return + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + def test_generate_jsonld_object(self, helpers, data_model): + # Check that JSONLD object is being made, and has some populated entries. + + # Get Graph + graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + + # Instantiate DataModelJsonLD + data_model_jsonld = DataModelJsonLD(Graph=graph_data_model) + jsonld_dm = data_model_jsonld.generate_jsonld_object() + + assert list(jsonld_dm.keys()) == ['@context', '@graph', '@id'] + assert len(jsonld_dm['@graph']) > 1 + def test_convert_graph_to_jsonld(self): return class TestSchemas: From bf8b80c89d59a57e4a01f719d11691b46ee82769 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 11 Oct 2023 15:54:02 -0700 Subject: [PATCH 142/239] add tests for test_convert_graph_to_jsonld --- tests/test_schemas.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 4b06c5969..8ec2181ec 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -12,7 +12,7 @@ from schematic.schemas.data_model_edges import DataModelEdges from schematic.schemas.data_model_graph import 
DataModelGraphExplorer from schematic.schemas.data_model_relationships import DataModelRelationships -from schematic.schemas.data_model_jsonld import DataModelJsonLD +from schematic.schemas.data_model_jsonld import DataModelJsonLD, convert_graph_to_jsonld from schematic.schemas.data_model_json_schema import DataModelJSONSchema from schematic.schemas.data_model_parser import DataModelParser, DataModelCSVParser, DataModelJSONLDParser @@ -626,8 +626,16 @@ def test_generate_jsonld_object(self, helpers, data_model): assert list(jsonld_dm.keys()) == ['@context', '@graph', '@id'] assert len(jsonld_dm['@graph']) > 1 - def test_convert_graph_to_jsonld(self): - return + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + def test_convert_graph_to_jsonld(self, helpers, data_model): + # Get Graph + graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + + # Generate JSONLD + jsonld_dm = convert_graph_to_jsonld(Graph=graph_data_model) + assert list(jsonld_dm.keys()) == ['@context', '@graph', '@id'] + assert len(jsonld_dm['@graph']) > 1 + class TestSchemas: def test_convert_csv_to_graph(self, helpers): return From 7fd08b44ce6ecae6940bb6cdd0ad8d023462b7be Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 11 Oct 2023 16:05:58 -0700 Subject: [PATCH 143/239] add nx import --- tests/test_schemas.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 8ec2181ec..0ef15d45b 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -1,5 +1,6 @@ import os import logging +import networkx as nx import numpy as np import pandas as pd import pytest From 34097008c9dc99889c5babf267b2c36262994b51 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 11 Oct 2023 16:31:03 -0700 Subject: [PATCH 144/239] add data model dict and change assert statements --- tests/test_schemas.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) 
diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 8beb9789e..f475d4f82 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -17,6 +17,12 @@ logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) +ogger = logging.getLogger(__name__) + +DATA_MODEL_DICT = { + 'example.model.csv': "CSV", + 'example.model.jsonld': "JSONLD" + } def generate_graph_data_model(helpers, data_model_name): """ @@ -63,7 +69,7 @@ def test_get_base_schema_path(self, helpers): assert os.path.basename(biothings_path) == "biothings.model.jsonld" - @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) def test_get_model_type(self, helpers, data_model): # Instantiate Data model parser. data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) @@ -72,12 +78,13 @@ def test_get_model_type(self, helpers, data_model): assert (data_model == 'example.model.csv') == (data_model_parser.model_type == 'CSV') assert (data_model == 'example.model.jsonld') == (data_model_parser.model_type == 'JSONLD') - @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) def test_parse_model(self, helpers, data_model): '''Test that the correct parser is called and that a dictionary is returned in the expected structure. ''' # Instantiate Data model parser. data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) + # Parse Model model_dict = data_model_parser.parse_model() @@ -85,10 +92,10 @@ def test_parse_model(self, helpers, data_model): attribute_key = list(model_dict.keys())[0] # Check that the structure of the model dictionary conforms to expectations. 
- assert True == (type(model_dict) == dict) - assert True == (attribute_key in model_dict.keys()) - assert True == ('Relationships' in model_dict[attribute_key]) - assert True == ('Attribute' in model_dict[attribute_key]['Relationships']) + assert type(model_dict) == dict + assert attribute_key in model_dict.keys() + assert 'Relationships' in model_dict[attribute_key] + assert 'Attribute' in model_dict[attribute_key]['Relationships'] class TestDataModelCsvParser: def test_check_schema_definition(self): From 99c57dddef865a677bef34458ea21be276c950ca Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 12 Oct 2023 10:38:23 -0700 Subject: [PATCH 145/239] add comments about the state of the graph --- schematic/schemas/data_model_edges.py | 2 +- schematic/schemas/data_model_graph.py | 8 ++++---- schematic/schemas/data_model_jsonld.py | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py index 9680e5e1b..2e83f9dc6 100644 --- a/schematic/schemas/data_model_edges.py +++ b/schematic/schemas/data_model_edges.py @@ -12,7 +12,7 @@ def __init__(self): def generate_edge(self, G: nx.MultiDiGraph, node: str, all_node_dict: dict, attr_rel_dict: dict, edge_relationships: dict) -> nx.MultiDiGraph: """Generate an edge between a target node and relevant other nodes the data model. In short, does this current node belong to a recorded relationship in the attribute, relationshps dictionary. Go through each attribute and relationship to find where the node may be. Args: - G, nx.MultiDiGraph: networkx graph representation of the data model, that is in the process of being fully built. + G, nx.MultiDiGraph: networkx graph representation of the data model, that is in the process of being fully built. At this point, all the nodes would have been added, and edges are being added per target node. 
node, str: target node to look for connecting edges all_node_dict, dict: a dictionary containing information about all nodes in the model key: node display name diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index 0e5a9d408..d3664d51c 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -62,7 +62,7 @@ def __call__(cls, *args, **kwargs): class DataModelGraph(): ''' Generate graph network (networkx) from the attributes and relationships returned - fromt the data model parser. + from the data model parser. Create a singleton. ''' @@ -91,7 +91,7 @@ def __init__(self, attribute_relationships_dict: dict) -> None: def generate_data_model_graph(self) -> nx.MultiDiGraph: - '''Generate NetworkX Graph from the Relationships/attributes dictionary + '''Generate NetworkX Graph from the Relationships/attributes dictionary, the graph is built by first adding all nodes to the graph, then connecting nodes by the relationships defined in the attributes_relationship dictionary. Returns: G: nx.MultiDiGraph, networkx graph representation of the data model ''' @@ -114,7 +114,7 @@ def generate_data_model_graph(self) -> nx.MultiDiGraph: # Add each node to the all_node_dict to be used for generating edges all_node_dict[node] = node_dict - # Generate node and attach information + # Generate node and attach information (attributes) to each node G = self.dmn.generate_node(G, node_dict) ## Connect nodes via edges @@ -130,7 +130,7 @@ def __init__(self, Args: G: nx.MultiDiGraph, networkx graph representation of the data model ''' - self.graph = G + self.graph = G #At this point the graph is expected to be fully formed. 
self.dmr = DataModelRelationships() self.rel_dict = self.dmr.relationships_dictionary diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 2b4b2b19d..4b5bc2447 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -14,7 +14,7 @@ class DataModelJsonLD(object): def __init__(self, Graph: nx.MultiDiGraph, output_path:str = ''): # Setup - self.graph = Graph + self.graph = Graph # Graph would be fully made at this point. self.dmr = DataModelRelationships() self.rel_dict = self.dmr.relationships_dictionary self.DME = DataModelGraphExplorer(self.graph) From d3d89b55e599c1b8a66bb0b30f93665fd5f21349 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 12 Oct 2023 11:00:33 -0700 Subject: [PATCH 146/239] remove unused imports --- schematic/models/metadata.py | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/schematic/models/metadata.py b/schematic/models/metadata.py index 6aa75821b..748fdfab3 100644 --- a/schematic/models/metadata.py +++ b/schematic/models/metadata.py @@ -1,18 +1,11 @@ -import json import logging -import string - -import numpy as np -import pandas as pd -import re import networkx as nx -from jsonschema import Draft7Validator, exceptions, validate, ValidationError, FormatError +from jsonschema import ValidationError from os.path import exists # allows specifying explicit variable types from typing import Any, Dict, Optional, Text, List - from schematic.manifest.generator import ManifestGenerator from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer from schematic.schemas.data_model_parser import DataModelParser @@ -25,10 +18,8 @@ from schematic.utils.df_utils import load_df -#from schematic.models.validate_attribute import ValidateAttribute #looks unused. 
from schematic.models.validate_manifest import validate_all - logger = logging.getLogger(__name__) @@ -309,7 +300,7 @@ def submit_metadata_manifest( access_token: str = None, project_scope: List = None, table_manipulation: str = 'replace' - ) -> string: + ) -> str: """Wrap methods that are responsible for validation of manifests for a given component, and association of the same manifest file with a specified dataset. Args: From 4ce312b2daa749f94993f074d7af7cd433e151b3 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 12 Oct 2023 11:16:02 -0700 Subject: [PATCH 147/239] change key to rel_key to be more descriptive --- schematic/schemas/data_model_edges.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py index 2e83f9dc6..5940c029d 100644 --- a/schematic/schemas/data_model_edges.py +++ b/schematic/schemas/data_model_edges.py @@ -21,7 +21,7 @@ def generate_edge(self, G: nx.MultiDiGraph, node: str, all_node_dict: dict, attr {Attribute Display Name: { Relationships: { CSV Header: Value}}} - edge_relationships: dict, key: csv_header if the key represents a value relationship. + edge_relationships: dict, rel_key: csv_header if the key represents a value relationship. Returns: G, nx.MultiDiGraph: networkx graph representation of the data model, that has had new edges attached. 
@@ -31,7 +31,7 @@ def generate_edge(self, G: nx.MultiDiGraph, node: str, all_node_dict: dict, attr # Get the relationships associated with the current attribute relationships = relationship['Relationships'] # Add edge relationships one at a time - for key, csv_header in edge_relationships.items(): + for rel_key, csv_header in edge_relationships.items(): # If the attribute has a relationship that matches the current edge being added if csv_header in relationships.keys(): # If the current node is part of that relationship and is not the current node @@ -39,7 +39,7 @@ def generate_edge(self, G: nx.MultiDiGraph, node: str, all_node_dict: dict, attr if node in relationships[csv_header] and node != attribute_display_name: # Generate weights based on relationship type. # Weights will allow us to preserve the order of entries order in the data model in later steps. - if key == 'domainIncludes': + if rel_key == 'domainIncludes': # For 'domainIncludes'/properties relationship, users do not explicitly provide a list order (like for valid values, or dependsOn) # so we pull the order/weight from the order of the attributes. 
weight = list(attr_rel_dict.keys()).index(attribute_display_name) @@ -50,15 +50,15 @@ def generate_edge(self, G: nx.MultiDiGraph, node: str, all_node_dict: dict, attr # For single (non list) entries, add weight of 0 weight = 0 # Get the edge_key for the edge relationship we are adding at this step - edge_key = self.data_model_relationships[key]['edge_key'] + edge_key = self.data_model_relationships[rel_key]['edge_key'] # Add edges, in a manner that preserves directionality # TODO: rewrite to use edge_dir - if key in ['subClassOf', 'domainIncludes']: + if rel_key in ['subClassOf', 'domainIncludes']: G.add_edge(all_node_dict[node]['label'], all_node_dict[attribute_display_name]['label'], key=edge_key, weight=weight) else: G.add_edge(all_node_dict[attribute_display_name]['label'], all_node_dict[node]['label'], key=edge_key, weight=weight) # Add add rangeIncludes/valid value relationships in reverse as well, making the attribute the parent of the valid value. - if key == 'rangeIncludes': + if rel_key == 'rangeIncludes': G.add_edge(all_node_dict[attribute_display_name]['label'], all_node_dict[node]['label'], key='parentOf', weight=weight) return G From 54154974e0429e9cab878e543b7f16705b220672 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 12 Oct 2023 11:52:13 -0700 Subject: [PATCH 148/239] update typing and comments --- schematic/schemas/data_model_json_schema.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/schematic/schemas/data_model_json_schema.py b/schematic/schemas/data_model_json_schema.py index dcecc8f6f..4adba3816 100644 --- a/schematic/schemas/data_model_json_schema.py +++ b/schematic/schemas/data_model_json_schema.py @@ -14,7 +14,7 @@ def __init__(self, jsonld_path: str, graph:nx.MultiDiGraph, ): # TODO: Change jsonld_path to data_model_path (can work with CSV too) self.jsonld_path = jsonld_path - self.graph = graph + self.graph = graph # Graph would be fully made at this point. 
self.DME = DataModelGraphExplorer(self.graph) self.dmr = DataModelRelationships() self.rel_dict = self.dmr.relationships_dictionary @@ -47,7 +47,7 @@ def get_array_schema( def get_non_blank_schema( self, node_name: str - ) -> Dict: # can't define heterogenous Dict generic types + ) -> Dict[str, dict[str, Any]]: # can't define heterogenous Dict generic types """Get a schema rule that does not allow null or empty values. Args: @@ -81,10 +81,8 @@ def get_range_schema( return schema_node_range - def get_json_validation_schema(self, source_node: str, schema_name: str) -> Dict: + def get_json_validation_schema(self, source_node: str, schema_name: str) -> Dict[str, dict[str, Any]]: ''' - A refactor of get_json_schema_requirements() from the - schema generator. Consolidated method that aims to gather dependencies and value constraints across terms / nodes in a schema.org schema and store them in a jsonschema /JSON Schema schema. It does so for any given node in the schema.org schema (recursively) using the given node as starting point in the following manner: @@ -346,12 +344,13 @@ def get_json_validation_schema(self, source_node: str, schema_name: str) -> Dict if prefix_ext == ".model": prefix = prefix_root json_schema_log_file = f"{prefix}.{source_node}.schema.json" - + ''' + # Commenting out loggins since the JSON Schema file is not currently saved. logger.info( "The JSON schema file can be inspected by setting the following " - "nested key in the configuration: (model > input > log_location)." + "nested key in the configuration: (model > location)." 
) logger.info(f"JSON schema file log stored as {json_schema_log_file}") - + ''' return json_schema \ No newline at end of file From f16055baa55e7efec00f6ffa6cb2048fa4814850 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 12 Oct 2023 12:06:14 -0700 Subject: [PATCH 149/239] update function name and reference, also typing in data_model_nodes --- schematic/schemas/data_model_graph.py | 2 +- schematic/schemas/data_model_nodes.py | 19 ++++++------------- 2 files changed, 7 insertions(+), 14 deletions(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index d3664d51c..140c1cbf2 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -99,7 +99,7 @@ def generate_data_model_graph(self) -> nx.MultiDiGraph: edge_relationships = self.dmr.define_edge_relationships() # Find all nodes - all_nodes = self.dmn.gather_all_nodes(attr_rel_dict=self.attribute_relationships_dict) + all_nodes = self.dmn.gather_all_nodes_in_model(attr_rel_dict=self.attribute_relationships_dict) # Instantiate NetworkX MultiDigraph G = nx.MultiDiGraph() diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index 8e50401ae..99c82b3b6 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -19,14 +19,9 @@ def __init__(self, attribute_relationships_dict): self.value_relationships = self.data_model_relationships.define_value_relationships() self.edge_relationships_dictionary = self.data_model_relationships.define_edge_relationships() self.properties = self.get_data_model_properties(attr_rel_dict=attribute_relationships_dict) + # retrieve a list of relationship types that will produce nodes.
+ self.node_relationships =list(self.edge_relationships_dictionary.values()) - ''' - def node_present(self, G, node_name): - if node_name in G.nodes(): - return True - else: - return False - ''' def gather_nodes(self, attr_info: tuple) -> list: """Take in a tuple containing attriute name and relationship dictionary, and find all nodes defined in attribute information. Args: @@ -36,8 +31,6 @@ def gather_nodes(self, attr_info: tuple) -> list: Note: Extracting nodes in this fashion ensures order is preserved. """ - # retrieve a list of relationship types that will produce nodes. - self.node_relationships =list(self.edge_relationships_dictionary.values()) # Extract attribute and relationship dictionary attribute, relationship = attr_info @@ -52,7 +45,7 @@ def gather_nodes(self, attr_info: tuple) -> list: for node in relationships[rel]]) return nodes - def gather_all_nodes(self, attr_rel_dict: dict): + def gather_all_nodes_in_model(self, attr_rel_dict: dict)->list: """Gather all nodes in the data model, in order. Args: attr_rel_dict, dict: generated in data_model_parser @@ -72,7 +65,7 @@ def gather_all_nodes(self, attr_rel_dict: dict): all_nodes = list(dict.fromkeys(all_nodes).keys()) return all_nodes - def get_rel_node_dict_info(self, relationship: str) -> tuple: + def get_rel_node_dict_info(self, relationship: str) -> Optional[tuple[str, dict]]: """For each display name get defaults for nodes. Args: relationship, str: relationship key to match. @@ -156,8 +149,8 @@ def run_rel_functions(self, rel_func:callable, node_display_name:str='', key:str else: # Raise Error if the rel_func provided is not captured. 
- raise KeyError(f"The function provided ({rel_func}) to define the relationship {key} is not captured in the function run_rel_functions, please update.") - return + raise ValueError(f"The function provided ({rel_func}) to define the relationship {key} is not captured in the function run_rel_functions, please update.") + def generate_node_dict(self, node_display_name: str, attr_rel_dict: dict) -> dict: """Gather information to be attached to each node. Args: From a8742c36db9ce47f3dd76b6701aac14f4e2d8817 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 12 Oct 2023 12:08:47 -0700 Subject: [PATCH 150/239] add comment about graph state --- schematic/manifest/generator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index 84c13d30f..428e56372 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -37,7 +37,7 @@ class ManifestGenerator(object): def __init__( self, path_to_json_ld: str, # JSON-LD file to be used for generating the manifest - graph: nx.MultiDiGraph, + graph: nx.MultiDiGraph, # At this point, the graph is fully formed. 
alphabetize_valid_values: str = 'ascending', title: str = None, # manifest sheet title root: str = None, From 9d09d805668948185c1f5e8a0b53cfee0b189934 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 12 Oct 2023 12:54:23 -0700 Subject: [PATCH 151/239] to data_model_parse, rewrite try, except, move self ref, and update docstrings --- schematic/schemas/data_model_parser.py | 41 +++++++++++++------------- 1 file changed, 20 insertions(+), 21 deletions(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index cac6f9321..1937e72fa 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -57,7 +57,7 @@ def get_model_type(self, path_to_data_model: str) -> str: ''' return pathlib.Path(path_to_data_model).suffix.replace('.', '').upper() - def parse_base_model(self): + def parse_base_model(self)-> Dict: '''Parse base data model that new model could be built upon. Returns: base_model, dict: @@ -75,13 +75,15 @@ def parse_base_model(self): base_model = jsonld_parser.parse_jsonld_model(base_model_path) return base_model - def parse_model(self): + def parse_model(self)->Dict[str, dict[str, Any]]: '''Given a data model type, instantiate and call the appropriate data model parser. Returns: model_dict, dict: {Attribute Display Name: { Relationships: { CSV Header: Value}}} + Raises: + Value Error if an incorrect model type is passed. Note: in future will add base model parsing in this step too and extend new model off base model. 
''' #base_model = self.parse_base_model() @@ -95,7 +97,6 @@ def parse_model(self): model_dict = jsonld_parser.parse_jsonld_model(self.path_to_data_model) else: raise ValueError(f"Schematic only accepts models of type CSV or JSONLD, you provided a model type {self.model_type}, please resubmit in the proper format.") - return model_dict class DataModelCSVParser(): @@ -110,26 +111,28 @@ def __init__( self.edge_relationships_dictionary = self.dmr.define_edge_relationships() # Load required csv headers self.required_headers = self.dmr.define_required_csv_headers() + # Get the type for each value that needs to be submitted. + # using csv_headers as keys to match required_headers/relationship_types + self.rel_val_types = {v['csv_header']:v['type']for k, v in self.rel_dict.items() if 'type' in v.keys()} def check_schema_definition(self, model_df: pd.DataFrame) -> bool: """Checks if a schema definition data frame contains the right required headers. Args: - schema_definition: a pandas dataframe containing schema definition; see example here: https://docs.google.com/spreadsheets/d/1J2brhqO4kpeHIkNytzlqrdIiRanXDr6KD2hqjOTC9hs/edit#gid=0 - Raises: Exception + model_df: a pandas dataframe containing schema definition; see example here: https://docs.google.com/spreadsheets/d/1J2brhqO4kpeHIkNytzlqrdIiRanXDr6KD2hqjOTC9hs/edit#gid=0 + Raises: Exception if model_df does not have the required headers. """ - try: - if set(self.required_headers).issubset(set(list(model_df.columns))): - return - elif "Requires" in list(model_df.columns) or "Requires Component" in list( - model_df.columns - ): - raise ValueError( - "The input CSV schema file contains the 'Requires' and/or the 'Requires " - "Component' column headers. These columns were renamed to 'DependsOn' and " - "'DependsOn Component', respectively. Switch to the new column names." 
- ) + if set(self.required_headers).issubset(set(list(model_df.columns))): logger.debug("Schema definition csv ready for processing!") - except: + return + elif "Requires" in list(model_df.columns) or "Requires Component" in list( + model_df.columns + ): + raise ValueError( + "The input CSV schema file contains the 'Requires' and/or the 'Requires " + "Component' column headers. These columns were renamed to 'DependsOn' and " + "'DependsOn Component', respectively. Switch to the new column names." + ) + elif not set(self.required_headers).issubset(set(list(model_df.columns))): raise ValueError( f"Schema extension headers: {set(list(model_df.columns))} " f"do not match required schema headers: {self.required_headers}" @@ -150,10 +153,6 @@ def gather_csv_attributes_relationships(self, model_df: pd.DataFrame) -> Dict: # Check csv schema follows expectations. self.check_schema_definition(model_df) - # Get the type for each value that needs to be submitted. - # using csv_headers as keys to match required_headers/relationship_types - self.rel_val_types = {v['csv_header']:v['type']for k, v in self.rel_dict.items() if 'type' in v.keys()} - # get attributes from Attribute column attributes = model_df[list(self.required_headers)].to_dict("records") From 809a8176fdc7318bf80ab2ba608b4de9a98de97e Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 12 Oct 2023 13:57:42 -0700 Subject: [PATCH 152/239] add additional functions to simplify gather_csv_attributes_relationships function --- schematic/schemas/data_model_parser.py | 47 ++++++++++++++++---------- schematic/utils/schema_utils.py | 5 ++- 2 files changed, 34 insertions(+), 18 deletions(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 1937e72fa..eff292bde 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -5,6 +5,7 @@ from schematic.utils.df_utils import load_df from schematic.utils.io_utils import load_json +from 
schematic.utils.schema_utils import attr_dict_template from schematic.schemas.data_model_relationships import ( DataModelRelationships @@ -139,8 +140,32 @@ def check_schema_definition(self, model_df: pd.DataFrame) -> bool: ) return + def parse_entry(self, attr:dict, relationship:str)->Any: + '''Parse attr entry baed on type + Args: + attr, dict: single row of a csv model in dict form, where only the required headers are keys. Values are the entries under each header. + relationship, str: one of the header relationships to parse the entry of. + Returns: + parsed_rel_entry, any: parsed entry for downstream processing based on the entry type. + ''' + + rel_val_type = self.rel_val_types[relationship] + # Parse entry based on type: + # If the entry should be preserved as a bool dont convert to str. + if rel_val_type == bool and type(attr[relationship]) == bool: + parsed_rel_entry = attr[relationship] + # Move strings to list if they are comma separated. Schema order is preserved. + elif rel_val_type == list: + parsed_rel_entry = attr[relationship].strip().split(',') + parsed_rel_entry = [r.strip() for r in parsed_rel_entry] + # Convert value string if dictated by rel_val_type, strip whitespace. + elif rel_val_type == str: + parsed_rel_entry = str(attr[relationship]).strip() + else: + raise ValueError("The value type recorded for this relationship, is not currently supported for CSV parsing. Please check with your DCC.") + return parsed_rel_entry - def gather_csv_attributes_relationships(self, model_df: pd.DataFrame) -> Dict: + def gather_csv_attributes_relationships(self, model_df: pd.DataFrame) -> Dict[str, dict[str, Any]]: '''Parse csv into a attributes:relationshps dictionary to be used in downstream efforts. Args: model_df: pd.DataFrame, data model that has been loaded into pandas DataFrame. 
@@ -161,26 +186,14 @@ def gather_csv_attributes_relationships(self, model_df: pd.DataFrame) -> Dict: attr_rel_dictionary = {} for attr in attributes: + attribute_name=attr['Attribute'] # Add attribute to dictionary - attr_rel_dictionary.update({attr['Attribute']: {'Relationships': {}, - }, - } - ) + attr_rel_dictionary.update(attr_dict_template(attribute_name)) # Fill in relationship info for each attribute. for relationship in relationship_types: - rel_val_type = self.rel_val_types[relationship] if not pd.isnull(attr[relationship]): - # Fill in relationships based on type: - if rel_val_type == bool and type(attr[relationship]) == bool: - parsed_rel_entry = attr[relationship] - # Move strings to list if they are comma separated. Schema order is preserved. - elif rel_val_type == list: - parsed_rel_entry = attr[relationship].strip().split(',') - parsed_rel_entry = [r.strip() for r in parsed_rel_entry] - # Extract string from list if necessary. - elif rel_val_type == str: - parsed_rel_entry = str(attr[relationship]).strip() - attr_rel_dictionary[attr['Attribute']]['Relationships'].update({relationship:parsed_rel_entry}) + parsed_rel_entry = self.parse_entry(attr=attr, relationship=relationship) + attr_rel_dictionary[attribute_name]['Relationships'].update({relationship:parsed_rel_entry}) return attr_rel_dictionary diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index 4c6c331a5..52112d7a3 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -2,7 +2,10 @@ import json import networkx as nx import string -from typing import List +from typing import List, Dict + +def attr_dict_template(key_name:str)->Dict[str,dict[str,dict]]: + return {key_name: {'Relationships': {}}} def get_property_label_from_display_name(display_name:str, strict_camel_case:bool = False) -> str: """Convert a given display name string into a proper property label string From 65979a03af649647b9fc4b7149e5cf6edbd72e82 Mon Sep 17 00:00:00 2001 
From: Mialy DeFelice Date: Thu, 12 Oct 2023 14:23:54 -0700 Subject: [PATCH 153/239] for DataModelJsonldParser, add some functions to simplify parsing comprehension and simplify some handling --- schematic/schemas/data_model_parser.py | 97 ++++++++++++++------------ 1 file changed, 52 insertions(+), 45 deletions(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index eff292bde..4f77b679d 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -226,6 +226,36 @@ def __init__( # Load relationships dictionary. self.rel_dict = self.dmr.define_data_model_relationships() + def parse_entry(self, rel_entry:any, id_jsonld_key:str)->Any: + """Parse an input entry based on certain attributes + Args: + rel_entry: Given a single entry and relationship in a JSONLD data model, the recorded value + id_jsonld_key, str: the jsonld key for id + Returns: + parsed_rel_entry: an entry that has been parsed base on its input type and characteristics. + """ + # Retrieve ID from single value dictionary + if type(rel_entry) == dict and len(rel_entry.keys()) == 1: + parsed_rel_entry = rel_entry['@id'] + # Parse list of dictionaries to make a list of entries with context stripped (will update this section when contexts added.) + elif type(rel_entry)==list and type(rel_entry[0]) == dict: + parsed_rel_entry = [r[id_jsonld_key].split(':')[1] for r in rel_entry] + # Strip context from string and convert true/false to bool + elif type(rel_entry) == str: + # Remove contexts and treat strings as appropriate. 
+ if ':' in rel_entry and 'http:' not in rel_entry: + parsed_rel_entry = rel_entry.split(':')[1] + # Convert true/false strings to boolean + if parsed_rel_entry.lower() =='true': + parsed_rel_entry = True + elif parsed_rel_entry.lower == 'false': + parsed_rel_entry = False + else: + parsed_rel_entry = rel_entry + # For anything else get that + else: + parsed_rel_entry = rel_entry + return parsed_rel_entry def gather_jsonld_attributes_relationships( self, model_jsonld: List[dict]) -> Dict: @@ -253,60 +283,37 @@ def gather_jsonld_attributes_relationships( jsonld_keys_to_extract = ['label', 'subClassOf', 'id', 'displayName'] label_jsonld_key, subclassof_jsonld_key, id_jsonld_key, dn_jsonld_key = [self.rel_dict[key]['jsonld_key'] for key in jsonld_keys_to_extract ] - - # Gather all labels from the model. - model_labels = [v[label_jsonld_key] for v in model_jsonld] - + # Build the attr_rel_dictionary attr_rel_dictionary = {} # Move through each entry in the jsonld model for entry in model_jsonld: - # Get the label of the entry - try: - # Get the entry display name (if recorded) - entry_name = entry[dn_jsonld_key] - except: - # If no display name, get the label. - entry_name = entry[label_jsonld_key] + + # Get the attr key for the dictionary + if dn_jsonld_key in entry: + # The attr_key is the entry display name if one was recorded + attr_key = entry[dn_jsonld_key] + else: + # If not we wil use the get the label. + attr_key = entry[label_jsonld_key] - # If the entry is an attribute that has not already been added to the dictionary, add it. + # If the entry has not already been added to the dictionary, add it. if entry_name not in attr_rel_dictionary.keys(): - attr_rel_dictionary.update({entry_name: {'Relationships': {}}}) - - # Add relationships for each attribute - # - # Go through each defined relationship type (key) and its attributes (val) - for key, val in self.rel_dict.items(): - # Determine if current entry can be defined by the current reationship. 
- if val['jsonld_key'] in entry.keys() and 'csv_header' in val.keys(): + attr_rel_dictionary.update(attr_dict_template(attr_key)) + + # Add relationships for each entry + # Go through each defined relationship type (rel_key) and its attributes (rel_vals) + for rel_key, rel_vals in self.rel_dict.items(): + # Determine if current entry in the for loop, can be described by the current relationship that is being cycled through. + if rel_vals['jsonld_key'] in entry.keys() and 'csv_header' in rel_vals.keys(): # Retrieve entry value associated with the given relationship - rel_entry = entry[val['jsonld_key']] - # if there is an entry treat it by type and add to the attr:relationships dictionary. + rel_entry = entry[rel_vals['jsonld_key']] + # If there is an entry parset it by type and add to the attr:relationships dictionary. if rel_entry: - # Retrieve ID from dictionary single value dictionary - if type(rel_entry) == dict and len(rel_entry.keys()) == 1: - parsed_rel_entry = entry.get(val['jsonld_key'])['@id'] - # Parse list of dictionaries to make a list of entries with context stripped (will update this section when contexts added.) - elif type(rel_entry)==list and type(rel_entry[0]) == dict: - parsed_rel_entry = [r[id_jsonld_key].split(':')[1] for r in rel_entry] - # Strip context from string and convert true/false to bool - elif type(rel_entry) == str: - # Remove contexts and treat strings as appropriate. 
- if ':' in rel_entry and 'http:' not in rel_entry: - parsed_rel_entry = rel_entry.split(':')[1] - # Convert true/false strings to boolean - if parsed_rel_entry.lower() =='true': - parsed_rel_entry = True - elif parsed_rel_entry.lower == 'false': - parsed_rel_entry == False - else: - parsed_rel_entry = rel_entry - # For anything else get that - else: - parsed_rel_entry = rel_entry + parsed_rel_entry = self.parse_entry(rel_entry=rel_entry, id_jsonld_key=id_jsonld_key) # Add relationships for each attribute and relationship to the dictionary attr_rel_dictionary[ - entry_name]['Relationships'].update( - {self.rel_dict[key]['csv_header']: parsed_rel_entry}) + attr_key]['Relationships'].update( + {self.rel_dict[rel_key]['csv_header']: parsed_rel_entry}) return attr_rel_dictionary def parse_jsonld_model( From d4f34d76ed7971fc74a7a635903fc74b12a76a15 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 12 Oct 2023 14:26:39 -0700 Subject: [PATCH 154/239] fix ref to entry_name, to attr_key --- schematic/schemas/data_model_parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 4f77b679d..2e5aafd71 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -297,7 +297,7 @@ def gather_jsonld_attributes_relationships( attr_key = entry[label_jsonld_key] # If the entry has not already been added to the dictionary, add it. 
- if entry_name not in attr_rel_dictionary.keys(): + if attr_key not in attr_rel_dictionary.keys(): attr_rel_dictionary.update(attr_dict_template(attr_key)) # Add relationships for each entry From f815e05562e910b6957df240f814f3814a79ea34 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 12 Oct 2023 19:11:17 -0700 Subject: [PATCH 155/239] convert templates to dataclass, dataclass_json, update references, and add dataclass_json to poetry --- poetry.lock | 3992 ++++++++++++------------ pyproject.toml | 1 + schematic/schemas/data_model_jsonld.py | 129 +- 3 files changed, 2083 insertions(+), 2039 deletions(-) diff --git a/poetry.lock b/poetry.lock index d1f5b59a4..b860f9153 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,3 +1,5 @@ +# This file is automatically @generated by Poetry and should not be changed by hand. + [[package]] name = "alabaster" version = "0.7.13" @@ -5,6 +7,10 @@ description = "A configurable sidebar-enabled Sphinx theme" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] [[package]] name = "altair" @@ -13,6 +19,10 @@ description = "Altair: A declarative statistical visualization library for Pytho category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "altair-4.2.0-py3-none-any.whl", hash = "sha256:0c724848ae53410c13fa28be2b3b9a9dcb7b5caa1a70f7f217bd663bb419935a"}, + {file = "altair-4.2.0.tar.gz", hash = "sha256:d87d9372e63b48cd96b2a6415f0cf9457f50162ab79dc7a31cd7e024dd840026"}, +] [package.dependencies] entrypoints = "*" @@ -32,6 +42,10 @@ description = "High level compatibility layer for multiple asynchronous event lo category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "anyio-3.7.1-py3-none-any.whl", hash = 
"sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, + {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, +] [package.dependencies] exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} @@ -50,6 +64,10 @@ description = "Disable App Nap on macOS >= 10.9" category = "main" optional = false python-versions = "*" +files = [ + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, +] [[package]] name = "argon2-cffi" @@ -58,6 +76,10 @@ description = "The secure Argon2 password hashing algorithm." category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, + {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, +] [package.dependencies] argon2-cffi-bindings = "*" @@ -74,6 +96,29 @@ description = "Low-level CFFI bindings for Argon2" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = 
"argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = 
"sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] [package.dependencies] cffi = ">=1.0.1" @@ -89,6 +134,10 @@ description = "Better dates & times for Python" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"}, + {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, +] [package.dependencies] python-dateutil = ">=2.7.0" @@ -100,6 +149,10 @@ description = "An abstract syntax tree for Python with inference support." 
category = "dev" optional = false python-versions = ">=3.7.2" +files = [ + {file = "astroid-2.15.6-py3-none-any.whl", hash = "sha256:389656ca57b6108f939cf5d2f9a2a825a3be50ba9d589670f393236e0a03b91c"}, + {file = "astroid-2.15.6.tar.gz", hash = "sha256:903f024859b7c7687d7a7f3a3f73b17301f8e42dfd9cc9df9d4418172d3e2dbd"}, +] [package.dependencies] lazy-object-proxy = ">=1.4.0" @@ -113,6 +166,10 @@ description = "Annotate AST trees with source code positions" category = "main" optional = false python-versions = "*" +files = [ + {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, + {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, +] [package.dependencies] six = "*" @@ -127,6 +184,10 @@ description = "Classes Without Boilerplate" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] @@ -142,6 +203,10 @@ description = "Internationalization utilities" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, + {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, +] [[package]] name = "backcall" @@ -150,6 +215,10 @@ description = "Specifications for callback functions passed in to an API" category = "main" optional = false python-versions = "*" +files = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = 
"backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] [[package]] name = "beautifulsoup4" @@ -158,6 +227,10 @@ description = "Screen-scraping library" category = "main" optional = false python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, + {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, +] [package.dependencies] soupsieve = ">1.2" @@ -173,6 +246,30 @@ description = "The uncompromising code formatter." category = "dev" optional = false python-versions = ">=3.8" +files = [ + {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, + {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, + {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, + {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, + {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, + {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, + {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, + {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, + {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, + {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, + {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, +] [package.dependencies] click = ">=8.0.0" @@ -196,6 +293,10 @@ description = "An 
easy safelist-based HTML-sanitizing tool." category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "bleach-6.0.0-py3-none-any.whl", hash = "sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4"}, + {file = "bleach-6.0.0.tar.gz", hash = "sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414"}, +] [package.dependencies] six = ">=1.9.0" @@ -211,6 +312,10 @@ description = "Extensible memoizing collections and decorators" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, + {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, +] [[package]] name = "certifi" @@ -219,6 +324,10 @@ description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, + {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, +] [[package]] name = "cffi" @@ -227,6 +336,72 @@ description = "Foreign Function Interface for Python calling C code." 
category = "main" optional = false python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = 
"cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] [package.dependencies] pycparser = "*" @@ -238,6 +413,83 @@ description 
= "The Real First Universal Charset Detector. Open, modern and activ category = "main" optional = false python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = 
"charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = 
"charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = 
"charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = 
"charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = 
"charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, +] [[package]] name = "click" @@ -246,6 +498,10 @@ description = "Composable command line interface toolkit" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "click-8.1.5-py3-none-any.whl", hash = "sha256:e576aa487d679441d7d30abb87e1b43d24fc53bffb8758443b1a9e1cee504548"}, + {file = "click-8.1.5.tar.gz", hash = "sha256:4be4b1af8d665c6d942909916d31a213a106800c47d0eeba73d34da3cbc11367"}, +] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} @@ -257,6 +513,10 @@ description = "Logging integration for Click" category = "main" optional = false python-versions = "*" +files = [ + {file = "click-log-0.4.0.tar.gz", hash = "sha256:3970f8570ac54491237bcdb3d8ab5e3eef6c057df29f8c3d1151a51a9c23b975"}, + {file = "click_log-0.4.0-py2.py3-none-any.whl", hash = "sha256:a43e394b528d52112af599f2fc9e4b7cf3c15f94e53581f74fa6867e68c91756"}, +] [package.dependencies] click = "*" @@ -268,6 +528,10 @@ description = "Click utility functions" category = "main" optional = false python-versions = "*" +files = [ + {file = "clickclick-20.10.2-py2.py3-none-any.whl", hash = "sha256:c8f33e6d9ec83f68416dd2136a7950125bd256ec39ccc9a85c6e280a16be2bb5"}, + {file = "clickclick-20.10.2.tar.gz", hash = "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c"}, +] 
[package.dependencies] click = ">=4.0" @@ -280,6 +544,10 @@ description = "Cross-platform colored terminal text." category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] [[package]] name = "comm" @@ -288,6 +556,10 @@ description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus- category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "comm-0.1.3-py3-none-any.whl", hash = "sha256:16613c6211e20223f215fc6d3b266a247b6e2641bf4e0a3ad34cb1aff2aa3f37"}, + {file = "comm-0.1.3.tar.gz", hash = "sha256:a61efa9daffcfbe66fd643ba966f846a624e4e6d6767eda9cf6e993aadaab93e"}, +] [package.dependencies] traitlets = ">=5.3" @@ -304,6 +576,10 @@ description = "Connexion - API first applications with OpenAPI/Swagger and Flask category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "connexion-2.14.2-py2.py3-none-any.whl", hash = "sha256:a73b96a0e07b16979a42cde7c7e26afe8548099e352cf350f80c57185e0e0b36"}, + {file = "connexion-2.14.2.tar.gz", hash = "sha256:dbc06f52ebeebcf045c9904d570f24377e8bbd5a6521caef15a06f634cf85646"}, +] [package.dependencies] clickclick = ">=1.2,<21" @@ -331,13 +607,75 @@ description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, + {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, + {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, + {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, + {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, + {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, + {file = 
"coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, + {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, + {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, + {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, + {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, + {file = 
"coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, + {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, + {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, + {file = 
"coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, + {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, + {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, + {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, + {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] -[package.dependencies] -tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} - -[package.extras] -toml = ["tomli"] - [[package]] name = "cryptography" version = "41.0.2" @@ -345,6 +683,31 @@ description = "cryptography is a package which provides cryptographic recipes an category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711"}, + {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = 
"sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83"}, + {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5"}, + {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58"}, + {file = "cryptography-41.0.2-cp37-abi3-win32.whl", hash = "sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76"}, + {file = "cryptography-41.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee"}, + {file = 
"cryptography-41.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0"}, + {file = "cryptography-41.0.2.tar.gz", hash = "sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c"}, +] [package.dependencies] cffi = ">=1.12" @@ -359,6 +722,22 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] +[[package]] +name = "dataclasses-json" +version = "0.6.1" +description = "Easily serialize dataclasses to and from JSON." 
+category = "main" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "dataclasses_json-0.6.1-py3-none-any.whl", hash = "sha256:1bd8418a61fe3d588bb0079214d7fb71d44937da40742b787256fd53b26b6c80"}, + {file = "dataclasses_json-0.6.1.tar.gz", hash = "sha256:a53c220c35134ce08211a1057fd0e5bf76dc5331627c6b241cacbc570a89faae"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + [[package]] name = "dateparser" version = "1.1.8" @@ -366,6 +745,10 @@ description = "Date parsing library designed to parse dates from HTML pages" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "dateparser-1.1.8-py2.py3-none-any.whl", hash = "sha256:070b29b5bbf4b1ec2cd51c96ea040dc68a614de703910a91ad1abba18f9f379f"}, + {file = "dateparser-1.1.8.tar.gz", hash = "sha256:86b8b7517efcc558f085a142cdb7620f0921543fcabdb538c8a4c4001d8178e3"}, +] [package.dependencies] python-dateutil = "*" @@ -385,6 +768,26 @@ description = "An implementation of the Debug Adapter Protocol for Python" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "debugpy-1.6.7-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b3e7ac809b991006ad7f857f016fa92014445085711ef111fdc3f74f66144096"}, + {file = "debugpy-1.6.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3876611d114a18aafef6383695dfc3f1217c98a9168c1aaf1a02b01ec7d8d1e"}, + {file = "debugpy-1.6.7-cp310-cp310-win32.whl", hash = "sha256:33edb4afa85c098c24cc361d72ba7c21bb92f501104514d4ffec1fb36e09c01a"}, + {file = "debugpy-1.6.7-cp310-cp310-win_amd64.whl", hash = "sha256:ed6d5413474e209ba50b1a75b2d9eecf64d41e6e4501977991cdc755dc83ab0f"}, + {file = "debugpy-1.6.7-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:38ed626353e7c63f4b11efad659be04c23de2b0d15efff77b60e4740ea685d07"}, + {file = "debugpy-1.6.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:279d64c408c60431c8ee832dfd9ace7c396984fd7341fa3116aee414e7dcd88d"}, + {file = "debugpy-1.6.7-cp37-cp37m-win32.whl", hash = "sha256:dbe04e7568aa69361a5b4c47b4493d5680bfa3a911d1e105fbea1b1f23f3eb45"}, + {file = "debugpy-1.6.7-cp37-cp37m-win_amd64.whl", hash = "sha256:f90a2d4ad9a035cee7331c06a4cf2245e38bd7c89554fe3b616d90ab8aab89cc"}, + {file = "debugpy-1.6.7-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:5224eabbbeddcf1943d4e2821876f3e5d7d383f27390b82da5d9558fd4eb30a9"}, + {file = "debugpy-1.6.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae1123dff5bfe548ba1683eb972329ba6d646c3a80e6b4c06cd1b1dd0205e9b"}, + {file = "debugpy-1.6.7-cp38-cp38-win32.whl", hash = "sha256:9cd10cf338e0907fdcf9eac9087faa30f150ef5445af5a545d307055141dd7a4"}, + {file = "debugpy-1.6.7-cp38-cp38-win_amd64.whl", hash = "sha256:aaf6da50377ff4056c8ed470da24632b42e4087bc826845daad7af211e00faad"}, + {file = "debugpy-1.6.7-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:0679b7e1e3523bd7d7869447ec67b59728675aadfc038550a63a362b63029d2c"}, + {file = "debugpy-1.6.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de86029696e1b3b4d0d49076b9eba606c226e33ae312a57a46dca14ff370894d"}, + {file = "debugpy-1.6.7-cp39-cp39-win32.whl", hash = "sha256:d71b31117779d9a90b745720c0eab54ae1da76d5b38c8026c654f4a066b0130a"}, + {file = "debugpy-1.6.7-cp39-cp39-win_amd64.whl", hash = "sha256:c0ff93ae90a03b06d85b2c529eca51ab15457868a377c4cc40a23ab0e4e552a3"}, + {file = "debugpy-1.6.7-py2.py3-none-any.whl", hash = "sha256:53f7a456bc50706a0eaabecf2d3ce44c4d5010e46dfc65b6b81a518b42866267"}, + {file = "debugpy-1.6.7.zip", hash = "sha256:c4c2f0810fa25323abfdfa36cbbbb24e5c3b1a42cb762782de64439c575d67f2"}, +] [[package]] name = "decorator" @@ -393,6 +796,10 @@ description = "Decorators for Humans" category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = 
"sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] [[package]] name = "defusedxml" @@ -401,6 +808,10 @@ description = "XML bomb protection for Python stdlib modules" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] [[package]] name = "deprecated" @@ -409,6 +820,10 @@ description = "Python @deprecated decorator to deprecate old python classes, fun category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] [package.dependencies] wrapt = ">=1.10,<2" @@ -423,6 +838,10 @@ description = "A library to handle automated deprecations" category = "main" optional = false python-versions = "*" +files = [ + {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, + {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, +] [package.dependencies] packaging = "*" @@ -434,6 +853,10 @@ description = "serialize all of python" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, + {file = "dill-0.3.6.tar.gz", hash = 
"sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"}, +] [package.extras] graph = ["objgraph (>=1.7.2)"] @@ -445,6 +868,10 @@ description = "Docutils -- Python Documentation Utilities" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, + {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, +] [[package]] name = "entrypoints" @@ -453,6 +880,10 @@ description = "Discover and load entry points from installed packages." category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, + {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, +] [[package]] name = "et-xmlfile" @@ -461,6 +892,10 @@ description = "An implementation of lxml.xmlfile for the standard library" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, + {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, +] [[package]] name = "exceptiongroup" @@ -469,6 +904,10 @@ description = "Backport of PEP 654 (exception groups)" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, + {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, +] [package.extras] test = ["pytest (>=6)"] @@ -480,6 +919,10 @@ description = "Get the currently executing AST node of a 
frame, and other inform category = "main" optional = false python-versions = "*" +files = [ + {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, + {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, +] [package.extras] tests = ["asttokens", "littleutils", "pytest", "rich"] @@ -491,6 +934,10 @@ description = "Fastest Python implementation of JSON schema" category = "main" optional = false python-versions = "*" +files = [ + {file = "fastjsonschema-2.17.1-py3-none-any.whl", hash = "sha256:4b90b252628ca695280924d863fe37234eebadc29c5360d322571233dc9746e0"}, + {file = "fastjsonschema-2.17.1.tar.gz", hash = "sha256:f4eeb8a77cef54861dbf7424ac8ce71306f12cbb086c45131bcba2c6a4f726e3"}, +] [package.extras] devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] @@ -502,6 +949,10 @@ description = "the modular source code checker: pep8 pyflakes and co" category = "dev" optional = false python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, + {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, +] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" @@ -515,6 +966,10 @@ description = "A simple framework for building complex web applications." 
category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "Flask-2.1.3-py3-none-any.whl", hash = "sha256:9013281a7402ad527f8fd56375164f3aa021ecfaff89bfe3825346c24f87e04c"}, + {file = "Flask-2.1.3.tar.gz", hash = "sha256:15972e5017df0575c3d6c090ba168b6db90259e620ac8d7ea813a396bad5b6cb"}, +] [package.dependencies] click = ">=8.0" @@ -534,6 +989,10 @@ description = "A Flask extension adding a decorator for CORS support" category = "main" optional = false python-versions = "*" +files = [ + {file = "Flask-Cors-3.0.10.tar.gz", hash = "sha256:b60839393f3b84a0f3746f6cdca56c1ad7426aa738b70d6c61375857823181de"}, + {file = "Flask_Cors-3.0.10-py2.py3-none-any.whl", hash = "sha256:74efc975af1194fc7891ff5cd85b0f7478be4f7f59fe158102e91abb72bb4438"}, +] [package.dependencies] Flask = ">=0.9" @@ -546,6 +1005,10 @@ description = "Validates fully-qualified domain names against RFC 1123, so that category = "main" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" +files = [ + {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, + {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, +] [[package]] name = "google-api-core" @@ -554,6 +1017,10 @@ description = "Google API client core library" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "google-api-core-2.11.1.tar.gz", hash = "sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a"}, + {file = "google_api_core-2.11.1-py3-none-any.whl", hash = "sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a"}, +] [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" @@ -573,6 +1040,10 @@ description = "Google API Client Library for Python" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "google-api-python-client-2.93.0.tar.gz", hash = 
"sha256:62ee28e96031a10a1c341f226a75ac6a4f16bdb1d888dc8222b2cdca133d0031"}, + {file = "google_api_python_client-2.93.0-py2.py3-none-any.whl", hash = "sha256:f34abb671afd488bd19d30721ea20fb30d3796ddd825d6f91f26d8c718a9f07d"}, +] [package.dependencies] google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0.dev0" @@ -588,6 +1059,10 @@ description = "Google Authentication Library" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "google-auth-2.22.0.tar.gz", hash = "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"}, + {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"}, +] [package.dependencies] cachetools = ">=2.0.0,<6.0" @@ -610,6 +1085,10 @@ description = "Google Authentication Library: httplib2 transport" category = "main" optional = false python-versions = "*" +files = [ + {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, + {file = "google_auth_httplib2-0.1.0-py2.py3-none-any.whl", hash = "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10"}, +] [package.dependencies] google-auth = "*" @@ -623,6 +1102,10 @@ description = "Google Authentication Library" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "google-auth-oauthlib-0.8.0.tar.gz", hash = "sha256:81056a310fb1c4a3e5a7e1a443e1eb96593c6bbc55b26c0261e4d3295d3e6593"}, + {file = "google_auth_oauthlib-0.8.0-py2.py3-none-any.whl", hash = "sha256:40cc612a13c3336d5433e94e2adb42a0c88f6feb6c55769e44500fc70043a576"}, +] [package.dependencies] google-auth = ">=2.15.0" @@ -638,6 +1121,10 @@ description = "Common protobufs used in Google APIs" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.59.1.tar.gz", hash = "sha256:b35d530fe825fb4227857bc47ad84c33c809ac96f312e13182bdeaa2abe1178a"}, + {file = 
"googleapis_common_protos-1.59.1-py2.py3-none-any.whl", hash = "sha256:0cbedb6fb68f1c07e18eb4c48256320777707e7d0c55063ae56c15db3224a61e"}, +] [package.dependencies] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" @@ -652,6 +1139,10 @@ description = "Simple Python interface for Graphviz" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "graphviz-0.20.1-py3-none-any.whl", hash = "sha256:587c58a223b51611c0cf461132da386edd896a029524ca61a1462b880bf97977"}, + {file = "graphviz-0.20.1.zip", hash = "sha256:8c58f14adaa3b947daf26c19bc1e98c4e0702cdc31cf99153e6f06904d492bf8"}, +] [package.extras] dev = ["flake8", "pep8-naming", "tox (>=3)", "twine", "wheel"] @@ -665,6 +1156,10 @@ description = "Always know what to expect from your data." category = "main" optional = false python-versions = "*" +files = [ + {file = "great_expectations-0.15.50-py3-none-any.whl", hash = "sha256:bda4c6bfe199dc0610273a1c160aab3876583266b1957a34a7edb72b055fd13d"}, + {file = "great_expectations-0.15.50.tar.gz", hash = "sha256:0b00c974410d598a97b4c662d7955d80d6268e35c5f3893ddb546f75432412db"}, +] [package.dependencies] altair = ">=4.0.0,<4.2.1" @@ -736,6 +1231,72 @@ description = "Lightweight in-process concurrent programming" category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +files = [ + {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, + {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, + {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, + {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = 
"sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, + {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, + {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, + {file = 
"greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, + {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, + {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, + {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, + {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, + {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, + {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, + {file = 
"greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, + {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, + {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, + {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, + {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, + {file = 
"greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, + {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, + {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = 
"sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, + {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, + {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, + {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, +] [package.extras] docs = ["Sphinx", "docutils (<0.18)"] @@ -748,6 +1309,10 @@ description = "A comprehensive HTTP client library." 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, + {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, +] [package.dependencies] pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""} @@ -759,7 +1324,11 @@ description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false python-versions = ">=3.5" - +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + [[package]] name = "imagesize" version = "1.4.1" @@ -767,6 +1336,10 @@ description = "Getting image size from png/jpeg/jpeg2000/gif file" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] [[package]] name = "importlib-metadata" @@ -775,6 +1348,10 @@ description = "Read metadata from Python packages" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"}, + {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"}, +] [package.dependencies] zipp = ">=0.5" @@ -791,6 +1368,10 @@ description = "A port of Ruby on Rails inflector to 
Python" category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, + {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, +] [[package]] name = "iniconfig" @@ -799,6 +1380,10 @@ description = "brain-dead simple config-ini parsing" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] [[package]] name = "interrogate" @@ -807,6 +1392,10 @@ description = "Interrogate a codebase for docstring coverage." category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "interrogate-1.5.0-py3-none-any.whl", hash = "sha256:a4ccc5cbd727c74acc98dee6f5e79ef264c0bcfa66b68d4e123069b2af89091a"}, + {file = "interrogate-1.5.0.tar.gz", hash = "sha256:b6f325f0aa84ac3ac6779d8708264d366102226c5af7d69058cecffcff7a6d6c"}, +] [package.dependencies] attrs = "*" @@ -829,6 +1418,10 @@ description = "IPython Kernel for Jupyter" category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "ipykernel-6.24.0-py3-none-any.whl", hash = "sha256:2f5fffc7ad8f1fd5aadb4e171ba9129d9668dbafa374732cf9511ada52d6547f"}, + {file = "ipykernel-6.24.0.tar.gz", hash = "sha256:29cea0a716b1176d002a61d0b0c851f34536495bc4ef7dd0222c88b41b816123"}, +] [package.dependencies] appnope = {version = "*", markers = "platform_system == \"Darwin\""} @@ -859,6 +1452,10 @@ description = "IPython: Productive Interactive Computing" category = "main" optional = false python-versions = ">=3.9" +files = [ + {file = "ipython-8.14.0-py3-none-any.whl", hash = 
"sha256:248aca623f5c99a6635bc3857677b7320b9b8039f99f070ee0d20a5ca5a8e6bf"}, + {file = "ipython-8.14.0.tar.gz", hash = "sha256:1d197b907b6ba441b692c48cf2a3a2de280dc0ac91a3405b39349a50272ca0a1"}, +] [package.dependencies] appnope = {version = "*", markers = "sys_platform == \"darwin\""} @@ -895,6 +1492,10 @@ description = "Vestigial utilities from IPython" category = "main" optional = false python-versions = "*" +files = [ + {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, + {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, +] [[package]] name = "ipywidgets" @@ -903,6 +1504,10 @@ description = "Jupyter interactive widgets" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "ipywidgets-8.0.7-py3-none-any.whl", hash = "sha256:e0aed0c95a1e55b6a123f64305245578bdc09e52965a34941c2b6a578b8c64a0"}, + {file = "ipywidgets-8.0.7.tar.gz", hash = "sha256:50ace0a8886e9a0d68b980db82f94c25d55d21ff2340ed36f802dd9365e94acf"}, +] [package.dependencies] ipykernel = ">=4.5.1" @@ -921,6 +1526,10 @@ description = "An ISO 8601 date/time/duration parser and formatter" category = "main" optional = false python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] [package.dependencies] six = "*" @@ -932,6 +1541,10 @@ description = "Operations with ISO 8601 durations" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, + {file = "isoduration-20.11.0.tar.gz", hash = 
"sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, +] [package.dependencies] arrow = ">=0.15.0" @@ -943,6 +1556,10 @@ description = "A Python utility / library to sort Python imports." category = "dev" optional = false python-versions = ">=3.8.0" +files = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] [package.extras] colors = ["colorama (>=0.4.3)"] @@ -957,6 +1574,10 @@ description = "Safely pass data to untrusted environments and back." category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, + {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, +] [[package]] name = "jedi" @@ -965,6 +1586,10 @@ description = "An autocompletion tool for Python that can be used for text edito category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, + {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"}, +] [package.dependencies] parso = ">=0.8.0,<0.9.0" @@ -981,6 +1606,10 @@ description = "Low-level, pure Python DBus protocol wrapper." 
category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, + {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, +] [package.extras] test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] @@ -993,6 +1622,10 @@ description = "A very fast and expressive template engine." category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] [package.dependencies] MarkupSafe = ">=2.0" @@ -1007,6 +1640,10 @@ description = "Apply JSON-Patches (RFC 6902)" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] [package.dependencies] jsonpointer = ">=1.9" @@ -1018,6 +1655,10 @@ description = "Identify specific nodes in a JSON document (RFC 6901)" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] [[package]] name = "jsonschema" @@ -1026,6 +1667,10 @@ description = "An implementation of JSON Schema validation for Python" category = 
"main" optional = false python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.18.3-py3-none-any.whl", hash = "sha256:aab78b34c2de001c6b692232f08c21a97b436fe18e0b817bf0511046924fceef"}, + {file = "jsonschema-4.18.3.tar.gz", hash = "sha256:64b7104d72efe856bea49ca4af37a14a9eba31b40bb7238179f3803130fd34d9"}, +] [package.dependencies] attrs = ">=22.2.0" @@ -1052,6 +1697,10 @@ description = "The JSON Schema meta-schemas and vocabularies, exposed as a Regis category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.6.1-py3-none-any.whl", hash = "sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7"}, + {file = "jsonschema_specifications-2023.6.1.tar.gz", hash = "sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28"}, +] [package.dependencies] referencing = ">=0.28.0" @@ -1063,6 +1712,10 @@ description = "Jupyter protocol implementation and client libraries" category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "jupyter_client-8.3.0-py3-none-any.whl", hash = "sha256:7441af0c0672edc5d28035e92ba5e32fadcfa8a4e608a434c228836a89df6158"}, + {file = "jupyter_client-8.3.0.tar.gz", hash = "sha256:3af69921fe99617be1670399a0b857ad67275eefcfa291e2c81a160b7b650f5f"}, +] [package.dependencies] importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} @@ -1083,6 +1736,10 @@ description = "Jupyter core package. 
A base package on which Jupyter projects re category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "jupyter_core-5.3.1-py3-none-any.whl", hash = "sha256:ae9036db959a71ec1cac33081eeb040a79e681f08ab68b0883e9a676c7a90dce"}, + {file = "jupyter_core-5.3.1.tar.gz", hash = "sha256:5ba5c7938a7f97a6b0481463f7ff0dbac7c15ba48cf46fa4035ca6e838aa1aba"}, +] [package.dependencies] platformdirs = ">=2.5" @@ -1100,6 +1757,10 @@ description = "Jupyter Event System library" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "jupyter_events-0.6.3-py3-none-any.whl", hash = "sha256:57a2749f87ba387cd1bfd9b22a0875b889237dbf2edc2121ebb22bde47036c17"}, + {file = "jupyter_events-0.6.3.tar.gz", hash = "sha256:9a6e9995f75d1b7146b436ea24d696ce3a35bfa8bfe45e0c33c334c79464d0b3"}, +] [package.dependencies] jsonschema = {version = ">=3.2.0", extras = ["format-nongpl"]} @@ -1121,6 +1782,10 @@ description = "The backend—i.e. core services, APIs, and REST endpoints—to J category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "jupyter_server-2.7.0-py3-none-any.whl", hash = "sha256:6a77912aff643e53fa14bdb2634884b52b784a4be77ce8e93f7283faed0f0849"}, + {file = "jupyter_server-2.7.0.tar.gz", hash = "sha256:36da0a266d31a41ac335a366c88933c17dfa5bb817a48f5c02c16d303bc9477f"}, +] [package.dependencies] anyio = ">=3.1.0" @@ -1154,6 +1819,10 @@ description = "A Jupyter Server Extension Providing Terminals." 
category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "jupyter_server_terminals-0.4.4-py3-none-any.whl", hash = "sha256:75779164661cec02a8758a5311e18bb8eb70c4e86c6b699403100f1585a12a36"}, + {file = "jupyter_server_terminals-0.4.4.tar.gz", hash = "sha256:57ab779797c25a7ba68e97bcfb5d7740f2b5e8a83b5e8102b10438041a7eac5d"}, +] [package.dependencies] pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} @@ -1170,6 +1839,10 @@ description = "Pygments theme using JupyterLab CSS variables" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, + {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, +] [[package]] name = "jupyterlab-widgets" @@ -1178,6 +1851,10 @@ description = "Jupyter interactive widgets for JupyterLab" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "jupyterlab_widgets-3.0.8-py3-none-any.whl", hash = "sha256:4715912d6ceab839c9db35953c764b3214ebbc9161c809f6e0510168845dfdf5"}, + {file = "jupyterlab_widgets-3.0.8.tar.gz", hash = "sha256:d428ab97b8d87cc7c54cbf37644d6e0f0e662f23876e05fa460a73ec3257252a"}, +] [[package]] name = "keyring" @@ -1186,6 +1863,10 @@ description = "Store and access your passwords safely." 
category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "keyring-23.4.1-py3-none-any.whl", hash = "sha256:17e49fb0d6883c2b4445359434dba95aad84aabb29bbff044ad0ed7100232eca"}, + {file = "keyring-23.4.1.tar.gz", hash = "sha256:89cbd74d4683ed164c8082fb38619341097741323b3786905c6dac04d6915a55"}, +] [package.dependencies] importlib-metadata = ">=3.6" @@ -1204,6 +1885,10 @@ description = "Alternate keyring implementations" category = "main" optional = false python-versions = ">=2.7" +files = [ + {file = "keyrings.alt-3.1-py2.py3-none-any.whl", hash = "sha256:6a00fa799baf1385cf9620bd01bcc815aa56e6970342a567bcfea0c4d21abe5f"}, + {file = "keyrings.alt-3.1.tar.gz", hash = "sha256:b59c86b67b9027a86e841a49efc41025bcc3b1b0308629617b66b7011e52db5a"}, +] [package.dependencies] six = "*" @@ -1219,6 +1904,44 @@ description = "A fast and thorough lazy object proxy." category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, + {file = 
"lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, + {file = 
"lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, +] [[package]] name = "makefun" @@ -1227,6 +1950,10 @@ description = "Small library to dynamically create python functions." category = "main" optional = false python-versions = "*" +files = [ + {file = "makefun-1.15.1-py2.py3-none-any.whl", hash = "sha256:a63cfc7b47a539c76d97bd4fdb833c7d0461e759fd1225f580cb4be6200294d4"}, + {file = "makefun-1.15.1.tar.gz", hash = "sha256:40b0f118b6ded0d8d78c78f1eb679b8b6b2462e3c1b3e05fb1b2da8cd46b48a5"}, +] [[package]] name = "markupsafe" @@ -1235,6 +1962,48 @@ description = "Safely add untrusted strings to HTML/XML markup." 
category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3028252424c72b2602a323f70fbf50aa80a5d3aa616ea6add4ba21ae9cc9da4c"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:290b02bab3c9e216da57c1d11d2ba73a9f73a614bbdcc027d299a60cdfabb11a"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e104c0c2b4cd765b4e83909cde7ec61a1e313f8a75775897db321450e928cce"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24c3be29abb6b34052fd26fc7a8e0a49b1ee9d282e3665e8ad09a0a68faee5b3"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204730fd5fe2fe3b1e9ccadb2bd18ba8712b111dcabce185af0b3b5285a7c989"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d3b64c65328cb4cd252c94f83e66e3d7acf8891e60ebf588d7b493a55a1dbf26"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:96de1932237abe0a13ba68b63e94113678c379dca45afa040a17b6e1ad7ed076"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75bb36f134883fdbe13d8e63b8675f5f12b80bb6627f7714c7d6c5becf22719f"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-win32.whl", hash = "sha256:4056f752015dfa9828dce3140dbadd543b555afb3252507348c493def166d454"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:d4e702eea4a2903441f2735799d217f4ac1b55f7d8ad96ab7d4e25417cb0827c"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f0eddfcabd6936558ec020130f932d479930581171368fd728efcfb6ef0dd357"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ddea4c352a488b5e1069069f2f501006b1a4362cb906bee9a193ef1245a7a61"}, + {file = 
"MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09c86c9643cceb1d87ca08cdc30160d1b7ab49a8a21564868921959bd16441b8"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0a0abef2ca47b33fb615b491ce31b055ef2430de52c5b3fb19a4042dbc5cadb"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:736895a020e31b428b3382a7887bfea96102c529530299f426bf2e636aacec9e"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:679cbb78914ab212c49c67ba2c7396dc599a8479de51b9a87b174700abd9ea49"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:84ad5e29bf8bab3ad70fd707d3c05524862bddc54dc040982b0dbcff36481de7"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-win32.whl", hash = "sha256:8da5924cb1f9064589767b0f3fc39d03e3d0fb5aa29e0cb21d43106519bd624a"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:454ffc1cbb75227d15667c09f164a0099159da0c1f3d2636aa648f12675491ad"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:142119fb14a1ef6d758912b25c4e803c3ff66920635c44078666fe7cc3f8f759"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b2a5a856019d2833c56a3dcac1b80fe795c95f401818ea963594b345929dffa7"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d1fb9b2eec3c9714dd936860850300b51dbaa37404209c8d4cb66547884b7ed"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62c0285e91414f5c8f621a17b69fc0088394ccdaa961ef469e833dbff64bd5ea"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc3150f85e2dbcf99e65238c842d1cfe69d3e7649b19864c1cc043213d9cd730"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:f02cf7221d5cd915d7fa58ab64f7ee6dd0f6cddbb48683debf5d04ae9b1c2cc1"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5653619b3eb5cbd35bfba3c12d575db2a74d15e0e1c08bf1db788069d410ce8"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7d2f5d97fcbd004c03df8d8fe2b973fe2b14e7bfeb2cfa012eaa8759ce9a762f"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-win32.whl", hash = "sha256:3cace1837bc84e63b3fd2dfce37f08f8c18aeb81ef5cf6bb9b51f625cb4e6cd8"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:fabbe18087c3d33c5824cb145ffca52eccd053061df1d79d4b66dafa5ad2a5ea"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:023af8c54fe63530545f70dd2a2a7eed18d07a9a77b94e8bf1e2ff7f252db9a3"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d66624f04de4af8bbf1c7f21cc06649c1c69a7f84109179add573ce35e46d448"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c532d5ab79be0199fa2658e24a02fce8542df196e60665dd322409a03db6a52c"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ec74fada3841b8c5f4c4f197bea916025cb9aa3fe5abf7d52b655d042f956"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c653fde75a6e5eb814d2a0a89378f83d1d3f502ab710904ee585c38888816c"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:961eb86e5be7d0973789f30ebcf6caab60b844203f4396ece27310295a6082c7"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:598b65d74615c021423bd45c2bc5e9b59539c875a9bdb7e5f2a6b92dfcfc268d"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:599941da468f2cf22bf90a84f6e2a65524e87be2fce844f96f2dd9a6c9d1e635"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-win32.whl", hash = 
"sha256:e6f7f3f41faffaea6596da86ecc2389672fa949bd035251eab26dc6697451d05"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:b8811d48078d1cf2a6863dafb896e68406c5f513048451cd2ded0473133473c7"}, + {file = "MarkupSafe-2.1.0.tar.gz", hash = "sha256:80beaf63ddfbc64a0452b841d8036ca0611e049650e20afcb882f5d3c266d65f"}, +] [[package]] name = "marshmallow" @@ -1243,6 +2012,10 @@ description = "A lightweight library for converting complex datatypes to and fro category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "marshmallow-3.19.0-py3-none-any.whl", hash = "sha256:93f0958568da045b0021ec6aeb7ac37c81bfcccbb9a0e7ed8559885070b3a19b"}, + {file = "marshmallow-3.19.0.tar.gz", hash = "sha256:90032c0fd650ce94b6ec6dc8dfeb0e3ff50c144586462c389b81a07205bedb78"}, +] [package.dependencies] packaging = ">=17.0" @@ -1260,6 +2033,10 @@ description = "Inline Matplotlib backend for Jupyter" category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, + {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, +] [package.dependencies] traitlets = "*" @@ -1271,6 +2048,10 @@ description = "McCabe checker, plugin for flake8" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] [[package]] name = "mistune" @@ -1279,6 +2060,10 @@ description = "A sane and fast Markdown parser with useful plugins and renderers category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "mistune-3.0.1-py3-none-any.whl", hash = 
"sha256:b9b3e438efbb57c62b5beb5e134dab664800bdf1284a7ee09e8b12b13eb1aac6"}, + {file = "mistune-3.0.1.tar.gz", hash = "sha256:e912116c13aa0944f9dc530db38eb88f6a77087ab128f49f84a48f4c05ea163c"}, +] [[package]] name = "mypy" @@ -1287,6 +2072,34 @@ description = "Optional static typing for Python" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, + {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, + {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, + {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, + {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, + {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, + {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, + {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, + {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, + {file = 
"mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, + {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, + {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, + {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, + {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, + {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, + {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, + {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, + {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, + {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, + {file = 
"mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, +] [package.dependencies] mypy-extensions = ">=1.0.0" @@ -1303,9 +2116,13 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" +category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] [[package]] name = "nbclassic" @@ -1314,6 +2131,10 @@ description = "Jupyter Notebook as a Jupyter Server extension." category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "nbclassic-1.0.0-py3-none-any.whl", hash = "sha256:f99e4769b4750076cd4235c044b61232110733322384a94a63791d2e7beacc66"}, + {file = "nbclassic-1.0.0.tar.gz", hash = "sha256:0ae11eb2319455d805596bf320336cda9554b41d99ab9a3c31bf8180bffa30e3"}, +] [package.dependencies] argon2-cffi = "*" @@ -1346,6 +2167,10 @@ description = "A client library for executing notebooks. 
Formerly nbconvert's Ex category = "main" optional = false python-versions = ">=3.8.0" +files = [ + {file = "nbclient-0.8.0-py3-none-any.whl", hash = "sha256:25e861299e5303a0477568557c4045eccc7a34c17fc08e7959558707b9ebe548"}, + {file = "nbclient-0.8.0.tar.gz", hash = "sha256:f9b179cd4b2d7bca965f900a2ebf0db4a12ebff2f36a711cb66861e4ae158e55"}, +] [package.dependencies] jupyter-client = ">=6.1.12" @@ -1365,6 +2190,10 @@ description = "Converting Jupyter Notebooks" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "nbconvert-7.6.0-py3-none-any.whl", hash = "sha256:5a445c6794b0791984bc5436608fe2c066cb43c83920c7bc91bde3b765e9a264"}, + {file = "nbconvert-7.6.0.tar.gz", hash = "sha256:24fcf27efdef2b51d7f090cc5ce5a9b178766a55be513c4ebab08c91899ab550"}, +] [package.dependencies] beautifulsoup4 = "*" @@ -1400,6 +2229,10 @@ description = "The Jupyter Notebook format" category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "nbformat-5.9.1-py3-none-any.whl", hash = "sha256:b7968ebf4811178a4108ee837eae1442e3f054132100f0359219e9ed1ce3ca45"}, + {file = "nbformat-5.9.1.tar.gz", hash = "sha256:3a7f52d040639cbd8a3890218c8b0ffb93211588c57446c90095e32ba5881b5d"}, +] [package.dependencies] fastjsonschema = "*" @@ -1418,6 +2251,10 @@ description = "Patch asyncio to allow nested event loops" category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "nest_asyncio-1.5.6-py3-none-any.whl", hash = "sha256:b9a953fb40dceaa587d109609098db21900182b16440652454a146cffb06e8b8"}, + {file = "nest_asyncio-1.5.6.tar.gz", hash = "sha256:d267cc1ff794403f7df692964d1d2a3fa9418ffea2a3f6859a439ff482fef290"}, +] [[package]] name = "networkx" @@ -1426,6 +2263,10 @@ description = "Python package for creating and manipulating graphs and networks" category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "networkx-2.8.8-py3-none-any.whl", hash = 
"sha256:e435dfa75b1d7195c7b8378c3859f0445cd88c6b0375c181ed66823a9ceb7524"}, + {file = "networkx-2.8.8.tar.gz", hash = "sha256:230d388117af870fce5647a3c52401fcf753e94720e6ea6b4197a5355648885e"}, +] [package.extras] default = ["matplotlib (>=3.4)", "numpy (>=1.19)", "pandas (>=1.3)", "scipy (>=1.8)"] @@ -1441,6 +2282,10 @@ description = "A web-based notebook environment for interactive computing" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "notebook-6.5.4-py3-none-any.whl", hash = "sha256:dd17e78aefe64c768737b32bf171c1c766666a21cc79a44d37a1700771cab56f"}, + {file = "notebook-6.5.4.tar.gz", hash = "sha256:517209568bd47261e2def27a140e97d49070602eea0d226a696f42a7f16c9a4e"}, +] [package.dependencies] argon2-cffi = "*" @@ -1472,6 +2317,10 @@ description = "A shim layer for notebook traits and config" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "notebook_shim-0.2.3-py3-none-any.whl", hash = "sha256:a83496a43341c1674b093bfcebf0fe8e74cbe7eda5fd2bbc56f8e39e1486c0c7"}, + {file = "notebook_shim-0.2.3.tar.gz", hash = "sha256:f69388ac283ae008cd506dda10d0288b09a017d822d5e8c7129a152cbd3ce7e9"}, +] [package.dependencies] jupyter-server = ">=1.8,<3" @@ -1486,22 +2335,53 @@ description = "Fundamental package for array computing in Python" category = "main" optional = false python-versions = ">=3.9" - -[[package]] -name = "oauth2client" -version = "4.1.3" -description = "OAuth 2.0 client library" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -httplib2 = ">=0.9.1" -pyasn1 = ">=0.1.7" -pyasn1-modules = ">=0.0.5" -rsa = ">=3.1.4" -six = ">=1.6.1" - +files = [ + {file = "numpy-1.25.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77d339465dff3eb33c701430bcb9c325b60354698340229e1dff97745e6b3efa"}, + {file = "numpy-1.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d736b75c3f2cb96843a5c7f8d8ccc414768d34b0a75f466c05f3a739b406f10b"}, + {file = 
"numpy-1.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a90725800caeaa160732d6b31f3f843ebd45d6b5f3eec9e8cc287e30f2805bf"}, + {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c6c9261d21e617c6dc5eacba35cb68ec36bb72adcff0dee63f8fbc899362588"}, + {file = "numpy-1.25.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0def91f8af6ec4bb94c370e38c575855bf1d0be8a8fbfba42ef9c073faf2cf19"}, + {file = "numpy-1.25.1-cp310-cp310-win32.whl", hash = "sha256:fd67b306320dcadea700a8f79b9e671e607f8696e98ec255915c0c6d6b818503"}, + {file = "numpy-1.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:c1516db588987450b85595586605742879e50dcce923e8973f79529651545b57"}, + {file = "numpy-1.25.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b82655dd8efeea69dbf85d00fca40013d7f503212bc5259056244961268b66e"}, + {file = "numpy-1.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e8f6049c4878cb16960fbbfb22105e49d13d752d4d8371b55110941fb3b17800"}, + {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41a56b70e8139884eccb2f733c2f7378af06c82304959e174f8e7370af112e09"}, + {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5154b1a25ec796b1aee12ac1b22f414f94752c5f94832f14d8d6c9ac40bcca6"}, + {file = "numpy-1.25.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38eb6548bb91c421261b4805dc44def9ca1a6eef6444ce35ad1669c0f1a3fc5d"}, + {file = "numpy-1.25.1-cp311-cp311-win32.whl", hash = "sha256:791f409064d0a69dd20579345d852c59822c6aa087f23b07b1b4e28ff5880fcb"}, + {file = "numpy-1.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:c40571fe966393b212689aa17e32ed905924120737194b5d5c1b20b9ed0fb171"}, + {file = "numpy-1.25.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3d7abcdd85aea3e6cdddb59af2350c7ab1ed764397f8eec97a038ad244d2d105"}, + {file = "numpy-1.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:1a180429394f81c7933634ae49b37b472d343cccb5bb0c4a575ac8bbc433722f"}, + {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d412c1697c3853c6fc3cb9751b4915859c7afe6a277c2bf00acf287d56c4e625"}, + {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20e1266411120a4f16fad8efa8e0454d21d00b8c7cee5b5ccad7565d95eb42dd"}, + {file = "numpy-1.25.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f76aebc3358ade9eacf9bc2bb8ae589863a4f911611694103af05346637df1b7"}, + {file = "numpy-1.25.1-cp39-cp39-win32.whl", hash = "sha256:247d3ffdd7775bdf191f848be8d49100495114c82c2bd134e8d5d075fb386a1c"}, + {file = "numpy-1.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:1d5d3c68e443c90b38fdf8ef40e60e2538a27548b39b12b73132456847f4b631"}, + {file = "numpy-1.25.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:35a9527c977b924042170a0887de727cd84ff179e478481404c5dc66b4170009"}, + {file = "numpy-1.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d3fe3dd0506a28493d82dc3cf254be8cd0d26f4008a417385cbf1ae95b54004"}, + {file = "numpy-1.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:012097b5b0d00a11070e8f2e261128c44157a8689f7dedcf35576e525893f4fe"}, + {file = "numpy-1.25.1.tar.gz", hash = "sha256:9a3a9f3a61480cc086117b426a8bd86869c213fc4072e606f01c4e4b66eb92bf"}, +] + +[[package]] +name = "oauth2client" +version = "4.1.3" +description = "OAuth 2.0 client library" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "oauth2client-4.1.3-py2.py3-none-any.whl", hash = "sha256:b8a81cc5d60e2d364f0b1b98f958dbd472887acaf1a5b05e21c28c31a2d6d3ac"}, + {file = "oauth2client-4.1.3.tar.gz", hash = "sha256:d486741e451287f69568a4d26d70d9acd73a2bbfa275746c535b4209891cccc6"}, +] + +[package.dependencies] +httplib2 = ">=0.9.1" +pyasn1 = ">=0.1.7" +pyasn1-modules = ">=0.0.5" +rsa = ">=3.1.4" +six = ">=1.6.1" + [[package]] name = "oauthlib" version = 
"3.2.2" @@ -1509,6 +2389,10 @@ description = "A generic, spec-compliant, thorough implementation of the OAuth r category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] [package.extras] rsa = ["cryptography (>=3.0.0)"] @@ -1522,6 +2406,10 @@ description = "A Python library to read/write Excel 2010 xlsx/xlsm files" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, + {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, +] [package.dependencies] et-xmlfile = "*" @@ -1533,6 +2421,10 @@ description = "A decorator to automatically detect mismatch when overriding a me category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "overrides-7.3.1-py3-none-any.whl", hash = "sha256:6187d8710a935d09b0bcef8238301d6ee2569d2ac1ae0ec39a8c7924e27f58ca"}, + {file = "overrides-7.3.1.tar.gz", hash = "sha256:8b97c6c1e1681b78cbc9424b138d880f0803c2254c5ebaabdde57bb6c62093f2"}, +] [[package]] name = "packaging" @@ -1541,6 +2433,10 @@ description = "Core utilities for Python packages" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, +] [[package]] name = "pandarallel" @@ -1549,6 +2445,9 @@ description = "An easy to use library to speed up computation (by parallelizing category = "main" optional = false python-versions = ">=3.7" 
+files = [ + {file = "pandarallel-1.6.5.tar.gz", hash = "sha256:1c2df98ff6441e8ae13ff428ceebaa7ec42d731f7f972c41ce4fdef1d3adf640"}, +] [package.dependencies] dill = ">=0.3.1" @@ -1566,6 +2465,35 @@ description = "Powerful data structures for data analysis, time series, and stat category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"}, + {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"}, + {file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"}, + {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"}, + {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"}, + {file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"}, + {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"}, + {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"}, + {file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"}, + {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"}, + {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"}, + {file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"}, + {file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"}, + {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"}, + {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"}, 
+ {file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"}, + {file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"}, + {file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"}, +] [package.dependencies] numpy = [ @@ -1585,6 +2513,10 @@ description = "Utilities for writing pandoc filters in python" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, + {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, +] [[package]] name = "parso" @@ -1593,6 +2525,10 @@ description = "A Python Parser" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] [package.extras] qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] @@ -1605,6 +2541,10 @@ description = "Utility library for gitignore style pattern matching of file path category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, + {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, +] [[package]] name = "pdoc" @@ -1613,6 +2553,10 @@ description = "API Documentation for Python Projects" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "pdoc-12.3.1-py3-none-any.whl", hash = 
"sha256:c3f24f31286e634de9c76fa6e67bd5c0c5e74360b41dc91e6b82499831eb52d8"}, + {file = "pdoc-12.3.1.tar.gz", hash = "sha256:453236f225feddb8a9071428f1982a78d74b9b3da4bc4433aedb64dbd0cc87ab"}, +] [package.dependencies] Jinja2 = ">=2.11.0" @@ -1629,6 +2573,10 @@ description = "Pexpect allows easy control of interactive console applications." category = "main" optional = false python-versions = "*" +files = [ + {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, + {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +] [package.dependencies] ptyprocess = ">=0.5" @@ -1640,6 +2588,10 @@ description = "Tiny 'shelve'-like database with concurrency support" category = "main" optional = false python-versions = "*" +files = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] [[package]] name = "platformdirs" @@ -1648,6 +2600,10 @@ description = "A small Python package for determining appropriate platform-speci category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.8.1-py3-none-any.whl", hash = "sha256:cec7b889196b9144d088e4c57d9ceef7374f6c39694ad1577a0aab50d27ea28c"}, + {file = "platformdirs-3.8.1.tar.gz", hash = "sha256:f87ca4fcff7d2b0f81c6a748a77973d7af0f4d526f98f308477c3c436c74d528"}, +] [package.extras] docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] @@ -1660,6 +2616,10 @@ description = "plugin and hook calling mechanisms for python" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pluggy-1.2.0-py3-none-any.whl", hash = 
"sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, +] [package.extras] dev = ["pre-commit", "tox"] @@ -1672,6 +2632,10 @@ description = "Python client for the Prometheus monitoring system." category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"}, + {file = "prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"}, +] [package.extras] twisted = ["twisted"] @@ -1683,6 +2647,10 @@ description = "Library for building powerful interactive command lines in Python category = "main" optional = false python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"}, + {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"}, +] [package.dependencies] wcwidth = "*" @@ -1694,6 +2662,21 @@ description = "" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "protobuf-4.23.4-cp310-abi3-win32.whl", hash = "sha256:5fea3c64d41ea5ecf5697b83e41d09b9589e6f20b677ab3c48e5f242d9b7897b"}, + {file = "protobuf-4.23.4-cp310-abi3-win_amd64.whl", hash = "sha256:7b19b6266d92ca6a2a87effa88ecc4af73ebc5cfde194dc737cf8ef23a9a3b12"}, + {file = "protobuf-4.23.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8547bf44fe8cec3c69e3042f5c4fb3e36eb2a7a013bb0a44c018fc1e427aafbd"}, + {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:fee88269a090ada09ca63551bf2f573eb2424035bcf2cb1b121895b01a46594a"}, + {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_x86_64.whl", hash = 
"sha256:effeac51ab79332d44fba74660d40ae79985901ac21bca408f8dc335a81aa597"}, + {file = "protobuf-4.23.4-cp37-cp37m-win32.whl", hash = "sha256:c3e0939433c40796ca4cfc0fac08af50b00eb66a40bbbc5dee711998fb0bbc1e"}, + {file = "protobuf-4.23.4-cp37-cp37m-win_amd64.whl", hash = "sha256:9053df6df8e5a76c84339ee4a9f5a2661ceee4a0dab019e8663c50ba324208b0"}, + {file = "protobuf-4.23.4-cp38-cp38-win32.whl", hash = "sha256:e1c915778d8ced71e26fcf43c0866d7499891bca14c4368448a82edc61fdbc70"}, + {file = "protobuf-4.23.4-cp38-cp38-win_amd64.whl", hash = "sha256:351cc90f7d10839c480aeb9b870a211e322bf05f6ab3f55fcb2f51331f80a7d2"}, + {file = "protobuf-4.23.4-cp39-cp39-win32.whl", hash = "sha256:6dd9b9940e3f17077e820b75851126615ee38643c2c5332aa7a359988820c720"}, + {file = "protobuf-4.23.4-cp39-cp39-win_amd64.whl", hash = "sha256:0a5759f5696895de8cc913f084e27fd4125e8fb0914bb729a17816a33819f474"}, + {file = "protobuf-4.23.4-py3-none-any.whl", hash = "sha256:e9d0be5bf34b275b9f87ba7407796556abeeba635455d036c7351f7c183ef8ff"}, + {file = "protobuf-4.23.4.tar.gz", hash = "sha256:ccd9430c0719dce806b93f89c91de7977304729e55377f872a92465d548329a9"}, +] [[package]] name = "psutil" @@ -1702,6 +2685,22 @@ description = "Cross-platform lib for process and system monitoring in Python." 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, + {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, + {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, + {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, + {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, + {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, + {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = 
"sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, + {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, +] [package.extras] test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] @@ -1713,6 +2712,10 @@ description = "Run a subprocess in a pseudo terminal" category = "main" optional = false python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] [[package]] name = "pure-eval" @@ -1721,6 +2724,10 @@ description = "Safely evaluate AST nodes without side effects" category = "main" optional = false python-versions = "*" +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] [package.extras] tests = ["pytest"] @@ -1732,6 +2739,10 @@ description = "library with cross-python path, ini-parsing, io, code, log facili category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] [[package]] name = "pyasn1" @@ -1740,6 +2751,10 @@ description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, + {file = 
"pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, +] [[package]] name = "pyasn1-modules" @@ -1748,6 +2763,10 @@ description = "A collection of ASN.1-based protocols modules" category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, + {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, +] [package.dependencies] pyasn1 = ">=0.4.6,<0.6.0" @@ -1759,6 +2778,10 @@ description = "Python style guide checker" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, + {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, +] [[package]] name = "pycparser" @@ -1767,6 +2790,10 @@ description = "C parser in Python" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] [[package]] name = "pydantic" @@ -1775,6 +2802,44 @@ description = "Data validation and settings management using python type hints" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff44c5e89315b15ff1f7fdaf9853770b810936d6b01a7bcecaa227d2f8fe444f"}, + {file = "pydantic-1.10.11-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:a6c098d4ab5e2d5b3984d3cb2527e2d6099d3de85630c8934efcfdc348a9760e"}, + {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16928fdc9cb273c6af00d9d5045434c39afba5f42325fb990add2c241402d151"}, + {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0588788a9a85f3e5e9ebca14211a496409cb3deca5b6971ff37c556d581854e7"}, + {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9baf78b31da2dc3d3f346ef18e58ec5f12f5aaa17ac517e2ffd026a92a87588"}, + {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:373c0840f5c2b5b1ccadd9286782852b901055998136287828731868027a724f"}, + {file = "pydantic-1.10.11-cp310-cp310-win_amd64.whl", hash = "sha256:c3339a46bbe6013ef7bdd2844679bfe500347ac5742cd4019a88312aa58a9847"}, + {file = "pydantic-1.10.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:08a6c32e1c3809fbc49debb96bf833164f3438b3696abf0fbeceb417d123e6eb"}, + {file = "pydantic-1.10.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a451ccab49971af043ec4e0d207cbc8cbe53dbf148ef9f19599024076fe9c25b"}, + {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b02d24f7b2b365fed586ed73582c20f353a4c50e4be9ba2c57ab96f8091ddae"}, + {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f34739a89260dfa420aa3cbd069fbcc794b25bbe5c0a214f8fb29e363484b66"}, + {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e297897eb4bebde985f72a46a7552a7556a3dd11e7f76acda0c1093e3dbcf216"}, + {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d185819a7a059550ecb85d5134e7d40f2565f3dd94cfd870132c5f91a89cf58c"}, + {file = "pydantic-1.10.11-cp311-cp311-win_amd64.whl", hash = "sha256:4400015f15c9b464c9db2d5d951b6a780102cfa5870f2c036d37c23b56f7fc1b"}, + 
{file = "pydantic-1.10.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2417de68290434461a266271fc57274a138510dca19982336639484c73a07af6"}, + {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:331c031ba1554b974c98679bd0780d89670d6fd6f53f5d70b10bdc9addee1713"}, + {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8268a735a14c308923e8958363e3a3404f6834bb98c11f5ab43251a4e410170c"}, + {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:44e51ba599c3ef227e168424e220cd3e544288c57829520dc90ea9cb190c3248"}, + {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d7781f1d13b19700b7949c5a639c764a077cbbdd4322ed505b449d3ca8edcb36"}, + {file = "pydantic-1.10.11-cp37-cp37m-win_amd64.whl", hash = "sha256:7522a7666157aa22b812ce14c827574ddccc94f361237ca6ea8bb0d5c38f1629"}, + {file = "pydantic-1.10.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc64eab9b19cd794a380179ac0e6752335e9555d214cfcb755820333c0784cb3"}, + {file = "pydantic-1.10.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8dc77064471780262b6a68fe67e013298d130414d5aaf9b562c33987dbd2cf4f"}, + {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe429898f2c9dd209bd0632a606bddc06f8bce081bbd03d1c775a45886e2c1cb"}, + {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:192c608ad002a748e4a0bed2ddbcd98f9b56df50a7c24d9a931a8c5dd053bd3d"}, + {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ef55392ec4bb5721f4ded1096241e4b7151ba6d50a50a80a2526c854f42e6a2f"}, + {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e0bb6efe86281623abbeeb0be64eab740c865388ee934cd3e6a358784aca6e"}, + {file = "pydantic-1.10.11-cp38-cp38-win_amd64.whl", hash = 
"sha256:265a60da42f9f27e0b1014eab8acd3e53bd0bad5c5b4884e98a55f8f596b2c19"}, + {file = "pydantic-1.10.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:469adf96c8e2c2bbfa655fc7735a2a82f4c543d9fee97bd113a7fb509bf5e622"}, + {file = "pydantic-1.10.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6cbfbd010b14c8a905a7b10f9fe090068d1744d46f9e0c021db28daeb8b6de1"}, + {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abade85268cc92dff86d6effcd917893130f0ff516f3d637f50dadc22ae93999"}, + {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9738b0f2e6c70f44ee0de53f2089d6002b10c33264abee07bdb5c7f03038303"}, + {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:787cf23e5a0cde753f2eabac1b2e73ae3844eb873fd1f5bdbff3048d8dbb7604"}, + {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:174899023337b9fc685ac8adaa7b047050616136ccd30e9070627c1aaab53a13"}, + {file = "pydantic-1.10.11-cp39-cp39-win_amd64.whl", hash = "sha256:1954f8778489a04b245a1e7b8b22a9d3ea8ef49337285693cf6959e4b757535e"}, + {file = "pydantic-1.10.11-py3-none-any.whl", hash = "sha256:008c5e266c8aada206d0627a011504e14268a62091450210eda7c07fabe6963e"}, + {file = "pydantic-1.10.11.tar.gz", hash = "sha256:f66d479cf7eb331372c470614be6511eae96f1f120344c25f3f9bb59fb1b5528"}, +] [package.dependencies] typing-extensions = ">=4.2.0" @@ -1790,6 +2855,10 @@ description = "passive checker of Python programs" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, + {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, +] [[package]] name = "pygments" @@ -1798,6 +2867,10 @@ description = "Pygments is a syntax highlighting package written in 
Python." category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, + {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, +] [package.extras] plugins = ["importlib-metadata"] @@ -1809,6 +2882,10 @@ description = "Google Spreadsheets Python API v4" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pygsheets-2.0.6-py3-none-any.whl", hash = "sha256:3338c2eb8990fdee9f463b42a370ec0870c118d607d775471a6dfb8b08f6cd87"}, + {file = "pygsheets-2.0.6.tar.gz", hash = "sha256:bff46c812e99f9b8b81a09b456581365281c797620ec08530b0d0e48fa9299e2"}, +] [package.dependencies] google-api-python-client = ">=2.50.0" @@ -1824,6 +2901,10 @@ description = "python code static checker" category = "dev" optional = false python-versions = ">=3.7.2" +files = [ + {file = "pylint-2.17.4-py3-none-any.whl", hash = "sha256:7a1145fb08c251bdb5cca11739722ce64a63db479283d10ce718b2460e54123c"}, + {file = "pylint-2.17.4.tar.gz", hash = "sha256:5dcf1d9e19f41f38e4e85d10f511e5b9c35e1aa74251bf95cdd8cb23584e2db1"}, +] [package.dependencies] astroid = ">=2.15.4,<=2.17.0-dev0" @@ -1847,6 +2928,10 @@ description = "Python wrapper module around the OpenSSL library" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "pyOpenSSL-23.2.0-py3-none-any.whl", hash = "sha256:24f0dc5227396b3e831f4c7f602b950a5e9833d292c8e4a2e06b709292806ae2"}, + {file = "pyOpenSSL-23.2.0.tar.gz", hash = "sha256:276f931f55a452e7dea69c7173e984eb2a4407ce413c918aa34b55f82f9b8bac"}, +] [package.dependencies] cryptography = ">=38.0.0,<40.0.0 || >40.0.0,<40.0.1 || >40.0.1,<42" @@ -1862,6 +2947,10 @@ description = "pyparsing module - Classes and methods to define and execute pars category = "main" optional = false python-versions = ">=3.6.8" +files = 
[ + {file = "pyparsing-3.1.0-py3-none-any.whl", hash = "sha256:d554a96d1a7d3ddaf7183104485bc19fd80543ad6ac5bdb6426719d766fb06c1"}, + {file = "pyparsing-3.1.0.tar.gz", hash = "sha256:edb662d6fe322d6e990b1594b5feaeadf806803359e3d4d42f11e295e588f0ea"}, +] [package.extras] diagrams = ["jinja2", "railroad-diagrams"] @@ -1873,6 +2962,10 @@ description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, + {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, +] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} @@ -1892,6 +2985,10 @@ description = "Pytest plugin for measuring coverage." category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] [package.dependencies] coverage = {version = ">=5.2.1", extras = ["toml"]} @@ -1907,6 +3004,10 @@ description = "Thin-wrapper around the mock package for easier use with pytest" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"}, + {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"}, +] [package.dependencies] pytest = ">=5.0" @@ -1921,6 +3022,10 @@ description = "Extensions to the standard Python datetime module" category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = 
"sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] [package.dependencies] six = ">=1.5" @@ -1932,6 +3037,10 @@ description = "Read key-value pairs from a .env file and set them as environment category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "python-dotenv-0.21.1.tar.gz", hash = "sha256:1c93de8f636cde3ce377292818d0e440b6e45a82f215c3744979151fa8151c49"}, + {file = "python_dotenv-0.21.1-py3-none-any.whl", hash = "sha256:41e12e0318bebc859fcc4d97d4db8d20ad21721a6aa5047dd59f090391cb549a"}, +] [package.extras] cli = ["click (>=5.0)"] @@ -1943,6 +3052,10 @@ description = "A python library adding a json log formatter" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, + {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, +] [[package]] name = "pytz" @@ -1951,6 +3064,10 @@ description = "World timezone definitions, modern and historical" category = "main" optional = false python-versions = "*" +files = [ + {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, + {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, +] [[package]] name = "pywin32" @@ -1959,22 +3076,50 @@ description = "Python for Window Extensions" category = "main" optional = false python-versions = "*" - -[[package]] -name = "pywin32-ctypes" -version = "0.2.2" -description = "A (partial) reimplementation of pywin32 using ctypes/cffi" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "pywinpty" -version = "2.0.10" 
-description = "Pseudo terminal support for Windows from Python." +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pywin32-ctypes" +version = "0.2.2" +description = "A 
(partial) reimplementation of pywin32 using ctypes/cffi" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"}, + {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"}, +] + +[[package]] +name = "pywinpty" +version = "2.0.10" +description = "Pseudo terminal support for Windows from Python." category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "pywinpty-2.0.10-cp310-none-win_amd64.whl", hash = "sha256:4c7d06ad10f6e92bc850a467f26d98f4f30e73d2fe5926536308c6ae0566bc16"}, + {file = "pywinpty-2.0.10-cp311-none-win_amd64.whl", hash = "sha256:7ffbd66310b83e42028fc9df7746118978d94fba8c1ebf15a7c1275fdd80b28a"}, + {file = "pywinpty-2.0.10-cp37-none-win_amd64.whl", hash = "sha256:38cb924f2778b5751ef91a75febd114776b3af0ae411bc667be45dd84fc881d3"}, + {file = "pywinpty-2.0.10-cp38-none-win_amd64.whl", hash = "sha256:902d79444b29ad1833b8d5c3c9aabdfd428f4f068504430df18074007c8c0de8"}, + {file = "pywinpty-2.0.10-cp39-none-win_amd64.whl", hash = "sha256:3c46aef80dd50979aff93de199e4a00a8ee033ba7a03cadf0a91fed45f0c39d7"}, + {file = "pywinpty-2.0.10.tar.gz", hash = "sha256:cdbb5694cf8c7242c2ecfaca35c545d31fa5d5814c3d67a4e628f803f680ebea"}, +] [[package]] name = "pyyaml" @@ -1983,6 +3128,48 @@ description = "YAML parser and emitter for Python" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, + {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, + {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = 
"PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] [[package]] name = "pyzmq" @@ -1991,6 +3178,85 @@ description = "Python bindings for 0MQ" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = 
"pyzmq-25.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:1a6169e69034eaa06823da6a93a7739ff38716142b3596c180363dee729d713d"}, + {file = "pyzmq-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:19d0383b1f18411d137d891cab567de9afa609b214de68b86e20173dc624c101"}, + {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1e931d9a92f628858a50f5bdffdfcf839aebe388b82f9d2ccd5d22a38a789dc"}, + {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97d984b1b2f574bc1bb58296d3c0b64b10e95e7026f8716ed6c0b86d4679843f"}, + {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:154bddda2a351161474b36dba03bf1463377ec226a13458725183e508840df89"}, + {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cb6d161ae94fb35bb518b74bb06b7293299c15ba3bc099dccd6a5b7ae589aee3"}, + {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:90146ab578931e0e2826ee39d0c948d0ea72734378f1898939d18bc9c823fcf9"}, + {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:831ba20b660b39e39e5ac8603e8193f8fce1ee03a42c84ade89c36a251449d80"}, + {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a522510e3434e12aff80187144c6df556bb06fe6b9d01b2ecfbd2b5bfa5c60c"}, + {file = "pyzmq-25.1.0-cp310-cp310-win32.whl", hash = "sha256:be24a5867b8e3b9dd5c241de359a9a5217698ff616ac2daa47713ba2ebe30ad1"}, + {file = "pyzmq-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:5693dcc4f163481cf79e98cf2d7995c60e43809e325b77a7748d8024b1b7bcba"}, + {file = "pyzmq-25.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:13bbe36da3f8aaf2b7ec12696253c0bf6ffe05f4507985a8844a1081db6ec22d"}, + {file = "pyzmq-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:69511d604368f3dc58d4be1b0bad99b61ee92b44afe1cd9b7bd8c5e34ea8248a"}, + {file = 
"pyzmq-25.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a983c8694667fd76d793ada77fd36c8317e76aa66eec75be2653cef2ea72883"}, + {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:332616f95eb400492103ab9d542b69d5f0ff628b23129a4bc0a2fd48da6e4e0b"}, + {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58416db767787aedbfd57116714aad6c9ce57215ffa1c3758a52403f7c68cff5"}, + {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cad9545f5801a125f162d09ec9b724b7ad9b6440151b89645241d0120e119dcc"}, + {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d6128d431b8dfa888bf51c22a04d48bcb3d64431caf02b3cb943269f17fd2994"}, + {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b15247c49d8cbea695b321ae5478d47cffd496a2ec5ef47131a9e79ddd7e46c"}, + {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:442d3efc77ca4d35bee3547a8e08e8d4bb88dadb54a8377014938ba98d2e074a"}, + {file = "pyzmq-25.1.0-cp311-cp311-win32.whl", hash = "sha256:65346f507a815a731092421d0d7d60ed551a80d9b75e8b684307d435a5597425"}, + {file = "pyzmq-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8b45d722046fea5a5694cba5d86f21f78f0052b40a4bbbbf60128ac55bfcc7b6"}, + {file = "pyzmq-25.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f45808eda8b1d71308c5416ef3abe958f033fdbb356984fabbfc7887bed76b3f"}, + {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b697774ea8273e3c0460cf0bba16cd85ca6c46dfe8b303211816d68c492e132"}, + {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b324fa769577fc2c8f5efcd429cef5acbc17d63fe15ed16d6dcbac2c5eb00849"}, + {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:5873d6a60b778848ce23b6c0ac26c39e48969823882f607516b91fb323ce80e5"}, + {file = 
"pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f0d9e7ba6a815a12c8575ba7887da4b72483e4cfc57179af10c9b937f3f9308f"}, + {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:414b8beec76521358b49170db7b9967d6974bdfc3297f47f7d23edec37329b00"}, + {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:01f06f33e12497dca86353c354461f75275a5ad9eaea181ac0dc1662da8074fa"}, + {file = "pyzmq-25.1.0-cp36-cp36m-win32.whl", hash = "sha256:b5a07c4f29bf7cb0164664ef87e4aa25435dcc1f818d29842118b0ac1eb8e2b5"}, + {file = "pyzmq-25.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:968b0c737797c1809ec602e082cb63e9824ff2329275336bb88bd71591e94a90"}, + {file = "pyzmq-25.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47b915ba666c51391836d7ed9a745926b22c434efa76c119f77bcffa64d2c50c"}, + {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5af31493663cf76dd36b00dafbc839e83bbca8a0662931e11816d75f36155897"}, + {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5489738a692bc7ee9a0a7765979c8a572520d616d12d949eaffc6e061b82b4d1"}, + {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1fc56a0221bdf67cfa94ef2d6ce5513a3d209c3dfd21fed4d4e87eca1822e3a3"}, + {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:75217e83faea9edbc29516fc90c817bc40c6b21a5771ecb53e868e45594826b0"}, + {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3830be8826639d801de9053cf86350ed6742c4321ba4236e4b5568528d7bfed7"}, + {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3575699d7fd7c9b2108bc1c6128641a9a825a58577775ada26c02eb29e09c517"}, + {file = "pyzmq-25.1.0-cp37-cp37m-win32.whl", hash = "sha256:95bd3a998d8c68b76679f6b18f520904af5204f089beebb7b0301d97704634dd"}, + {file = "pyzmq-25.1.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:dbc466744a2db4b7ca05589f21ae1a35066afada2f803f92369f5877c100ef62"}, + {file = "pyzmq-25.1.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:3bed53f7218490c68f0e82a29c92335daa9606216e51c64f37b48eb78f1281f4"}, + {file = "pyzmq-25.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eb52e826d16c09ef87132c6e360e1879c984f19a4f62d8a935345deac43f3c12"}, + {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ddbef8b53cd16467fdbfa92a712eae46dd066aa19780681a2ce266e88fbc7165"}, + {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9301cf1d7fc1ddf668d0abbe3e227fc9ab15bc036a31c247276012abb921b5ff"}, + {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e23a8c3b6c06de40bdb9e06288180d630b562db8ac199e8cc535af81f90e64b"}, + {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4a82faae00d1eed4809c2f18b37f15ce39a10a1c58fe48b60ad02875d6e13d80"}, + {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c8398a1b1951aaa330269c35335ae69744be166e67e0ebd9869bdc09426f3871"}, + {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d40682ac60b2a613d36d8d3a0cd14fbdf8e7e0618fbb40aa9fa7b796c9081584"}, + {file = "pyzmq-25.1.0-cp38-cp38-win32.whl", hash = "sha256:33d5c8391a34d56224bccf74f458d82fc6e24b3213fc68165c98b708c7a69325"}, + {file = "pyzmq-25.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:c66b7ff2527e18554030319b1376d81560ca0742c6e0b17ff1ee96624a5f1afd"}, + {file = "pyzmq-25.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:af56229ea6527a849ac9fb154a059d7e32e77a8cba27e3e62a1e38d8808cb1a5"}, + {file = "pyzmq-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bdca18b94c404af6ae5533cd1bc310c4931f7ac97c148bbfd2cd4bdd62b96253"}, + {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:0b6b42f7055bbc562f63f3df3b63e3dd1ebe9727ff0f124c3aa7bcea7b3a00f9"}, + {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c2fc7aad520a97d64ffc98190fce6b64152bde57a10c704b337082679e74f67"}, + {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be86a26415a8b6af02cd8d782e3a9ae3872140a057f1cadf0133de685185c02b"}, + {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:851fb2fe14036cfc1960d806628b80276af5424db09fe5c91c726890c8e6d943"}, + {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2a21fec5c3cea45421a19ccbe6250c82f97af4175bc09de4d6dd78fb0cb4c200"}, + {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bad172aba822444b32eae54c2d5ab18cd7dee9814fd5c7ed026603b8cae2d05f"}, + {file = "pyzmq-25.1.0-cp39-cp39-win32.whl", hash = "sha256:4d67609b37204acad3d566bb7391e0ecc25ef8bae22ff72ebe2ad7ffb7847158"}, + {file = "pyzmq-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:71c7b5896e40720d30cd77a81e62b433b981005bbff0cb2f739e0f8d059b5d99"}, + {file = "pyzmq-25.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4cb27ef9d3bdc0c195b2dc54fcb8720e18b741624686a81942e14c8b67cc61a6"}, + {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0c4fc2741e0513b5d5a12fe200d6785bbcc621f6f2278893a9ca7bed7f2efb7d"}, + {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fc34fdd458ff77a2a00e3c86f899911f6f269d393ca5675842a6e92eea565bae"}, + {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8751f9c1442624da391bbd92bd4b072def6d7702a9390e4479f45c182392ff78"}, + {file = "pyzmq-25.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:6581e886aec3135964a302a0f5eb68f964869b9efd1dbafdebceaaf2934f8a68"}, + {file = "pyzmq-25.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:5482f08d2c3c42b920e8771ae8932fbaa0a67dff925fc476996ddd8155a170f3"}, + {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7fbcafa3ea16d1de1f213c226005fea21ee16ed56134b75b2dede5a2129e62"}, + {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:adecf6d02b1beab8d7c04bc36f22bb0e4c65a35eb0b4750b91693631d4081c70"}, + {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6d39e42a0aa888122d1beb8ec0d4ddfb6c6b45aecb5ba4013c27e2f28657765"}, + {file = "pyzmq-25.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7018289b402ebf2b2c06992813523de61d4ce17bd514c4339d8f27a6f6809492"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9e68ae9864d260b18f311b68d29134d8776d82e7f5d75ce898b40a88df9db30f"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e21cc00e4debe8f54c3ed7b9fcca540f46eee12762a9fa56feb8512fd9057161"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f666ae327a6899ff560d741681fdcdf4506f990595201ed39b44278c471ad98"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f5efcc29056dfe95e9c9db0dfbb12b62db9c4ad302f812931b6d21dd04a9119"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:48e5e59e77c1a83162ab3c163fc01cd2eebc5b34560341a67421b09be0891287"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:108c96ebbd573d929740d66e4c3d1bdf31d5cde003b8dc7811a3c8c5b0fc173b"}, + {file = "pyzmq-25.1.0.tar.gz", hash = "sha256:80c41023465d36280e801564a69cbfce8ae85ff79b080e1913f6e90481fb8957"}, +] [package.dependencies] cffi = {version = "*", markers = "implementation_name == \"pypy\""} @@ -2002,6 +3268,10 @@ description = "RDFLib is a Python library for working with RDF, a simple 
yet pow category = "main" optional = false python-versions = ">=3.7,<4.0" +files = [ + {file = "rdflib-6.3.2-py3-none-any.whl", hash = "sha256:36b4e74a32aa1e4fa7b8719876fb192f19ecd45ff932ea5ebbd2e417a0247e63"}, + {file = "rdflib-6.3.2.tar.gz", hash = "sha256:72af591ff704f4caacea7ecc0c5a9056b8553e0489dd4f35a9bc52dbd41522e0"}, +] [package.dependencies] isodate = ">=0.6.0,<0.7.0" @@ -2020,6 +3290,10 @@ description = "JSON Referencing + Python" category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "referencing-0.29.1-py3-none-any.whl", hash = "sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f"}, + {file = "referencing-0.29.1.tar.gz", hash = "sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e"}, +] [package.dependencies] attrs = ">=22.2.0" @@ -2032,6 +3306,96 @@ description = "Alternative regular expression module, to replace re." category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "regex-2023.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:824bf3ac11001849aec3fa1d69abcb67aac3e150a933963fb12bda5151fe1bfd"}, + {file = "regex-2023.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05ed27acdf4465c95826962528f9e8d41dbf9b1aa8531a387dee6ed215a3e9ef"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b49c764f88a79160fa64f9a7b425620e87c9f46095ef9c9920542ab2495c8bc"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e3f1316c2293e5469f8f09dc2d76efb6c3982d3da91ba95061a7e69489a14ef"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43e1dd9d12df9004246bacb79a0e5886b3b6071b32e41f83b0acbf293f820ee8"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4959e8bcbfda5146477d21c3a8ad81b185cd252f3d0d6e4724a5ef11c012fb06"}, + {file = 
"regex-2023.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4dd387354dc83a3bff67127a124c21116feb0d2ef536805c454721c5d7993d"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2239d95d8e243658b8dbb36b12bd10c33ad6e6933a54d36ff053713f129aa536"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:890e5a11c97cf0d0c550eb661b937a1e45431ffa79803b942a057c4fb12a2da2"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a8105e9af3b029f243ab11ad47c19b566482c150c754e4c717900a798806b222"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:25be746a8ec7bc7b082783216de8e9473803706723b3f6bef34b3d0ed03d57e2"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3676f1dd082be28b1266c93f618ee07741b704ab7b68501a173ce7d8d0d0ca18"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10cb847aeb1728412c666ab2e2000ba6f174f25b2bdc7292e7dd71b16db07568"}, + {file = "regex-2023.6.3-cp310-cp310-win32.whl", hash = "sha256:dbbbfce33cd98f97f6bffb17801b0576e653f4fdb1d399b2ea89638bc8d08ae1"}, + {file = "regex-2023.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:c5f8037000eb21e4823aa485149f2299eb589f8d1fe4b448036d230c3f4e68e0"}, + {file = "regex-2023.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c123f662be8ec5ab4ea72ea300359023a5d1df095b7ead76fedcd8babbedf969"}, + {file = "regex-2023.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9edcbad1f8a407e450fbac88d89e04e0b99a08473f666a3f3de0fd292badb6aa"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcba6dae7de533c876255317c11f3abe4907ba7d9aa15d13e3d9710d4315ec0e"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:29cdd471ebf9e0f2fb3cac165efedc3c58db841d83a518b082077e612d3ee5df"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b74fbbf6cbbf9dbce20eb9b5879469e97aeeaa874145517563cca4029db65c"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c29ca1bd61b16b67be247be87390ef1d1ef702800f91fbd1991f5c4421ebae8"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77f09bc4b55d4bf7cc5eba785d87001d6757b7c9eec237fe2af57aba1a071d9"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ea353ecb6ab5f7e7d2f4372b1e779796ebd7b37352d290096978fea83c4dba0c"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:10590510780b7541969287512d1b43f19f965c2ece6c9b1c00fc367b29d8dce7"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2fbd6236aae3b7f9d514312cdb58e6494ee1c76a9948adde6eba33eb1c4264f"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:6b2675068c8b56f6bfd5a2bda55b8accbb96c02fd563704732fd1c95e2083461"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74419d2b50ecb98360cfaa2974da8689cb3b45b9deff0dcf489c0d333bcc1477"}, + {file = "regex-2023.6.3-cp311-cp311-win32.whl", hash = "sha256:fb5ec16523dc573a4b277663a2b5a364e2099902d3944c9419a40ebd56a118f9"}, + {file = "regex-2023.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:09e4a1a6acc39294a36b7338819b10baceb227f7f7dbbea0506d419b5a1dd8af"}, + {file = "regex-2023.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0654bca0cdf28a5956c83839162692725159f4cda8d63e0911a2c0dc76166525"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b6a3ceb5ca952e66550a4532cef94c9a0c80dc156c4cc343041951aec1697"}, + {file = 
"regex-2023.6.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87b2a5bb5e78ee0ad1de71c664d6eb536dc3947a46a69182a90f4410f5e3f7dd"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6343c6928282c1f6a9db41f5fd551662310e8774c0e5ebccb767002fcf663ca9"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6192d5af2ccd2a38877bfef086d35e6659566a335b1492786ff254c168b1693"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74390d18c75054947e4194019077e243c06fbb62e541d8817a0fa822ea310c14"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:742e19a90d9bb2f4a6cf2862b8b06dea5e09b96c9f2df1779e53432d7275331f"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8abbc5d54ea0ee80e37fef009e3cec5dafd722ed3c829126253d3e22f3846f1e"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c2b867c17a7a7ae44c43ebbeb1b5ff406b3e8d5b3e14662683e5e66e6cc868d3"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d831c2f8ff278179705ca59f7e8524069c1a989e716a1874d6d1aab6119d91d1"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ee2d1a9a253b1729bb2de27d41f696ae893507c7db224436abe83ee25356f5c1"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:61474f0b41fe1a80e8dfa70f70ea1e047387b7cd01c85ec88fa44f5d7561d787"}, + {file = "regex-2023.6.3-cp36-cp36m-win32.whl", hash = "sha256:0b71e63226e393b534105fcbdd8740410dc6b0854c2bfa39bbda6b0d40e59a54"}, + {file = "regex-2023.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bbb02fd4462f37060122e5acacec78e49c0fbb303c30dd49c7f493cf21fc5b27"}, + {file = "regex-2023.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:b862c2b9d5ae38a68b92e215b93f98d4c5e9454fa36aae4450f61dd33ff48487"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:976d7a304b59ede34ca2921305b57356694f9e6879db323fd90a80f865d355a3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:83320a09188e0e6c39088355d423aa9d056ad57a0b6c6381b300ec1a04ec3d16"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9427a399501818a7564f8c90eced1e9e20709ece36be701f394ada99890ea4b3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178bbc1b2ec40eaca599d13c092079bf529679bf0371c602edaa555e10b41c3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:837328d14cde912af625d5f303ec29f7e28cdab588674897baafaf505341f2fc"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d44dc13229905ae96dd2ae2dd7cebf824ee92bc52e8cf03dcead37d926da019"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d54af539295392611e7efbe94e827311eb8b29668e2b3f4cadcfe6f46df9c777"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7117d10690c38a622e54c432dfbbd3cbd92f09401d622902c32f6d377e2300ee"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bb60b503ec8a6e4e3e03a681072fa3a5adcbfa5479fa2d898ae2b4a8e24c4591"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:65ba8603753cec91c71de423a943ba506363b0e5c3fdb913ef8f9caa14b2c7e0"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:271f0bdba3c70b58e6f500b205d10a36fb4b58bd06ac61381b68de66442efddb"}, + {file = "regex-2023.6.3-cp37-cp37m-win32.whl", hash = 
"sha256:9beb322958aaca059f34975b0df135181f2e5d7a13b84d3e0e45434749cb20f7"}, + {file = "regex-2023.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fea75c3710d4f31389eed3c02f62d0b66a9da282521075061ce875eb5300cf23"}, + {file = "regex-2023.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f56fcb7ff7bf7404becdfc60b1e81a6d0561807051fd2f1860b0d0348156a07"}, + {file = "regex-2023.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d2da3abc88711bce7557412310dfa50327d5769a31d1c894b58eb256459dc289"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99b50300df5add73d307cf66abea093304a07eb017bce94f01e795090dea87c"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5708089ed5b40a7b2dc561e0c8baa9535b77771b64a8330b684823cfd5116036"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:687ea9d78a4b1cf82f8479cab23678aff723108df3edeac098e5b2498879f4a7"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3850beab9f527f06ccc94b446c864059c57651b3f911fddb8d9d3ec1d1b25d"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8915cc96abeb8983cea1df3c939e3c6e1ac778340c17732eb63bb96247b91d2"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:841d6e0e5663d4c7b4c8099c9997be748677d46cbf43f9f471150e560791f7ff"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9edce5281f965cf135e19840f4d93d55b3835122aa76ccacfd389e880ba4cf82"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b956231ebdc45f5b7a2e1f90f66a12be9610ce775fe1b1d50414aac1e9206c06"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = 
"sha256:36efeba71c6539d23c4643be88295ce8c82c88bbd7c65e8a24081d2ca123da3f"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:cf67ca618b4fd34aee78740bea954d7c69fdda419eb208c2c0c7060bb822d747"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b4598b1897837067a57b08147a68ac026c1e73b31ef6e36deeeb1fa60b2933c9"}, + {file = "regex-2023.6.3-cp38-cp38-win32.whl", hash = "sha256:f415f802fbcafed5dcc694c13b1292f07fe0befdb94aa8a52905bd115ff41e88"}, + {file = "regex-2023.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:d4f03bb71d482f979bda92e1427f3ec9b220e62a7dd337af0aa6b47bf4498f72"}, + {file = "regex-2023.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccf91346b7bd20c790310c4147eee6ed495a54ddb6737162a36ce9dbef3e4751"}, + {file = "regex-2023.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b28f5024a3a041009eb4c333863d7894d191215b39576535c6734cd88b0fcb68"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0bb18053dfcfed432cc3ac632b5e5e5c5b7e55fb3f8090e867bfd9b054dbcbf"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5bfb3004f2144a084a16ce19ca56b8ac46e6fd0651f54269fc9e230edb5e4a"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c6b48d0fa50d8f4df3daf451be7f9689c2bde1a52b1225c5926e3f54b6a9ed1"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051da80e6eeb6e239e394ae60704d2b566aa6a7aed6f2890a7967307267a5dc6"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4c3b7fa4cdaa69268748665a1a6ff70c014d39bb69c50fda64b396c9116cf77"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:457b6cce21bee41ac292d6753d5e94dcbc5c9e3e3a834da285b0bde7aa4a11e9"}, 
+ {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aad51907d74fc183033ad796dd4c2e080d1adcc4fd3c0fd4fd499f30c03011cd"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0385e73da22363778ef2324950e08b689abdf0b108a7d8decb403ad7f5191938"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c6a57b742133830eec44d9b2290daf5cbe0a2f1d6acee1b3c7b1c7b2f3606df7"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3e5219bf9e75993d73ab3d25985c857c77e614525fac9ae02b1bebd92f7cecac"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e5087a3c59eef624a4591ef9eaa6e9a8d8a94c779dade95d27c0bc24650261cd"}, + {file = "regex-2023.6.3-cp39-cp39-win32.whl", hash = "sha256:20326216cc2afe69b6e98528160b225d72f85ab080cbdf0b11528cbbaba2248f"}, + {file = "regex-2023.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:bdff5eab10e59cf26bc479f565e25ed71a7d041d1ded04ccf9aee1d9f208487a"}, + {file = "regex-2023.6.3.tar.gz", hash = "sha256:72d1a25bf36d2050ceb35b517afe13864865268dfb45910e2e17a84be6cbfeb0"}, +] [[package]] name = "requests" @@ -2040,6 +3404,10 @@ description = "Python HTTP for Humans." category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] [package.dependencies] certifi = ">=2017.4.17" @@ -2058,7 +3426,11 @@ description = "OAuthlib authentication support for Requests." 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - +files = [ + {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, + {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, +] + [package.dependencies] oauthlib = ">=3.0.0" requests = ">=2.0.0" @@ -2073,6 +3445,10 @@ description = "A pure python RFC3339 validator" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, + {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, +] [package.dependencies] six = "*" @@ -2084,6 +3460,10 @@ description = "Pure python rfc3986 validator" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, + {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, +] [[package]] name = "rpds-py" @@ -2092,6 +3472,105 @@ description = "Python bindings to Rust's persistent data structures (rpds)" category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.8.10-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711"}, + {file = "rpds_py-0.8.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8"}, + {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0"}, + {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451"}, + {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0"}, + {file = "rpds_py-0.8.10-cp310-none-win32.whl", hash = "sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84"}, + {file = "rpds_py-0.8.10-cp310-none-win_amd64.whl", hash = "sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e"}, + {file = "rpds_py-0.8.10-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7"}, + {file = "rpds_py-0.8.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786"}, + {file = 
"rpds_py-0.8.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae"}, + {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e"}, + {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d"}, + {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7"}, + {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169"}, + {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb"}, + {file = "rpds_py-0.8.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe"}, + {file = "rpds_py-0.8.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0"}, + {file = "rpds_py-0.8.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8"}, + {file = "rpds_py-0.8.10-cp311-none-win32.whl", hash = "sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9"}, + {file = "rpds_py-0.8.10-cp311-none-win_amd64.whl", hash = "sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055"}, + {file = "rpds_py-0.8.10-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2"}, + {file = "rpds_py-0.8.10-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58"}, + {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346"}, + {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7"}, + {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4"}, + {file = "rpds_py-0.8.10-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7"}, + {file = "rpds_py-0.8.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722"}, + {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991"}, + {file = 
"rpds_py-0.8.10-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca"}, + {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf"}, + {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4"}, + {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7"}, + {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873"}, + {file = "rpds_py-0.8.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52"}, + {file = "rpds_py-0.8.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38"}, + {file = "rpds_py-0.8.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b"}, + {file = "rpds_py-0.8.10-cp38-none-win32.whl", hash = "sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6"}, + {file = "rpds_py-0.8.10-cp38-none-win_amd64.whl", hash = "sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6"}, + {file = "rpds_py-0.8.10-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8"}, + {file = "rpds_py-0.8.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c"}, + {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16"}, + {file = 
"rpds_py-0.8.10-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921"}, + {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea"}, + {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e"}, + {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4"}, + {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8"}, + {file = "rpds_py-0.8.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d"}, + {file = "rpds_py-0.8.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a"}, + {file = "rpds_py-0.8.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2"}, + {file = "rpds_py-0.8.10-cp39-none-win32.whl", hash = "sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49"}, + {file = "rpds_py-0.8.10-cp39-none-win_amd64.whl", hash = "sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734"}, + {file = "rpds_py-0.8.10.tar.gz", hash = "sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4"}, +] [[package]] name = "rsa" @@ -2100,6 +3579,10 @@ description = "Pure-Python RSA implementation" category = "main" optional = false python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] [package.dependencies] pyasn1 = ">=0.1.3" @@ -2111,6 +3594,10 @@ description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip pres category = "main" optional = false python-versions = ">=3" +files = [ + {file = "ruamel.yaml-0.17.17-py3-none-any.whl", hash = "sha256:9af3ec5d7f8065582f3aa841305465025d0afd26c5fb54e15b964e11838fc74f"}, + {file = "ruamel.yaml-0.17.17.tar.gz", hash = 
"sha256:9751de4cbb57d4bfbf8fc394e125ed4a2f170fbff3dc3d78abf50be85924f8be"}, +] [package.dependencies] "ruamel.yaml.clib" = {version = ">=0.1.2", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.10\""} @@ -2126,6 +3613,45 @@ description = "C version of reader, parser and emitter for ruamel.yaml derived f category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71"}, + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7"}, + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80"}, + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab"}, + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win32.whl", hash = "sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231"}, + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:1a6391a7cabb7641c32517539ca42cf84b87b667bad38b78d4d42dd23e957c81"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9c7617df90c1365638916b98cdd9be833d31d337dbcd722485597b43c4a215bf"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win32.whl", hash = "sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122"}, + {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072"}, + {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_12_0_arm64.whl", hash = "sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8"}, + {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3"}, + {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763"}, + {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win32.whl", hash = "sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e"}, + {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win_amd64.whl", hash = "sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646"}, + {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f"}, + {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0"}, + {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282"}, + {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7"}, + {file = 
"ruamel.yaml.clib-0.2.7-cp37-cp37m-win32.whl", hash = "sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93"}, + {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b"}, + {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb"}, + {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307"}, + {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697"}, + {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b"}, + {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win32.whl", hash = "sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac"}, + {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win32.whl", hash = 
"sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5"}, + {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"}, +] [[package]] name = "schematic-db" @@ -2134,6 +3660,10 @@ description = "" category = "main" optional = false python-versions = ">=3.9,<4.0" +files = [ + {file = "schematic_db-0.0.29-py3-none-any.whl", hash = "sha256:e43f1d7c06d877d47036c5a480ac8f22333daa967df67c4d8316091ff4ddc0a5"}, + {file = "schematic_db-0.0.29.tar.gz", hash = "sha256:77d338b34dd8f1e75b9df5b9b3f20de35087285079019d48d162de0d131f3ffb"}, +] [package.dependencies] deprecation = ">=2.1.0,<3.0.0" @@ -2161,6 +3691,27 @@ description = "Fundamental algorithms for scientific computing in Python" category = "main" optional = false python-versions = "<3.13,>=3.9" +files = [ + {file = "scipy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aec8c62fbe52914f9cf28d846cf0401dd80ab80788bbab909434eb336ed07c04"}, + {file = "scipy-1.11.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:3b9963798df1d8a52db41a6fc0e6fa65b1c60e85d73da27ae8bb754de4792481"}, + {file = "scipy-1.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e8eb42db36526b130dfbc417609498a6192381abc1975b91e3eb238e0b41c1a"}, + {file = "scipy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:366a6a937110d80dca4f63b3f5b00cc89d36f678b2d124a01067b154e692bab1"}, + {file = "scipy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:08d957ca82d3535b3b9ba6c8ff355d78fe975271874e2af267cb5add5bd78625"}, + {file = "scipy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:e866514bc2d660608447b6ba95c8900d591f2865c07cca0aa4f7ff3c4ca70f30"}, + {file = "scipy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:ba94eeef3c9caa4cea7b402a35bb02a5714ee1ee77eb98aca1eed4543beb0f4c"}, + {file = "scipy-1.11.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:512fdc18c65f76dadaca139348e525646d440220d8d05f6d21965b8d4466bccd"}, + {file = "scipy-1.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cce154372f0ebe88556ed06d7b196e9c2e0c13080ecb58d0f35062dc7cc28b47"}, + {file = "scipy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4bb943010203465ac81efa392e4645265077b4d9e99b66cf3ed33ae12254173"}, + {file = "scipy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:249cfa465c379c9bb2c20123001e151ff5e29b351cbb7f9c91587260602c58d0"}, + {file = "scipy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:ffb28e3fa31b9c376d0fb1f74c1f13911c8c154a760312fbee87a21eb21efe31"}, + {file = "scipy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:39154437654260a52871dfde852adf1b93b1d1bc5dc0ffa70068f16ec0be2624"}, + {file = "scipy-1.11.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:b588311875c58d1acd4ef17c983b9f1ab5391755a47c3d70b6bd503a45bfaf71"}, + {file = "scipy-1.11.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d51565560565a0307ed06fa0ec4c6f21ff094947d4844d6068ed04400c72d0c3"}, + {file = "scipy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b41a0f322b4eb51b078cb3441e950ad661ede490c3aca66edef66f4b37ab1877"}, + {file = "scipy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:396fae3f8c12ad14c5f3eb40499fd06a6fef8393a6baa352a652ecd51e74e029"}, + {file = "scipy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:be8c962a821957fdde8c4044efdab7a140c13294997a407eaee777acf63cbf0c"}, + {file = "scipy-1.11.1.tar.gz", hash = "sha256:fb5b492fa035334fd249f0973cc79ecad8b09c604b42a127a677b45a9a3d4289"}, +] [package.dependencies] numpy = ">=1.21.6,<1.28.0" @@ -2177,6 +3728,10 @@ description = "Python bindings to FreeDesktop.org Secret Service API" 
category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, + {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, +] [package.dependencies] cryptography = ">=2.0" @@ -2189,6 +3744,10 @@ description = "Send file to trash natively under Mac OS X, Windows and Linux" category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ + {file = "Send2Trash-1.8.2-py3-none-any.whl", hash = "sha256:a384719d99c07ce1eefd6905d2decb6f8b7ed054025bb0e618919f945de4f679"}, + {file = "Send2Trash-1.8.2.tar.gz", hash = "sha256:c132d59fa44b9ca2b1699af5c86f57ce9f4c5eb56629d5d55fbb7a35f84e2312"}, +] [package.extras] nativelib = ["pyobjc-framework-Cocoa", "pywin32"] @@ -2202,6 +3761,10 @@ description = "Easily download, build, install, upgrade, and uninstall Python pa category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "setuptools-66.1.1-py3-none-any.whl", hash = "sha256:6f590d76b713d5de4e49fe4fbca24474469f53c83632d5d0fd056f7ff7e8112b"}, + {file = "setuptools-66.1.1.tar.gz", hash = "sha256:ac4008d396bc9cd983ea483cb7139c0240a07bbc74ffb6232fceffedc6cf03a8"}, +] [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] @@ -2215,6 +3778,10 @@ description = "Python 2 and 3 compatibility utilities" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = 
"six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] [[package]] name = "sniffio" @@ -2223,6 +3790,10 @@ description = "Sniff out which async library your code is running under" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] [[package]] name = "snowballstemmer" @@ -2231,6 +3802,10 @@ description = "This package provides 29 stemmers for 28 languages generated from category = "main" optional = false python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] [[package]] name = "soupsieve" @@ -2239,6 +3814,10 @@ description = "A modern CSS selector implementation for Beautiful Soup." 
category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, + {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, +] [[package]] name = "sphinx" @@ -2247,6 +3826,10 @@ description = "Python documentation generator" category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "Sphinx-7.0.1.tar.gz", hash = "sha256:61e025f788c5977d9412587e733733a289e2b9fdc2fef8868ddfbfc4ccfe881d"}, + {file = "sphinx-7.0.1-py3-none-any.whl", hash = "sha256:60c5e04756c1709a98845ed27a2eed7a556af3993afb66e77fec48189f742616"}, +] [package.dependencies] alabaster = ">=0.7,<0.8" @@ -2279,6 +3862,10 @@ description = "Sphinx extension that automatically documents click applications" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "sphinx-click-4.4.0.tar.gz", hash = "sha256:cc67692bd28f482c7f01531c61b64e9d2f069bfcf3d24cbbb51d4a84a749fa48"}, + {file = "sphinx_click-4.4.0-py3-none-any.whl", hash = "sha256:2821c10a68fc9ee6ce7c92fad26540d8d8c8f45e6d7258f0e4fb7529ae8fab49"}, +] [package.dependencies] click = ">=7.0" @@ -2292,6 +3879,10 @@ description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -2304,6 +3895,10 @@ description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", 
hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -2316,6 +3911,10 @@ description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML h category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -2328,6 +3927,10 @@ description = "A sphinx extension which renders display math in HTML via JavaScr category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] [package.extras] test = ["flake8", "mypy", "pytest"] @@ -2339,6 +3942,10 @@ description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp d category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -2351,6 +3958,10 @@ description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs category = "main" optional = false python-versions = ">=3.5" +files = 
[ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -2363,13 +3974,63 @@ description = "Database Abstraction Library" category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "SQLAlchemy-1.4.49-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2e126cf98b7fd38f1e33c64484406b78e937b1a280e078ef558b95bf5b6895f6"}, + {file = "SQLAlchemy-1.4.49-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:03db81b89fe7ef3857b4a00b63dedd632d6183d4ea5a31c5d8a92e000a41fc71"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:95b9df9afd680b7a3b13b38adf6e3a38995da5e162cc7524ef08e3be4e5ed3e1"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a63e43bf3f668c11bb0444ce6e809c1227b8f067ca1068898f3008a273f52b09"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca46de16650d143a928d10842939dab208e8d8c3a9a8757600cae9b7c579c5cd"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f835c050ebaa4e48b18403bed2c0fda986525896efd76c245bdd4db995e51a4c"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c21b172dfb22e0db303ff6419451f0cac891d2e911bb9fbf8003d717f1bcf91"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-win32.whl", hash = 
"sha256:5fb1ebdfc8373b5a291485757bd6431de8d7ed42c27439f543c81f6c8febd729"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-win_amd64.whl", hash = "sha256:f8a65990c9c490f4651b5c02abccc9f113a7f56fa482031ac8cb88b70bc8ccaa"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8923dfdf24d5aa8a3adb59723f54118dd4fe62cf59ed0d0d65d940579c1170a4"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9ab2c507a7a439f13ca4499db6d3f50423d1d65dc9b5ed897e70941d9e135b0"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5debe7d49b8acf1f3035317e63d9ec8d5e4d904c6e75a2a9246a119f5f2fdf3d"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-win32.whl", hash = "sha256:82b08e82da3756765c2e75f327b9bf6b0f043c9c3925fb95fb51e1567fa4ee87"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-win_amd64.whl", hash = "sha256:171e04eeb5d1c0d96a544caf982621a1711d078dbc5c96f11d6469169bd003f1"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f23755c384c2969ca2f7667a83f7c5648fcf8b62a3f2bbd883d805454964a800"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8396e896e08e37032e87e7fbf4a15f431aa878c286dc7f79e616c2feacdb366c"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66da9627cfcc43bbdebd47bfe0145bb662041472393c03b7802253993b6b7c90"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-win32.whl", hash = "sha256:9a06e046ffeb8a484279e54bda0a5abfd9675f594a2e38ef3133d7e4d75b6214"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-win_amd64.whl", hash = "sha256:7cf8b90ad84ad3a45098b1c9f56f2b161601e4670827d6b892ea0e884569bd1d"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:36e58f8c4fe43984384e3fbe6341ac99b6b4e083de2fe838f0fdb91cebe9e9cb"}, + 
{file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b31e67ff419013f99ad6f8fc73ee19ea31585e1e9fe773744c0f3ce58c039c30"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc22807a7e161c0d8f3da34018ab7c97ef6223578fcdd99b1d3e7ed1100a5db"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c14b29d9e1529f99efd550cd04dbb6db6ba5d690abb96d52de2bff4ed518bc95"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40f3470e084d31247aea228aa1c39bbc0904c2b9ccbf5d3cfa2ea2dac06f26d"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-win32.whl", hash = "sha256:706bfa02157b97c136547c406f263e4c6274a7b061b3eb9742915dd774bbc264"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-win_amd64.whl", hash = "sha256:a7f7b5c07ae5c0cfd24c2db86071fb2a3d947da7bd487e359cc91e67ac1c6d2e"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:4afbbf5ef41ac18e02c8dc1f86c04b22b7a2125f2a030e25bbb4aff31abb224b"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24e300c0c2147484a002b175f4e1361f102e82c345bf263242f0449672a4bccf"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:393cd06c3b00b57f5421e2133e088df9cabcececcea180327e43b937b5a7caa5"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:201de072b818f8ad55c80d18d1a788729cccf9be6d9dc3b9d8613b053cd4836d"}, + {file = 
"SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653ed6817c710d0c95558232aba799307d14ae084cc9b1f4c389157ec50df5c"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-win32.whl", hash = "sha256:647e0b309cb4512b1f1b78471fdaf72921b6fa6e750b9f891e09c6e2f0e5326f"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-win_amd64.whl", hash = "sha256:ab73ed1a05ff539afc4a7f8cf371764cdf79768ecb7d2ec691e3ff89abbc541e"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:37ce517c011560d68f1ffb28af65d7e06f873f191eb3a73af5671e9c3fada08a"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1878ce508edea4a879015ab5215546c444233881301e97ca16fe251e89f1c55"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ab792ca493891d7a45a077e35b418f68435efb3e1706cb8155e20e86a9013c"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e8e608983e6f85d0852ca61f97e521b62e67969e6e640fe6c6b575d4db68557"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccf956da45290df6e809ea12c54c02ace7f8ff4d765d6d3dfb3655ee876ce58d"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-win32.whl", hash = "sha256:f167c8175ab908ce48bd6550679cc6ea20ae169379e73c7720a28f89e53aa532"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-win_amd64.whl", hash = "sha256:45806315aae81a0c202752558f0df52b42d11dd7ba0097bf71e253b4215f34f4"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b6d0c4b15d65087738a6e22e0ff461b407533ff65a73b818089efc8eb2b3e1de"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a843e34abfd4c797018fd8d00ffffa99fd5184c421f190b6ca99def4087689bd"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:738d7321212941ab19ba2acf02a68b8ee64987b248ffa2101630e8fccb549e0d"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1c890421651b45a681181301b3497e4d57c0d01dc001e10438a40e9a9c25ee77"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d26f280b8f0a8f497bc10573849ad6dc62e671d2468826e5c748d04ed9e670d5"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-win32.whl", hash = "sha256:ec2268de67f73b43320383947e74700e95c6770d0c68c4e615e9897e46296294"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-win_amd64.whl", hash = "sha256:bbdf16372859b8ed3f4d05f925a984771cd2abd18bd187042f24be4886c2a15f"}, + {file = "SQLAlchemy-1.4.49.tar.gz", hash = "sha256:06ff25cbae30c396c4b7737464f2a7fc37a67b7da409993b182b024cec80aed9"}, +] [package.dependencies] greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} [package.extras] aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] @@ -2379,14 +4040,14 @@ mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] mysql = ["mysqlclient (>=1.4.0)", 
"mysqlclient (>=1.4.0,<2)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] +oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] -sqlcipher = ["sqlcipher3_binary"] +sqlcipher = ["sqlcipher3-binary"] [[package]] name = "sqlalchemy-utils" @@ -2395,8 +4056,12 @@ description = "Various utility functions for SQLAlchemy." category = "main" optional = false python-versions = "~=3.6" - -[package.dependencies] +files = [ + {file = "SQLAlchemy-Utils-0.38.3.tar.gz", hash = "sha256:9f9afba607a40455cf703adfa9846584bf26168a0c5a60a70063b70d65051f4d"}, + {file = "SQLAlchemy_Utils-0.38.3-py3-none-any.whl", hash = "sha256:5c13b5d08adfaa85f3d4e8ec09a75136216fad41346980d02974a70a77988bf9"}, +] + +[package.dependencies] SQLAlchemy = ">=1.3" [package.extras] @@ -2420,6 +4085,10 @@ description = "Extract data from python stack frames and tracebacks for informat category = "main" optional = false python-versions = "*" +files = [ + {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, + {file = "stack_data-0.6.2.tar.gz", hash = "sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, +] [package.dependencies] asttokens = ">=2.1.0" @@ -2436,6 +4105,10 @@ description = "swagger_ui_bundle - swagger-ui files in a pip package" category = "main" optional = false python-versions = "*" +files = [ + {file = "swagger_ui_bundle-0.0.9-py3-none-any.whl", hash = "sha256:cea116ed81147c345001027325c1ddc9ca78c1ee7319935c3c75d3669279d575"}, + {file = "swagger_ui_bundle-0.0.9.tar.gz", hash = "sha256:b462aa1460261796ab78fd4663961a7f6f347ce01760f1303bbbdf630f11f516"}, +] [package.dependencies] Jinja2 = 
">=2.0" @@ -2447,6 +4120,10 @@ description = "A client for Synapse, a collaborative compute space that allows category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "synapseclient-2.7.2-py3-none-any.whl", hash = "sha256:dd8b1a1b4667d08311bb651469431f43fe2eeab83c0ef1fe5a03c2929aeb26cd"}, + {file = "synapseclient-2.7.2.tar.gz", hash = "sha256:dc5a61f9f495109a0c89aa7d42b641b6ff278280d7961fb450dd5015704fe15b"}, +] [package.dependencies] deprecated = ">=1.2.4,<2.0" @@ -2470,6 +4147,10 @@ description = "Pretty-print tabular data" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] [package.extras] widechars = ["wcwidth"] @@ -2481,6 +4162,10 @@ description = "Retry code until it succeeds" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "tenacity-8.2.2-py3-none-any.whl", hash = "sha256:2f277afb21b851637e8f52e6a613ff08734c347dc19ade928e519d7d2d8569b0"}, + {file = "tenacity-8.2.2.tar.gz", hash = "sha256:43af037822bd0029025877f3b2d97cc4d7bb0c2991000a3d59d71517c5c969e0"}, +] [package.extras] doc = ["reno", "sphinx", "tornado (>=4.5)"] @@ -2492,6 +4177,10 @@ description = "Tornado websocket backend for the Xterm.js Javascript terminal em category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "terminado-0.17.1-py3-none-any.whl", hash = "sha256:8650d44334eba354dd591129ca3124a6ba42c3d5b70df5051b6921d506fdaeae"}, + {file = "terminado-0.17.1.tar.gz", hash = "sha256:6ccbbcd3a4f8a25a5ec04991f39a0b8db52dfcd487ea0e578d977e6752380333"}, +] [package.dependencies] ptyprocess = {version = "*", markers = "os_name != \"nt\""} @@ -2505,1931 +4194,78 @@ test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] [[package]] name = "tinycss2" 
version = "1.2.1" -description = "A tiny CSS parser" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -webencodings = ">=0.4" - -[package.extras] -doc = ["sphinx", "sphinx_rtd_theme"] -test = ["flake8", "isort", "pytest"] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -category = "main" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "tomlkit" -version = "0.11.8" -description = "Style preserving TOML library" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "toolz" -version = "0.12.0" -description = "List processing tools and functional utilities" -category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "tornado" -version = "6.3.2" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
-category = "main" -optional = false -python-versions = ">= 3.8" - -[[package]] -name = "tqdm" -version = "4.65.0" -description = "Fast, Extensible Progress Meter" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["py-make (>=0.1.0)", "twine", "wheel"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "traitlets" -version = "5.9.0" -description = "Traitlets Python configuration system" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] - -[[package]] -name = "typing-extensions" -version = "4.5.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "tzdata" -version = "2023.3" -description = "Provider of IANA time zone data" -category = "main" -optional = false -python-versions = ">=2" - -[[package]] -name = "tzlocal" -version = "5.0.1" -description = "tzinfo object for the local timezone" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -tzdata = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -devenv = ["black", "check-manifest", "flake8", "pyroma", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] - -[[package]] -name = "uri-template" -version = "1.3.0" -description = "RFC 6570 URI Template Processor" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", 
"flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] - -[[package]] -name = "uritemplate" -version = "4.1.1" -description = "Implementation of RFC 6570 URI Templates" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "urllib3" -version = "1.26.16" -description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "uwsgi" -version = "2.0.21" -description = "The uWSGI server" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "validators" -version = "0.20.0" -description = "Python Data Validation for Humans™." -category = "main" -optional = false -python-versions = ">=3.4" - -[package.dependencies] -decorator = ">=3.4.0" - -[package.extras] -test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] - -[[package]] -name = "wcwidth" -version = "0.2.6" -description = "Measures the displayed width of unicode strings in a terminal" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "webcolors" -version = "1.13" -description = "A library for working with the color formats defined by HTML and CSS." 
-category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] -tests = ["pytest", "pytest-cov"] - -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "websocket-client" -version = "1.6.1" -description = "WebSocket client for Python with low level API options" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - -[[package]] -name = "werkzeug" -version = "2.1.2" -description = "The comprehensive WSGI web application library." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -watchdog = ["watchdog"] - -[[package]] -name = "widgetsnbextension" -version = "4.0.8" -description = "Jupyter interactive widgets for Jupyter Notebook" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "wrapt" -version = "1.15.0" -description = "Module for decorators, wrappers and monkey patching." 
-category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - -[[package]] -name = "zipp" -version = "3.16.1" -description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" -optional = false -python-versions = ">=3.8" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] - -[metadata] -lock-version = "1.1" -python-versions = ">=3.9.0,<3.11" -content-hash = "ebae29c94e793b572346ce4ca38e9744e0cda913550dc0a3c05b76f8f4796715" - -[metadata.files] -alabaster = [ - {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, - {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, -] -altair = [ - {file = "altair-4.2.0-py3-none-any.whl", hash = "sha256:0c724848ae53410c13fa28be2b3b9a9dcb7b5caa1a70f7f217bd663bb419935a"}, - {file = "altair-4.2.0.tar.gz", hash = "sha256:d87d9372e63b48cd96b2a6415f0cf9457f50162ab79dc7a31cd7e024dd840026"}, -] -anyio = [ - {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, - {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, -] -appnope = [ - {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, - {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, -] -argon2-cffi = [ - {file = "argon2-cffi-21.3.0.tar.gz", hash = 
"sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, - {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, -] -argon2-cffi-bindings = [ - {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, - {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = 
"sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, -] -arrow = [ - {file = "arrow-1.2.3-py3-none-any.whl", hash = 
"sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"}, - {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, -] -astroid = [ - {file = "astroid-2.15.6-py3-none-any.whl", hash = "sha256:389656ca57b6108f939cf5d2f9a2a825a3be50ba9d589670f393236e0a03b91c"}, - {file = "astroid-2.15.6.tar.gz", hash = "sha256:903f024859b7c7687d7a7f3a3f73b17301f8e42dfd9cc9df9d4418172d3e2dbd"}, -] -asttokens = [ - {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, - {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, -] -attrs = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, -] -babel = [ - {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, - {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, -] -backcall = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] -beautifulsoup4 = [ - {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, - {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, -] -black = [ - {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, - {file = 
"black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, - {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, - {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, - {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, - {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, - {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, - {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, - {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, - {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, - {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, - {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, -] -bleach = [ - {file = "bleach-6.0.0-py3-none-any.whl", hash = "sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4"}, - {file = "bleach-6.0.0.tar.gz", hash = "sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414"}, -] -cachetools = [ - {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, - {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, -] -certifi = [ - {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, - {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, -] -cffi = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - 
{file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = 
"cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = 
"cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, -] -charset-normalizer = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = 
"charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = 
"charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", 
hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, -] -click = [ - {file = "click-8.1.5-py3-none-any.whl", hash = "sha256:e576aa487d679441d7d30abb87e1b43d24fc53bffb8758443b1a9e1cee504548"}, - {file = "click-8.1.5.tar.gz", hash = "sha256:4be4b1af8d665c6d942909916d31a213a106800c47d0eeba73d34da3cbc11367"}, -] -click-log = [ - {file = "click-log-0.4.0.tar.gz", hash = "sha256:3970f8570ac54491237bcdb3d8ab5e3eef6c057df29f8c3d1151a51a9c23b975"}, - {file = "click_log-0.4.0-py2.py3-none-any.whl", hash = "sha256:a43e394b528d52112af599f2fc9e4b7cf3c15f94e53581f74fa6867e68c91756"}, -] -clickclick = [ - {file = "clickclick-20.10.2-py2.py3-none-any.whl", hash = "sha256:c8f33e6d9ec83f68416dd2136a7950125bd256ec39ccc9a85c6e280a16be2bb5"}, - {file = "clickclick-20.10.2.tar.gz", hash = "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c"}, -] -colorama = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] -comm = [ - {file = "comm-0.1.3-py3-none-any.whl", hash = "sha256:16613c6211e20223f215fc6d3b266a247b6e2641bf4e0a3ad34cb1aff2aa3f37"}, - {file = "comm-0.1.3.tar.gz", hash = "sha256:a61efa9daffcfbe66fd643ba966f846a624e4e6d6767eda9cf6e993aadaab93e"}, -] -connexion = [ - {file = "connexion-2.14.2-py2.py3-none-any.whl", hash = "sha256:a73b96a0e07b16979a42cde7c7e26afe8548099e352cf350f80c57185e0e0b36"}, - {file = "connexion-2.14.2.tar.gz", hash = "sha256:dbc06f52ebeebcf045c9904d570f24377e8bbd5a6521caef15a06f634cf85646"}, -] -coverage = [ - {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, - {file = 
"coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, - {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, - {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, - {file = 
"coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, - {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, - {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, - {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, - {file = 
"coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, - {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, - {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, - {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, - {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, - {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, - {file = 
"coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, - {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, - {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, - {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, - {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, - {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, - {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, -] -cryptography = [ - {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711"}, - {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d"}, - {file = 
"cryptography-41.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83"}, - {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5"}, - {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58"}, - {file = "cryptography-41.0.2-cp37-abi3-win32.whl", hash = "sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76"}, - {file = "cryptography-41.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b"}, - 
{file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0"}, - {file = "cryptography-41.0.2.tar.gz", hash = "sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c"}, -] -dateparser = [ - {file = "dateparser-1.1.8-py2.py3-none-any.whl", hash = "sha256:070b29b5bbf4b1ec2cd51c96ea040dc68a614de703910a91ad1abba18f9f379f"}, - {file = "dateparser-1.1.8.tar.gz", hash = "sha256:86b8b7517efcc558f085a142cdb7620f0921543fcabdb538c8a4c4001d8178e3"}, -] -debugpy = [ - {file = "debugpy-1.6.7-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b3e7ac809b991006ad7f857f016fa92014445085711ef111fdc3f74f66144096"}, - {file = "debugpy-1.6.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3876611d114a18aafef6383695dfc3f1217c98a9168c1aaf1a02b01ec7d8d1e"}, - {file = "debugpy-1.6.7-cp310-cp310-win32.whl", hash = "sha256:33edb4afa85c098c24cc361d72ba7c21bb92f501104514d4ffec1fb36e09c01a"}, - {file = "debugpy-1.6.7-cp310-cp310-win_amd64.whl", hash = "sha256:ed6d5413474e209ba50b1a75b2d9eecf64d41e6e4501977991cdc755dc83ab0f"}, - {file = "debugpy-1.6.7-cp37-cp37m-macosx_10_15_x86_64.whl", hash = 
"sha256:38ed626353e7c63f4b11efad659be04c23de2b0d15efff77b60e4740ea685d07"}, - {file = "debugpy-1.6.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:279d64c408c60431c8ee832dfd9ace7c396984fd7341fa3116aee414e7dcd88d"}, - {file = "debugpy-1.6.7-cp37-cp37m-win32.whl", hash = "sha256:dbe04e7568aa69361a5b4c47b4493d5680bfa3a911d1e105fbea1b1f23f3eb45"}, - {file = "debugpy-1.6.7-cp37-cp37m-win_amd64.whl", hash = "sha256:f90a2d4ad9a035cee7331c06a4cf2245e38bd7c89554fe3b616d90ab8aab89cc"}, - {file = "debugpy-1.6.7-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:5224eabbbeddcf1943d4e2821876f3e5d7d383f27390b82da5d9558fd4eb30a9"}, - {file = "debugpy-1.6.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae1123dff5bfe548ba1683eb972329ba6d646c3a80e6b4c06cd1b1dd0205e9b"}, - {file = "debugpy-1.6.7-cp38-cp38-win32.whl", hash = "sha256:9cd10cf338e0907fdcf9eac9087faa30f150ef5445af5a545d307055141dd7a4"}, - {file = "debugpy-1.6.7-cp38-cp38-win_amd64.whl", hash = "sha256:aaf6da50377ff4056c8ed470da24632b42e4087bc826845daad7af211e00faad"}, - {file = "debugpy-1.6.7-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:0679b7e1e3523bd7d7869447ec67b59728675aadfc038550a63a362b63029d2c"}, - {file = "debugpy-1.6.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de86029696e1b3b4d0d49076b9eba606c226e33ae312a57a46dca14ff370894d"}, - {file = "debugpy-1.6.7-cp39-cp39-win32.whl", hash = "sha256:d71b31117779d9a90b745720c0eab54ae1da76d5b38c8026c654f4a066b0130a"}, - {file = "debugpy-1.6.7-cp39-cp39-win_amd64.whl", hash = "sha256:c0ff93ae90a03b06d85b2c529eca51ab15457868a377c4cc40a23ab0e4e552a3"}, - {file = "debugpy-1.6.7-py2.py3-none-any.whl", hash = "sha256:53f7a456bc50706a0eaabecf2d3ce44c4d5010e46dfc65b6b81a518b42866267"}, - {file = "debugpy-1.6.7.zip", hash = "sha256:c4c2f0810fa25323abfdfa36cbbbb24e5c3b1a42cb762782de64439c575d67f2"}, -] -decorator = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = 
"sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] -defusedxml = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] -deprecated = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] -deprecation = [ - {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, - {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, -] -dill = [ - {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, - {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"}, -] -docutils = [ - {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, - {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, -] -entrypoints = [ - {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, - {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, -] -et-xmlfile = [ - {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, - {file = "et_xmlfile-1.1.0.tar.gz", hash = 
"sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, -] -exceptiongroup = [ - {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, - {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, -] -executing = [ - {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, - {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, -] -fastjsonschema = [ - {file = "fastjsonschema-2.17.1-py3-none-any.whl", hash = "sha256:4b90b252628ca695280924d863fe37234eebadc29c5360d322571233dc9746e0"}, - {file = "fastjsonschema-2.17.1.tar.gz", hash = "sha256:f4eeb8a77cef54861dbf7424ac8ce71306f12cbb086c45131bcba2c6a4f726e3"}, -] -flake8 = [ - {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, - {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, -] -flask = [ - {file = "Flask-2.1.3-py3-none-any.whl", hash = "sha256:9013281a7402ad527f8fd56375164f3aa021ecfaff89bfe3825346c24f87e04c"}, - {file = "Flask-2.1.3.tar.gz", hash = "sha256:15972e5017df0575c3d6c090ba168b6db90259e620ac8d7ea813a396bad5b6cb"}, -] -flask-cors = [ - {file = "Flask-Cors-3.0.10.tar.gz", hash = "sha256:b60839393f3b84a0f3746f6cdca56c1ad7426aa738b70d6c61375857823181de"}, - {file = "Flask_Cors-3.0.10-py2.py3-none-any.whl", hash = "sha256:74efc975af1194fc7891ff5cd85b0f7478be4f7f59fe158102e91abb72bb4438"}, -] -fqdn = [ - {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, - {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, -] -google-api-core = [ - {file = 
"google-api-core-2.11.1.tar.gz", hash = "sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a"}, - {file = "google_api_core-2.11.1-py3-none-any.whl", hash = "sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a"}, -] -google-api-python-client = [ - {file = "google-api-python-client-2.93.0.tar.gz", hash = "sha256:62ee28e96031a10a1c341f226a75ac6a4f16bdb1d888dc8222b2cdca133d0031"}, - {file = "google_api_python_client-2.93.0-py2.py3-none-any.whl", hash = "sha256:f34abb671afd488bd19d30721ea20fb30d3796ddd825d6f91f26d8c718a9f07d"}, -] -google-auth = [ - {file = "google-auth-2.22.0.tar.gz", hash = "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"}, - {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"}, -] -google-auth-httplib2 = [ - {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, - {file = "google_auth_httplib2-0.1.0-py2.py3-none-any.whl", hash = "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10"}, -] -google-auth-oauthlib = [ - {file = "google-auth-oauthlib-0.8.0.tar.gz", hash = "sha256:81056a310fb1c4a3e5a7e1a443e1eb96593c6bbc55b26c0261e4d3295d3e6593"}, - {file = "google_auth_oauthlib-0.8.0-py2.py3-none-any.whl", hash = "sha256:40cc612a13c3336d5433e94e2adb42a0c88f6feb6c55769e44500fc70043a576"}, -] -googleapis-common-protos = [ - {file = "googleapis-common-protos-1.59.1.tar.gz", hash = "sha256:b35d530fe825fb4227857bc47ad84c33c809ac96f312e13182bdeaa2abe1178a"}, - {file = "googleapis_common_protos-1.59.1-py2.py3-none-any.whl", hash = "sha256:0cbedb6fb68f1c07e18eb4c48256320777707e7d0c55063ae56c15db3224a61e"}, -] -graphviz = [ - {file = "graphviz-0.20.1-py3-none-any.whl", hash = "sha256:587c58a223b51611c0cf461132da386edd896a029524ca61a1462b880bf97977"}, - {file = "graphviz-0.20.1.zip", hash = 
"sha256:8c58f14adaa3b947daf26c19bc1e98c4e0702cdc31cf99153e6f06904d492bf8"}, -] -great-expectations = [ - {file = "great_expectations-0.15.50-py3-none-any.whl", hash = "sha256:bda4c6bfe199dc0610273a1c160aab3876583266b1957a34a7edb72b055fd13d"}, - {file = "great_expectations-0.15.50.tar.gz", hash = "sha256:0b00c974410d598a97b4c662d7955d80d6268e35c5f3893ddb546f75432412db"}, -] -greenlet = [ - {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, - {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, - {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, - {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, - {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, - {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, - {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, - {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, - {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, - {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, - {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, - {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, - {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, - {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = 
"sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, - {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, - {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, - {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, - {file = 
"greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, - {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, - {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, - {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, - {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", 
hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, - {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, - {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, - {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, - {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, -] -httplib2 = [ - {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, - {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, -] -idna = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = 
"sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] -imagesize = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, -] -importlib-metadata = [ - {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"}, - {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"}, -] -inflection = [ - {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, - {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, -] -iniconfig = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] -interrogate = [ - {file = "interrogate-1.5.0-py3-none-any.whl", hash = "sha256:a4ccc5cbd727c74acc98dee6f5e79ef264c0bcfa66b68d4e123069b2af89091a"}, - {file = "interrogate-1.5.0.tar.gz", hash = "sha256:b6f325f0aa84ac3ac6779d8708264d366102226c5af7d69058cecffcff7a6d6c"}, -] -ipykernel = [ - {file = "ipykernel-6.24.0-py3-none-any.whl", hash = "sha256:2f5fffc7ad8f1fd5aadb4e171ba9129d9668dbafa374732cf9511ada52d6547f"}, - {file = "ipykernel-6.24.0.tar.gz", hash = "sha256:29cea0a716b1176d002a61d0b0c851f34536495bc4ef7dd0222c88b41b816123"}, -] -ipython = [ - {file = "ipython-8.14.0-py3-none-any.whl", hash = "sha256:248aca623f5c99a6635bc3857677b7320b9b8039f99f070ee0d20a5ca5a8e6bf"}, - {file = "ipython-8.14.0.tar.gz", hash = "sha256:1d197b907b6ba441b692c48cf2a3a2de280dc0ac91a3405b39349a50272ca0a1"}, -] 
-ipython-genutils = [ - {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, - {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, -] -ipywidgets = [ - {file = "ipywidgets-8.0.7-py3-none-any.whl", hash = "sha256:e0aed0c95a1e55b6a123f64305245578bdc09e52965a34941c2b6a578b8c64a0"}, - {file = "ipywidgets-8.0.7.tar.gz", hash = "sha256:50ace0a8886e9a0d68b980db82f94c25d55d21ff2340ed36f802dd9365e94acf"}, -] -isodate = [ - {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, - {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, -] -isoduration = [ - {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, - {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, -] -isort = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, -] -itsdangerous = [ - {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, - {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, -] -jedi = [ - {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, - {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"}, -] -jeepney = [ - {file = "jeepney-0.8.0-py3-none-any.whl", hash = 
"sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, - {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, -] -jinja2 = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, -] -jsonpatch = [ - {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, - {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, -] -jsonpointer = [ - {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, - {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, -] -jsonschema = [ - {file = "jsonschema-4.18.3-py3-none-any.whl", hash = "sha256:aab78b34c2de001c6b692232f08c21a97b436fe18e0b817bf0511046924fceef"}, - {file = "jsonschema-4.18.3.tar.gz", hash = "sha256:64b7104d72efe856bea49ca4af37a14a9eba31b40bb7238179f3803130fd34d9"}, -] -jsonschema-specifications = [ - {file = "jsonschema_specifications-2023.6.1-py3-none-any.whl", hash = "sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7"}, - {file = "jsonschema_specifications-2023.6.1.tar.gz", hash = "sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28"}, -] -jupyter-client = [ - {file = "jupyter_client-8.3.0-py3-none-any.whl", hash = "sha256:7441af0c0672edc5d28035e92ba5e32fadcfa8a4e608a434c228836a89df6158"}, - {file = "jupyter_client-8.3.0.tar.gz", hash = "sha256:3af69921fe99617be1670399a0b857ad67275eefcfa291e2c81a160b7b650f5f"}, -] -jupyter-core = [ - {file = "jupyter_core-5.3.1-py3-none-any.whl", hash = 
"sha256:ae9036db959a71ec1cac33081eeb040a79e681f08ab68b0883e9a676c7a90dce"}, - {file = "jupyter_core-5.3.1.tar.gz", hash = "sha256:5ba5c7938a7f97a6b0481463f7ff0dbac7c15ba48cf46fa4035ca6e838aa1aba"}, -] -jupyter-events = [ - {file = "jupyter_events-0.6.3-py3-none-any.whl", hash = "sha256:57a2749f87ba387cd1bfd9b22a0875b889237dbf2edc2121ebb22bde47036c17"}, - {file = "jupyter_events-0.6.3.tar.gz", hash = "sha256:9a6e9995f75d1b7146b436ea24d696ce3a35bfa8bfe45e0c33c334c79464d0b3"}, -] -jupyter-server = [ - {file = "jupyter_server-2.7.0-py3-none-any.whl", hash = "sha256:6a77912aff643e53fa14bdb2634884b52b784a4be77ce8e93f7283faed0f0849"}, - {file = "jupyter_server-2.7.0.tar.gz", hash = "sha256:36da0a266d31a41ac335a366c88933c17dfa5bb817a48f5c02c16d303bc9477f"}, -] -jupyter-server-terminals = [ - {file = "jupyter_server_terminals-0.4.4-py3-none-any.whl", hash = "sha256:75779164661cec02a8758a5311e18bb8eb70c4e86c6b699403100f1585a12a36"}, - {file = "jupyter_server_terminals-0.4.4.tar.gz", hash = "sha256:57ab779797c25a7ba68e97bcfb5d7740f2b5e8a83b5e8102b10438041a7eac5d"}, -] -jupyterlab-pygments = [ - {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, - {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, -] -jupyterlab-widgets = [ - {file = "jupyterlab_widgets-3.0.8-py3-none-any.whl", hash = "sha256:4715912d6ceab839c9db35953c764b3214ebbc9161c809f6e0510168845dfdf5"}, - {file = "jupyterlab_widgets-3.0.8.tar.gz", hash = "sha256:d428ab97b8d87cc7c54cbf37644d6e0f0e662f23876e05fa460a73ec3257252a"}, -] -keyring = [ - {file = "keyring-23.4.1-py3-none-any.whl", hash = "sha256:17e49fb0d6883c2b4445359434dba95aad84aabb29bbff044ad0ed7100232eca"}, - {file = "keyring-23.4.1.tar.gz", hash = "sha256:89cbd74d4683ed164c8082fb38619341097741323b3786905c6dac04d6915a55"}, -] -keyrings-alt = [ - {file = "keyrings.alt-3.1-py2.py3-none-any.whl", 
hash = "sha256:6a00fa799baf1385cf9620bd01bcc815aa56e6970342a567bcfea0c4d21abe5f"}, - {file = "keyrings.alt-3.1.tar.gz", hash = "sha256:b59c86b67b9027a86e841a49efc41025bcc3b1b0308629617b66b7011e52db5a"}, -] -lazy-object-proxy = [ - {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, - {file = 
"lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, - {file = 
"lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, -] -makefun = [ - {file = "makefun-1.15.1-py2.py3-none-any.whl", hash = "sha256:a63cfc7b47a539c76d97bd4fdb833c7d0461e759fd1225f580cb4be6200294d4"}, - {file = "makefun-1.15.1.tar.gz", hash = "sha256:40b0f118b6ded0d8d78c78f1eb679b8b6b2462e3c1b3e05fb1b2da8cd46b48a5"}, -] -markupsafe = [ - {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3028252424c72b2602a323f70fbf50aa80a5d3aa616ea6add4ba21ae9cc9da4c"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:290b02bab3c9e216da57c1d11d2ba73a9f73a614bbdcc027d299a60cdfabb11a"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e104c0c2b4cd765b4e83909cde7ec61a1e313f8a75775897db321450e928cce"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24c3be29abb6b34052fd26fc7a8e0a49b1ee9d282e3665e8ad09a0a68faee5b3"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204730fd5fe2fe3b1e9ccadb2bd18ba8712b111dcabce185af0b3b5285a7c989"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d3b64c65328cb4cd252c94f83e66e3d7acf8891e60ebf588d7b493a55a1dbf26"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:96de1932237abe0a13ba68b63e94113678c379dca45afa040a17b6e1ad7ed076"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75bb36f134883fdbe13d8e63b8675f5f12b80bb6627f7714c7d6c5becf22719f"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-win32.whl", hash = "sha256:4056f752015dfa9828dce3140dbadd543b555afb3252507348c493def166d454"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:d4e702eea4a2903441f2735799d217f4ac1b55f7d8ad96ab7d4e25417cb0827c"}, - {file 
= "MarkupSafe-2.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f0eddfcabd6936558ec020130f932d479930581171368fd728efcfb6ef0dd357"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ddea4c352a488b5e1069069f2f501006b1a4362cb906bee9a193ef1245a7a61"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09c86c9643cceb1d87ca08cdc30160d1b7ab49a8a21564868921959bd16441b8"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0a0abef2ca47b33fb615b491ce31b055ef2430de52c5b3fb19a4042dbc5cadb"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:736895a020e31b428b3382a7887bfea96102c529530299f426bf2e636aacec9e"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:679cbb78914ab212c49c67ba2c7396dc599a8479de51b9a87b174700abd9ea49"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:84ad5e29bf8bab3ad70fd707d3c05524862bddc54dc040982b0dbcff36481de7"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-win32.whl", hash = "sha256:8da5924cb1f9064589767b0f3fc39d03e3d0fb5aa29e0cb21d43106519bd624a"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:454ffc1cbb75227d15667c09f164a0099159da0c1f3d2636aa648f12675491ad"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:142119fb14a1ef6d758912b25c4e803c3ff66920635c44078666fe7cc3f8f759"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b2a5a856019d2833c56a3dcac1b80fe795c95f401818ea963594b345929dffa7"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d1fb9b2eec3c9714dd936860850300b51dbaa37404209c8d4cb66547884b7ed"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:62c0285e91414f5c8f621a17b69fc0088394ccdaa961ef469e833dbff64bd5ea"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc3150f85e2dbcf99e65238c842d1cfe69d3e7649b19864c1cc043213d9cd730"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f02cf7221d5cd915d7fa58ab64f7ee6dd0f6cddbb48683debf5d04ae9b1c2cc1"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5653619b3eb5cbd35bfba3c12d575db2a74d15e0e1c08bf1db788069d410ce8"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7d2f5d97fcbd004c03df8d8fe2b973fe2b14e7bfeb2cfa012eaa8759ce9a762f"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-win32.whl", hash = "sha256:3cace1837bc84e63b3fd2dfce37f08f8c18aeb81ef5cf6bb9b51f625cb4e6cd8"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:fabbe18087c3d33c5824cb145ffca52eccd053061df1d79d4b66dafa5ad2a5ea"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:023af8c54fe63530545f70dd2a2a7eed18d07a9a77b94e8bf1e2ff7f252db9a3"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d66624f04de4af8bbf1c7f21cc06649c1c69a7f84109179add573ce35e46d448"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c532d5ab79be0199fa2658e24a02fce8542df196e60665dd322409a03db6a52c"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ec74fada3841b8c5f4c4f197bea916025cb9aa3fe5abf7d52b655d042f956"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c653fde75a6e5eb814d2a0a89378f83d1d3f502ab710904ee585c38888816c"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:961eb86e5be7d0973789f30ebcf6caab60b844203f4396ece27310295a6082c7"}, - {file = 
"MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:598b65d74615c021423bd45c2bc5e9b59539c875a9bdb7e5f2a6b92dfcfc268d"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:599941da468f2cf22bf90a84f6e2a65524e87be2fce844f96f2dd9a6c9d1e635"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-win32.whl", hash = "sha256:e6f7f3f41faffaea6596da86ecc2389672fa949bd035251eab26dc6697451d05"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:b8811d48078d1cf2a6863dafb896e68406c5f513048451cd2ded0473133473c7"}, - {file = "MarkupSafe-2.1.0.tar.gz", hash = "sha256:80beaf63ddfbc64a0452b841d8036ca0611e049650e20afcb882f5d3c266d65f"}, -] -marshmallow = [ - {file = "marshmallow-3.19.0-py3-none-any.whl", hash = "sha256:93f0958568da045b0021ec6aeb7ac37c81bfcccbb9a0e7ed8559885070b3a19b"}, - {file = "marshmallow-3.19.0.tar.gz", hash = "sha256:90032c0fd650ce94b6ec6dc8dfeb0e3ff50c144586462c389b81a07205bedb78"}, -] -matplotlib-inline = [ - {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, - {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, -] -mccabe = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] -mistune = [ - {file = "mistune-3.0.1-py3-none-any.whl", hash = "sha256:b9b3e438efbb57c62b5beb5e134dab664800bdf1284a7ee09e8b12b13eb1aac6"}, - {file = "mistune-3.0.1.tar.gz", hash = "sha256:e912116c13aa0944f9dc530db38eb88f6a77087ab128f49f84a48f4c05ea163c"}, -] -mypy = [ - {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, - {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, - {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, - {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, - {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, - {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, - {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, - {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, - {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, - {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, - {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, - {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, - {file = 
"mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, - {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, - {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, - {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, - {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, - {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, - {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, - {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, - {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, -] -mypy-extensions = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] -nbclassic = [ - {file = "nbclassic-1.0.0-py3-none-any.whl", hash = "sha256:f99e4769b4750076cd4235c044b61232110733322384a94a63791d2e7beacc66"}, - {file = "nbclassic-1.0.0.tar.gz", hash = 
"sha256:0ae11eb2319455d805596bf320336cda9554b41d99ab9a3c31bf8180bffa30e3"}, -] -nbclient = [ - {file = "nbclient-0.8.0-py3-none-any.whl", hash = "sha256:25e861299e5303a0477568557c4045eccc7a34c17fc08e7959558707b9ebe548"}, - {file = "nbclient-0.8.0.tar.gz", hash = "sha256:f9b179cd4b2d7bca965f900a2ebf0db4a12ebff2f36a711cb66861e4ae158e55"}, -] -nbconvert = [ - {file = "nbconvert-7.6.0-py3-none-any.whl", hash = "sha256:5a445c6794b0791984bc5436608fe2c066cb43c83920c7bc91bde3b765e9a264"}, - {file = "nbconvert-7.6.0.tar.gz", hash = "sha256:24fcf27efdef2b51d7f090cc5ce5a9b178766a55be513c4ebab08c91899ab550"}, -] -nbformat = [ - {file = "nbformat-5.9.1-py3-none-any.whl", hash = "sha256:b7968ebf4811178a4108ee837eae1442e3f054132100f0359219e9ed1ce3ca45"}, - {file = "nbformat-5.9.1.tar.gz", hash = "sha256:3a7f52d040639cbd8a3890218c8b0ffb93211588c57446c90095e32ba5881b5d"}, -] -nest-asyncio = [ - {file = "nest_asyncio-1.5.6-py3-none-any.whl", hash = "sha256:b9a953fb40dceaa587d109609098db21900182b16440652454a146cffb06e8b8"}, - {file = "nest_asyncio-1.5.6.tar.gz", hash = "sha256:d267cc1ff794403f7df692964d1d2a3fa9418ffea2a3f6859a439ff482fef290"}, -] -networkx = [ - {file = "networkx-2.8.8-py3-none-any.whl", hash = "sha256:e435dfa75b1d7195c7b8378c3859f0445cd88c6b0375c181ed66823a9ceb7524"}, - {file = "networkx-2.8.8.tar.gz", hash = "sha256:230d388117af870fce5647a3c52401fcf753e94720e6ea6b4197a5355648885e"}, -] -notebook = [ - {file = "notebook-6.5.4-py3-none-any.whl", hash = "sha256:dd17e78aefe64c768737b32bf171c1c766666a21cc79a44d37a1700771cab56f"}, - {file = "notebook-6.5.4.tar.gz", hash = "sha256:517209568bd47261e2def27a140e97d49070602eea0d226a696f42a7f16c9a4e"}, -] -notebook-shim = [ - {file = "notebook_shim-0.2.3-py3-none-any.whl", hash = "sha256:a83496a43341c1674b093bfcebf0fe8e74cbe7eda5fd2bbc56f8e39e1486c0c7"}, - {file = "notebook_shim-0.2.3.tar.gz", hash = "sha256:f69388ac283ae008cd506dda10d0288b09a017d822d5e8c7129a152cbd3ce7e9"}, -] -numpy = [ - {file = 
"numpy-1.25.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77d339465dff3eb33c701430bcb9c325b60354698340229e1dff97745e6b3efa"}, - {file = "numpy-1.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d736b75c3f2cb96843a5c7f8d8ccc414768d34b0a75f466c05f3a739b406f10b"}, - {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a90725800caeaa160732d6b31f3f843ebd45d6b5f3eec9e8cc287e30f2805bf"}, - {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c6c9261d21e617c6dc5eacba35cb68ec36bb72adcff0dee63f8fbc899362588"}, - {file = "numpy-1.25.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0def91f8af6ec4bb94c370e38c575855bf1d0be8a8fbfba42ef9c073faf2cf19"}, - {file = "numpy-1.25.1-cp310-cp310-win32.whl", hash = "sha256:fd67b306320dcadea700a8f79b9e671e607f8696e98ec255915c0c6d6b818503"}, - {file = "numpy-1.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:c1516db588987450b85595586605742879e50dcce923e8973f79529651545b57"}, - {file = "numpy-1.25.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b82655dd8efeea69dbf85d00fca40013d7f503212bc5259056244961268b66e"}, - {file = "numpy-1.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e8f6049c4878cb16960fbbfb22105e49d13d752d4d8371b55110941fb3b17800"}, - {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41a56b70e8139884eccb2f733c2f7378af06c82304959e174f8e7370af112e09"}, - {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5154b1a25ec796b1aee12ac1b22f414f94752c5f94832f14d8d6c9ac40bcca6"}, - {file = "numpy-1.25.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38eb6548bb91c421261b4805dc44def9ca1a6eef6444ce35ad1669c0f1a3fc5d"}, - {file = "numpy-1.25.1-cp311-cp311-win32.whl", hash = "sha256:791f409064d0a69dd20579345d852c59822c6aa087f23b07b1b4e28ff5880fcb"}, - {file = "numpy-1.25.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:c40571fe966393b212689aa17e32ed905924120737194b5d5c1b20b9ed0fb171"}, - {file = "numpy-1.25.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3d7abcdd85aea3e6cdddb59af2350c7ab1ed764397f8eec97a038ad244d2d105"}, - {file = "numpy-1.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a180429394f81c7933634ae49b37b472d343cccb5bb0c4a575ac8bbc433722f"}, - {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d412c1697c3853c6fc3cb9751b4915859c7afe6a277c2bf00acf287d56c4e625"}, - {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20e1266411120a4f16fad8efa8e0454d21d00b8c7cee5b5ccad7565d95eb42dd"}, - {file = "numpy-1.25.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f76aebc3358ade9eacf9bc2bb8ae589863a4f911611694103af05346637df1b7"}, - {file = "numpy-1.25.1-cp39-cp39-win32.whl", hash = "sha256:247d3ffdd7775bdf191f848be8d49100495114c82c2bd134e8d5d075fb386a1c"}, - {file = "numpy-1.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:1d5d3c68e443c90b38fdf8ef40e60e2538a27548b39b12b73132456847f4b631"}, - {file = "numpy-1.25.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:35a9527c977b924042170a0887de727cd84ff179e478481404c5dc66b4170009"}, - {file = "numpy-1.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d3fe3dd0506a28493d82dc3cf254be8cd0d26f4008a417385cbf1ae95b54004"}, - {file = "numpy-1.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:012097b5b0d00a11070e8f2e261128c44157a8689f7dedcf35576e525893f4fe"}, - {file = "numpy-1.25.1.tar.gz", hash = "sha256:9a3a9f3a61480cc086117b426a8bd86869c213fc4072e606f01c4e4b66eb92bf"}, -] -oauth2client = [ - {file = "oauth2client-4.1.3-py2.py3-none-any.whl", hash = "sha256:b8a81cc5d60e2d364f0b1b98f958dbd472887acaf1a5b05e21c28c31a2d6d3ac"}, - {file = "oauth2client-4.1.3.tar.gz", hash = "sha256:d486741e451287f69568a4d26d70d9acd73a2bbfa275746c535b4209891cccc6"}, -] -oauthlib = [ - {file = 
"oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, -] -openpyxl = [ - {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, - {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, -] -overrides = [ - {file = "overrides-7.3.1-py3-none-any.whl", hash = "sha256:6187d8710a935d09b0bcef8238301d6ee2569d2ac1ae0ec39a8c7924e27f58ca"}, - {file = "overrides-7.3.1.tar.gz", hash = "sha256:8b97c6c1e1681b78cbc9424b138d880f0803c2254c5ebaabdde57bb6c62093f2"}, -] -packaging = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, -] -pandarallel = [ - {file = "pandarallel-1.6.5.tar.gz", hash = "sha256:1c2df98ff6441e8ae13ff428ceebaa7ec42d731f7f972c41ce4fdef1d3adf640"}, -] -pandas = [ - {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"}, - {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"}, - {file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"}, - {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"}, - {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"}, - {file = 
"pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"}, - {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"}, - {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"}, - {file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"}, - {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"}, - {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"}, - {file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"}, - {file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"}, - {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"}, - {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"}, - {file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"}, - {file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"}, - {file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"}, -] -pandocfilters = [ - {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, - {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, -] -parso = [ - {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, - {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, -] -pathspec = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = 
"sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, -] -pdoc = [ - {file = "pdoc-12.3.1-py3-none-any.whl", hash = "sha256:c3f24f31286e634de9c76fa6e67bd5c0c5e74360b41dc91e6b82499831eb52d8"}, - {file = "pdoc-12.3.1.tar.gz", hash = "sha256:453236f225feddb8a9071428f1982a78d74b9b3da4bc4433aedb64dbd0cc87ab"}, -] -pexpect = [ - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, -] -pickleshare = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, -] -platformdirs = [ - {file = "platformdirs-3.8.1-py3-none-any.whl", hash = "sha256:cec7b889196b9144d088e4c57d9ceef7374f6c39694ad1577a0aab50d27ea28c"}, - {file = "platformdirs-3.8.1.tar.gz", hash = "sha256:f87ca4fcff7d2b0f81c6a748a77973d7af0f4d526f98f308477c3c436c74d528"}, -] -pluggy = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, -] -prometheus-client = [ - {file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"}, - {file = "prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"}, -] -prompt-toolkit = [ - {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"}, - {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"}, -] -protobuf = [ - 
{file = "protobuf-4.23.4-cp310-abi3-win32.whl", hash = "sha256:5fea3c64d41ea5ecf5697b83e41d09b9589e6f20b677ab3c48e5f242d9b7897b"}, - {file = "protobuf-4.23.4-cp310-abi3-win_amd64.whl", hash = "sha256:7b19b6266d92ca6a2a87effa88ecc4af73ebc5cfde194dc737cf8ef23a9a3b12"}, - {file = "protobuf-4.23.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8547bf44fe8cec3c69e3042f5c4fb3e36eb2a7a013bb0a44c018fc1e427aafbd"}, - {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:fee88269a090ada09ca63551bf2f573eb2424035bcf2cb1b121895b01a46594a"}, - {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:effeac51ab79332d44fba74660d40ae79985901ac21bca408f8dc335a81aa597"}, - {file = "protobuf-4.23.4-cp37-cp37m-win32.whl", hash = "sha256:c3e0939433c40796ca4cfc0fac08af50b00eb66a40bbbc5dee711998fb0bbc1e"}, - {file = "protobuf-4.23.4-cp37-cp37m-win_amd64.whl", hash = "sha256:9053df6df8e5a76c84339ee4a9f5a2661ceee4a0dab019e8663c50ba324208b0"}, - {file = "protobuf-4.23.4-cp38-cp38-win32.whl", hash = "sha256:e1c915778d8ced71e26fcf43c0866d7499891bca14c4368448a82edc61fdbc70"}, - {file = "protobuf-4.23.4-cp38-cp38-win_amd64.whl", hash = "sha256:351cc90f7d10839c480aeb9b870a211e322bf05f6ab3f55fcb2f51331f80a7d2"}, - {file = "protobuf-4.23.4-cp39-cp39-win32.whl", hash = "sha256:6dd9b9940e3f17077e820b75851126615ee38643c2c5332aa7a359988820c720"}, - {file = "protobuf-4.23.4-cp39-cp39-win_amd64.whl", hash = "sha256:0a5759f5696895de8cc913f084e27fd4125e8fb0914bb729a17816a33819f474"}, - {file = "protobuf-4.23.4-py3-none-any.whl", hash = "sha256:e9d0be5bf34b275b9f87ba7407796556abeeba635455d036c7351f7c183ef8ff"}, - {file = "protobuf-4.23.4.tar.gz", hash = "sha256:ccd9430c0719dce806b93f89c91de7977304729e55377f872a92465d548329a9"}, -] -psutil = [ - {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = 
"sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, - {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, - {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, - {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, - {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, - {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, - {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, - {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, -] -ptyprocess = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - 
{file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] -pure-eval = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, -] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] -pyasn1 = [ - {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, - {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, -] -pyasn1-modules = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, -] -pycodestyle = [ - {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, - {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, -] -pycparser = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] -pydantic = [ - {file = "pydantic-1.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff44c5e89315b15ff1f7fdaf9853770b810936d6b01a7bcecaa227d2f8fe444f"}, - {file = "pydantic-1.10.11-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:a6c098d4ab5e2d5b3984d3cb2527e2d6099d3de85630c8934efcfdc348a9760e"}, - {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16928fdc9cb273c6af00d9d5045434c39afba5f42325fb990add2c241402d151"}, - {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0588788a9a85f3e5e9ebca14211a496409cb3deca5b6971ff37c556d581854e7"}, - {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9baf78b31da2dc3d3f346ef18e58ec5f12f5aaa17ac517e2ffd026a92a87588"}, - {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:373c0840f5c2b5b1ccadd9286782852b901055998136287828731868027a724f"}, - {file = "pydantic-1.10.11-cp310-cp310-win_amd64.whl", hash = "sha256:c3339a46bbe6013ef7bdd2844679bfe500347ac5742cd4019a88312aa58a9847"}, - {file = "pydantic-1.10.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:08a6c32e1c3809fbc49debb96bf833164f3438b3696abf0fbeceb417d123e6eb"}, - {file = "pydantic-1.10.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a451ccab49971af043ec4e0d207cbc8cbe53dbf148ef9f19599024076fe9c25b"}, - {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b02d24f7b2b365fed586ed73582c20f353a4c50e4be9ba2c57ab96f8091ddae"}, - {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f34739a89260dfa420aa3cbd069fbcc794b25bbe5c0a214f8fb29e363484b66"}, - {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e297897eb4bebde985f72a46a7552a7556a3dd11e7f76acda0c1093e3dbcf216"}, - {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d185819a7a059550ecb85d5134e7d40f2565f3dd94cfd870132c5f91a89cf58c"}, - {file = "pydantic-1.10.11-cp311-cp311-win_amd64.whl", hash = "sha256:4400015f15c9b464c9db2d5d951b6a780102cfa5870f2c036d37c23b56f7fc1b"}, - 
{file = "pydantic-1.10.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2417de68290434461a266271fc57274a138510dca19982336639484c73a07af6"}, - {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:331c031ba1554b974c98679bd0780d89670d6fd6f53f5d70b10bdc9addee1713"}, - {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8268a735a14c308923e8958363e3a3404f6834bb98c11f5ab43251a4e410170c"}, - {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:44e51ba599c3ef227e168424e220cd3e544288c57829520dc90ea9cb190c3248"}, - {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d7781f1d13b19700b7949c5a639c764a077cbbdd4322ed505b449d3ca8edcb36"}, - {file = "pydantic-1.10.11-cp37-cp37m-win_amd64.whl", hash = "sha256:7522a7666157aa22b812ce14c827574ddccc94f361237ca6ea8bb0d5c38f1629"}, - {file = "pydantic-1.10.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc64eab9b19cd794a380179ac0e6752335e9555d214cfcb755820333c0784cb3"}, - {file = "pydantic-1.10.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8dc77064471780262b6a68fe67e013298d130414d5aaf9b562c33987dbd2cf4f"}, - {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe429898f2c9dd209bd0632a606bddc06f8bce081bbd03d1c775a45886e2c1cb"}, - {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:192c608ad002a748e4a0bed2ddbcd98f9b56df50a7c24d9a931a8c5dd053bd3d"}, - {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ef55392ec4bb5721f4ded1096241e4b7151ba6d50a50a80a2526c854f42e6a2f"}, - {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e0bb6efe86281623abbeeb0be64eab740c865388ee934cd3e6a358784aca6e"}, - {file = "pydantic-1.10.11-cp38-cp38-win_amd64.whl", hash = 
"sha256:265a60da42f9f27e0b1014eab8acd3e53bd0bad5c5b4884e98a55f8f596b2c19"}, - {file = "pydantic-1.10.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:469adf96c8e2c2bbfa655fc7735a2a82f4c543d9fee97bd113a7fb509bf5e622"}, - {file = "pydantic-1.10.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6cbfbd010b14c8a905a7b10f9fe090068d1744d46f9e0c021db28daeb8b6de1"}, - {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abade85268cc92dff86d6effcd917893130f0ff516f3d637f50dadc22ae93999"}, - {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9738b0f2e6c70f44ee0de53f2089d6002b10c33264abee07bdb5c7f03038303"}, - {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:787cf23e5a0cde753f2eabac1b2e73ae3844eb873fd1f5bdbff3048d8dbb7604"}, - {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:174899023337b9fc685ac8adaa7b047050616136ccd30e9070627c1aaab53a13"}, - {file = "pydantic-1.10.11-cp39-cp39-win_amd64.whl", hash = "sha256:1954f8778489a04b245a1e7b8b22a9d3ea8ef49337285693cf6959e4b757535e"}, - {file = "pydantic-1.10.11-py3-none-any.whl", hash = "sha256:008c5e266c8aada206d0627a011504e14268a62091450210eda7c07fabe6963e"}, - {file = "pydantic-1.10.11.tar.gz", hash = "sha256:f66d479cf7eb331372c470614be6511eae96f1f120344c25f3f9bb59fb1b5528"}, -] -pyflakes = [ - {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, - {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, -] -pygments = [ - {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, - {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, -] -pygsheets = [ - {file = 
"pygsheets-2.0.6-py3-none-any.whl", hash = "sha256:3338c2eb8990fdee9f463b42a370ec0870c118d607d775471a6dfb8b08f6cd87"}, - {file = "pygsheets-2.0.6.tar.gz", hash = "sha256:bff46c812e99f9b8b81a09b456581365281c797620ec08530b0d0e48fa9299e2"}, -] -pylint = [ - {file = "pylint-2.17.4-py3-none-any.whl", hash = "sha256:7a1145fb08c251bdb5cca11739722ce64a63db479283d10ce718b2460e54123c"}, - {file = "pylint-2.17.4.tar.gz", hash = "sha256:5dcf1d9e19f41f38e4e85d10f511e5b9c35e1aa74251bf95cdd8cb23584e2db1"}, -] -pyopenssl = [ - {file = "pyOpenSSL-23.2.0-py3-none-any.whl", hash = "sha256:24f0dc5227396b3e831f4c7f602b950a5e9833d292c8e4a2e06b709292806ae2"}, - {file = "pyOpenSSL-23.2.0.tar.gz", hash = "sha256:276f931f55a452e7dea69c7173e984eb2a4407ce413c918aa34b55f82f9b8bac"}, -] -pyparsing = [ - {file = "pyparsing-3.1.0-py3-none-any.whl", hash = "sha256:d554a96d1a7d3ddaf7183104485bc19fd80543ad6ac5bdb6426719d766fb06c1"}, - {file = "pyparsing-3.1.0.tar.gz", hash = "sha256:edb662d6fe322d6e990b1594b5feaeadf806803359e3d4d42f11e295e588f0ea"}, -] -pytest = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, -] -pytest-cov = [ - {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, - {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, -] -pytest-mock = [ - {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"}, - {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"}, -] -python-dateutil = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = 
"python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] -python-dotenv = [ - {file = "python-dotenv-0.21.1.tar.gz", hash = "sha256:1c93de8f636cde3ce377292818d0e440b6e45a82f215c3744979151fa8151c49"}, - {file = "python_dotenv-0.21.1-py3-none-any.whl", hash = "sha256:41e12e0318bebc859fcc4d97d4db8d20ad21721a6aa5047dd59f090391cb549a"}, -] -python-json-logger = [ - {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, - {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, -] -pytz = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, -] -pywin32 = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = 
"sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, -] -pywin32-ctypes = [ - {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"}, - {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"}, -] -pywinpty = [ - {file = "pywinpty-2.0.10-cp310-none-win_amd64.whl", hash = "sha256:4c7d06ad10f6e92bc850a467f26d98f4f30e73d2fe5926536308c6ae0566bc16"}, - {file = "pywinpty-2.0.10-cp311-none-win_amd64.whl", hash = "sha256:7ffbd66310b83e42028fc9df7746118978d94fba8c1ebf15a7c1275fdd80b28a"}, - {file = "pywinpty-2.0.10-cp37-none-win_amd64.whl", hash = "sha256:38cb924f2778b5751ef91a75febd114776b3af0ae411bc667be45dd84fc881d3"}, - {file = "pywinpty-2.0.10-cp38-none-win_amd64.whl", hash = "sha256:902d79444b29ad1833b8d5c3c9aabdfd428f4f068504430df18074007c8c0de8"}, - {file = "pywinpty-2.0.10-cp39-none-win_amd64.whl", hash = "sha256:3c46aef80dd50979aff93de199e4a00a8ee033ba7a03cadf0a91fed45f0c39d7"}, - {file = "pywinpty-2.0.10.tar.gz", hash = "sha256:cdbb5694cf8c7242c2ecfaca35c545d31fa5d5814c3d67a4e628f803f680ebea"}, -] -pyyaml = [ - {file = 
"PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = 
"PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - 
{file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - 
{file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, -] -pyzmq = [ - {file = "pyzmq-25.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:1a6169e69034eaa06823da6a93a7739ff38716142b3596c180363dee729d713d"}, - {file = "pyzmq-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:19d0383b1f18411d137d891cab567de9afa609b214de68b86e20173dc624c101"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1e931d9a92f628858a50f5bdffdfcf839aebe388b82f9d2ccd5d22a38a789dc"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97d984b1b2f574bc1bb58296d3c0b64b10e95e7026f8716ed6c0b86d4679843f"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:154bddda2a351161474b36dba03bf1463377ec226a13458725183e508840df89"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cb6d161ae94fb35bb518b74bb06b7293299c15ba3bc099dccd6a5b7ae589aee3"}, - {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:90146ab578931e0e2826ee39d0c948d0ea72734378f1898939d18bc9c823fcf9"}, - {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:831ba20b660b39e39e5ac8603e8193f8fce1ee03a42c84ade89c36a251449d80"}, - {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a522510e3434e12aff80187144c6df556bb06fe6b9d01b2ecfbd2b5bfa5c60c"}, - {file = "pyzmq-25.1.0-cp310-cp310-win32.whl", hash = "sha256:be24a5867b8e3b9dd5c241de359a9a5217698ff616ac2daa47713ba2ebe30ad1"}, - {file = "pyzmq-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:5693dcc4f163481cf79e98cf2d7995c60e43809e325b77a7748d8024b1b7bcba"}, - {file = "pyzmq-25.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:13bbe36da3f8aaf2b7ec12696253c0bf6ffe05f4507985a8844a1081db6ec22d"}, - {file = "pyzmq-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:69511d604368f3dc58d4be1b0bad99b61ee92b44afe1cd9b7bd8c5e34ea8248a"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a983c8694667fd76d793ada77fd36c8317e76aa66eec75be2653cef2ea72883"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:332616f95eb400492103ab9d542b69d5f0ff628b23129a4bc0a2fd48da6e4e0b"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58416db767787aedbfd57116714aad6c9ce57215ffa1c3758a52403f7c68cff5"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cad9545f5801a125f162d09ec9b724b7ad9b6440151b89645241d0120e119dcc"}, - {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d6128d431b8dfa888bf51c22a04d48bcb3d64431caf02b3cb943269f17fd2994"}, - {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b15247c49d8cbea695b321ae5478d47cffd496a2ec5ef47131a9e79ddd7e46c"}, - {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:442d3efc77ca4d35bee3547a8e08e8d4bb88dadb54a8377014938ba98d2e074a"}, - {file = "pyzmq-25.1.0-cp311-cp311-win32.whl", hash = "sha256:65346f507a815a731092421d0d7d60ed551a80d9b75e8b684307d435a5597425"}, - {file = "pyzmq-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8b45d722046fea5a5694cba5d86f21f78f0052b40a4bbbbf60128ac55bfcc7b6"}, - {file = "pyzmq-25.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f45808eda8b1d71308c5416ef3abe958f033fdbb356984fabbfc7887bed76b3f"}, - {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b697774ea8273e3c0460cf0bba16cd85ca6c46dfe8b303211816d68c492e132"}, - {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b324fa769577fc2c8f5efcd429cef5acbc17d63fe15ed16d6dcbac2c5eb00849"}, - {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:5873d6a60b778848ce23b6c0ac26c39e48969823882f607516b91fb323ce80e5"}, - {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f0d9e7ba6a815a12c8575ba7887da4b72483e4cfc57179af10c9b937f3f9308f"}, - {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:414b8beec76521358b49170db7b9967d6974bdfc3297f47f7d23edec37329b00"}, - {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:01f06f33e12497dca86353c354461f75275a5ad9eaea181ac0dc1662da8074fa"}, - {file = "pyzmq-25.1.0-cp36-cp36m-win32.whl", hash = "sha256:b5a07c4f29bf7cb0164664ef87e4aa25435dcc1f818d29842118b0ac1eb8e2b5"}, - {file = "pyzmq-25.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:968b0c737797c1809ec602e082cb63e9824ff2329275336bb88bd71591e94a90"}, - {file = "pyzmq-25.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47b915ba666c51391836d7ed9a745926b22c434efa76c119f77bcffa64d2c50c"}, - {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5af31493663cf76dd36b00dafbc839e83bbca8a0662931e11816d75f36155897"}, - {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5489738a692bc7ee9a0a7765979c8a572520d616d12d949eaffc6e061b82b4d1"}, - {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1fc56a0221bdf67cfa94ef2d6ce5513a3d209c3dfd21fed4d4e87eca1822e3a3"}, - {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:75217e83faea9edbc29516fc90c817bc40c6b21a5771ecb53e868e45594826b0"}, - {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3830be8826639d801de9053cf86350ed6742c4321ba4236e4b5568528d7bfed7"}, - {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3575699d7fd7c9b2108bc1c6128641a9a825a58577775ada26c02eb29e09c517"}, - {file = "pyzmq-25.1.0-cp37-cp37m-win32.whl", hash = "sha256:95bd3a998d8c68b76679f6b18f520904af5204f089beebb7b0301d97704634dd"}, - {file = 
"pyzmq-25.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:dbc466744a2db4b7ca05589f21ae1a35066afada2f803f92369f5877c100ef62"}, - {file = "pyzmq-25.1.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:3bed53f7218490c68f0e82a29c92335daa9606216e51c64f37b48eb78f1281f4"}, - {file = "pyzmq-25.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eb52e826d16c09ef87132c6e360e1879c984f19a4f62d8a935345deac43f3c12"}, - {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ddbef8b53cd16467fdbfa92a712eae46dd066aa19780681a2ce266e88fbc7165"}, - {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9301cf1d7fc1ddf668d0abbe3e227fc9ab15bc036a31c247276012abb921b5ff"}, - {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e23a8c3b6c06de40bdb9e06288180d630b562db8ac199e8cc535af81f90e64b"}, - {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4a82faae00d1eed4809c2f18b37f15ce39a10a1c58fe48b60ad02875d6e13d80"}, - {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c8398a1b1951aaa330269c35335ae69744be166e67e0ebd9869bdc09426f3871"}, - {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d40682ac60b2a613d36d8d3a0cd14fbdf8e7e0618fbb40aa9fa7b796c9081584"}, - {file = "pyzmq-25.1.0-cp38-cp38-win32.whl", hash = "sha256:33d5c8391a34d56224bccf74f458d82fc6e24b3213fc68165c98b708c7a69325"}, - {file = "pyzmq-25.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:c66b7ff2527e18554030319b1376d81560ca0742c6e0b17ff1ee96624a5f1afd"}, - {file = "pyzmq-25.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:af56229ea6527a849ac9fb154a059d7e32e77a8cba27e3e62a1e38d8808cb1a5"}, - {file = "pyzmq-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bdca18b94c404af6ae5533cd1bc310c4931f7ac97c148bbfd2cd4bdd62b96253"}, - {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:0b6b42f7055bbc562f63f3df3b63e3dd1ebe9727ff0f124c3aa7bcea7b3a00f9"}, - {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c2fc7aad520a97d64ffc98190fce6b64152bde57a10c704b337082679e74f67"}, - {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be86a26415a8b6af02cd8d782e3a9ae3872140a057f1cadf0133de685185c02b"}, - {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:851fb2fe14036cfc1960d806628b80276af5424db09fe5c91c726890c8e6d943"}, - {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2a21fec5c3cea45421a19ccbe6250c82f97af4175bc09de4d6dd78fb0cb4c200"}, - {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bad172aba822444b32eae54c2d5ab18cd7dee9814fd5c7ed026603b8cae2d05f"}, - {file = "pyzmq-25.1.0-cp39-cp39-win32.whl", hash = "sha256:4d67609b37204acad3d566bb7391e0ecc25ef8bae22ff72ebe2ad7ffb7847158"}, - {file = "pyzmq-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:71c7b5896e40720d30cd77a81e62b433b981005bbff0cb2f739e0f8d059b5d99"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4cb27ef9d3bdc0c195b2dc54fcb8720e18b741624686a81942e14c8b67cc61a6"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0c4fc2741e0513b5d5a12fe200d6785bbcc621f6f2278893a9ca7bed7f2efb7d"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fc34fdd458ff77a2a00e3c86f899911f6f269d393ca5675842a6e92eea565bae"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8751f9c1442624da391bbd92bd4b072def6d7702a9390e4479f45c182392ff78"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:6581e886aec3135964a302a0f5eb68f964869b9efd1dbafdebceaaf2934f8a68"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:5482f08d2c3c42b920e8771ae8932fbaa0a67dff925fc476996ddd8155a170f3"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7fbcafa3ea16d1de1f213c226005fea21ee16ed56134b75b2dede5a2129e62"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:adecf6d02b1beab8d7c04bc36f22bb0e4c65a35eb0b4750b91693631d4081c70"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6d39e42a0aa888122d1beb8ec0d4ddfb6c6b45aecb5ba4013c27e2f28657765"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7018289b402ebf2b2c06992813523de61d4ce17bd514c4339d8f27a6f6809492"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9e68ae9864d260b18f311b68d29134d8776d82e7f5d75ce898b40a88df9db30f"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e21cc00e4debe8f54c3ed7b9fcca540f46eee12762a9fa56feb8512fd9057161"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f666ae327a6899ff560d741681fdcdf4506f990595201ed39b44278c471ad98"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f5efcc29056dfe95e9c9db0dfbb12b62db9c4ad302f812931b6d21dd04a9119"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:48e5e59e77c1a83162ab3c163fc01cd2eebc5b34560341a67421b09be0891287"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:108c96ebbd573d929740d66e4c3d1bdf31d5cde003b8dc7811a3c8c5b0fc173b"}, - {file = "pyzmq-25.1.0.tar.gz", hash = "sha256:80c41023465d36280e801564a69cbfce8ae85ff79b080e1913f6e90481fb8957"}, -] -rdflib = [ - {file = "rdflib-6.3.2-py3-none-any.whl", hash = "sha256:36b4e74a32aa1e4fa7b8719876fb192f19ecd45ff932ea5ebbd2e417a0247e63"}, - {file = "rdflib-6.3.2.tar.gz", hash = 
"sha256:72af591ff704f4caacea7ecc0c5a9056b8553e0489dd4f35a9bc52dbd41522e0"}, -] -referencing = [ - {file = "referencing-0.29.1-py3-none-any.whl", hash = "sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f"}, - {file = "referencing-0.29.1.tar.gz", hash = "sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e"}, -] -regex = [ - {file = "regex-2023.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:824bf3ac11001849aec3fa1d69abcb67aac3e150a933963fb12bda5151fe1bfd"}, - {file = "regex-2023.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05ed27acdf4465c95826962528f9e8d41dbf9b1aa8531a387dee6ed215a3e9ef"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b49c764f88a79160fa64f9a7b425620e87c9f46095ef9c9920542ab2495c8bc"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e3f1316c2293e5469f8f09dc2d76efb6c3982d3da91ba95061a7e69489a14ef"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43e1dd9d12df9004246bacb79a0e5886b3b6071b32e41f83b0acbf293f820ee8"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4959e8bcbfda5146477d21c3a8ad81b185cd252f3d0d6e4724a5ef11c012fb06"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4dd387354dc83a3bff67127a124c21116feb0d2ef536805c454721c5d7993d"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2239d95d8e243658b8dbb36b12bd10c33ad6e6933a54d36ff053713f129aa536"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:890e5a11c97cf0d0c550eb661b937a1e45431ffa79803b942a057c4fb12a2da2"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:a8105e9af3b029f243ab11ad47c19b566482c150c754e4c717900a798806b222"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:25be746a8ec7bc7b082783216de8e9473803706723b3f6bef34b3d0ed03d57e2"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3676f1dd082be28b1266c93f618ee07741b704ab7b68501a173ce7d8d0d0ca18"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10cb847aeb1728412c666ab2e2000ba6f174f25b2bdc7292e7dd71b16db07568"}, - {file = "regex-2023.6.3-cp310-cp310-win32.whl", hash = "sha256:dbbbfce33cd98f97f6bffb17801b0576e653f4fdb1d399b2ea89638bc8d08ae1"}, - {file = "regex-2023.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:c5f8037000eb21e4823aa485149f2299eb589f8d1fe4b448036d230c3f4e68e0"}, - {file = "regex-2023.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c123f662be8ec5ab4ea72ea300359023a5d1df095b7ead76fedcd8babbedf969"}, - {file = "regex-2023.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9edcbad1f8a407e450fbac88d89e04e0b99a08473f666a3f3de0fd292badb6aa"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcba6dae7de533c876255317c11f3abe4907ba7d9aa15d13e3d9710d4315ec0e"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29cdd471ebf9e0f2fb3cac165efedc3c58db841d83a518b082077e612d3ee5df"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b74fbbf6cbbf9dbce20eb9b5879469e97aeeaa874145517563cca4029db65c"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c29ca1bd61b16b67be247be87390ef1d1ef702800f91fbd1991f5c4421ebae8"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77f09bc4b55d4bf7cc5eba785d87001d6757b7c9eec237fe2af57aba1a071d9"}, - {file = 
"regex-2023.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ea353ecb6ab5f7e7d2f4372b1e779796ebd7b37352d290096978fea83c4dba0c"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:10590510780b7541969287512d1b43f19f965c2ece6c9b1c00fc367b29d8dce7"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2fbd6236aae3b7f9d514312cdb58e6494ee1c76a9948adde6eba33eb1c4264f"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:6b2675068c8b56f6bfd5a2bda55b8accbb96c02fd563704732fd1c95e2083461"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74419d2b50ecb98360cfaa2974da8689cb3b45b9deff0dcf489c0d333bcc1477"}, - {file = "regex-2023.6.3-cp311-cp311-win32.whl", hash = "sha256:fb5ec16523dc573a4b277663a2b5a364e2099902d3944c9419a40ebd56a118f9"}, - {file = "regex-2023.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:09e4a1a6acc39294a36b7338819b10baceb227f7f7dbbea0506d419b5a1dd8af"}, - {file = "regex-2023.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0654bca0cdf28a5956c83839162692725159f4cda8d63e0911a2c0dc76166525"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b6a3ceb5ca952e66550a4532cef94c9a0c80dc156c4cc343041951aec1697"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87b2a5bb5e78ee0ad1de71c664d6eb536dc3947a46a69182a90f4410f5e3f7dd"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6343c6928282c1f6a9db41f5fd551662310e8774c0e5ebccb767002fcf663ca9"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6192d5af2ccd2a38877bfef086d35e6659566a335b1492786ff254c168b1693"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:74390d18c75054947e4194019077e243c06fbb62e541d8817a0fa822ea310c14"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:742e19a90d9bb2f4a6cf2862b8b06dea5e09b96c9f2df1779e53432d7275331f"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8abbc5d54ea0ee80e37fef009e3cec5dafd722ed3c829126253d3e22f3846f1e"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c2b867c17a7a7ae44c43ebbeb1b5ff406b3e8d5b3e14662683e5e66e6cc868d3"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d831c2f8ff278179705ca59f7e8524069c1a989e716a1874d6d1aab6119d91d1"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ee2d1a9a253b1729bb2de27d41f696ae893507c7db224436abe83ee25356f5c1"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:61474f0b41fe1a80e8dfa70f70ea1e047387b7cd01c85ec88fa44f5d7561d787"}, - {file = "regex-2023.6.3-cp36-cp36m-win32.whl", hash = "sha256:0b71e63226e393b534105fcbdd8740410dc6b0854c2bfa39bbda6b0d40e59a54"}, - {file = "regex-2023.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bbb02fd4462f37060122e5acacec78e49c0fbb303c30dd49c7f493cf21fc5b27"}, - {file = "regex-2023.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b862c2b9d5ae38a68b92e215b93f98d4c5e9454fa36aae4450f61dd33ff48487"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:976d7a304b59ede34ca2921305b57356694f9e6879db323fd90a80f865d355a3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:83320a09188e0e6c39088355d423aa9d056ad57a0b6c6381b300ec1a04ec3d16"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9427a399501818a7564f8c90eced1e9e20709ece36be701f394ada99890ea4b3"}, - {file = 
"regex-2023.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178bbc1b2ec40eaca599d13c092079bf529679bf0371c602edaa555e10b41c3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:837328d14cde912af625d5f303ec29f7e28cdab588674897baafaf505341f2fc"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d44dc13229905ae96dd2ae2dd7cebf824ee92bc52e8cf03dcead37d926da019"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d54af539295392611e7efbe94e827311eb8b29668e2b3f4cadcfe6f46df9c777"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7117d10690c38a622e54c432dfbbd3cbd92f09401d622902c32f6d377e2300ee"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bb60b503ec8a6e4e3e03a681072fa3a5adcbfa5479fa2d898ae2b4a8e24c4591"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:65ba8603753cec91c71de423a943ba506363b0e5c3fdb913ef8f9caa14b2c7e0"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:271f0bdba3c70b58e6f500b205d10a36fb4b58bd06ac61381b68de66442efddb"}, - {file = "regex-2023.6.3-cp37-cp37m-win32.whl", hash = "sha256:9beb322958aaca059f34975b0df135181f2e5d7a13b84d3e0e45434749cb20f7"}, - {file = "regex-2023.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fea75c3710d4f31389eed3c02f62d0b66a9da282521075061ce875eb5300cf23"}, - {file = "regex-2023.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f56fcb7ff7bf7404becdfc60b1e81a6d0561807051fd2f1860b0d0348156a07"}, - {file = "regex-2023.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d2da3abc88711bce7557412310dfa50327d5769a31d1c894b58eb256459dc289"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a99b50300df5add73d307cf66abea093304a07eb017bce94f01e795090dea87c"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5708089ed5b40a7b2dc561e0c8baa9535b77771b64a8330b684823cfd5116036"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:687ea9d78a4b1cf82f8479cab23678aff723108df3edeac098e5b2498879f4a7"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3850beab9f527f06ccc94b446c864059c57651b3f911fddb8d9d3ec1d1b25d"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8915cc96abeb8983cea1df3c939e3c6e1ac778340c17732eb63bb96247b91d2"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:841d6e0e5663d4c7b4c8099c9997be748677d46cbf43f9f471150e560791f7ff"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9edce5281f965cf135e19840f4d93d55b3835122aa76ccacfd389e880ba4cf82"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b956231ebdc45f5b7a2e1f90f66a12be9610ce775fe1b1d50414aac1e9206c06"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:36efeba71c6539d23c4643be88295ce8c82c88bbd7c65e8a24081d2ca123da3f"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:cf67ca618b4fd34aee78740bea954d7c69fdda419eb208c2c0c7060bb822d747"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b4598b1897837067a57b08147a68ac026c1e73b31ef6e36deeeb1fa60b2933c9"}, - {file = "regex-2023.6.3-cp38-cp38-win32.whl", hash = "sha256:f415f802fbcafed5dcc694c13b1292f07fe0befdb94aa8a52905bd115ff41e88"}, - {file = "regex-2023.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:d4f03bb71d482f979bda92e1427f3ec9b220e62a7dd337af0aa6b47bf4498f72"}, - {file = 
"regex-2023.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccf91346b7bd20c790310c4147eee6ed495a54ddb6737162a36ce9dbef3e4751"}, - {file = "regex-2023.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b28f5024a3a041009eb4c333863d7894d191215b39576535c6734cd88b0fcb68"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0bb18053dfcfed432cc3ac632b5e5e5c5b7e55fb3f8090e867bfd9b054dbcbf"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5bfb3004f2144a084a16ce19ca56b8ac46e6fd0651f54269fc9e230edb5e4a"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c6b48d0fa50d8f4df3daf451be7f9689c2bde1a52b1225c5926e3f54b6a9ed1"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051da80e6eeb6e239e394ae60704d2b566aa6a7aed6f2890a7967307267a5dc6"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4c3b7fa4cdaa69268748665a1a6ff70c014d39bb69c50fda64b396c9116cf77"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:457b6cce21bee41ac292d6753d5e94dcbc5c9e3e3a834da285b0bde7aa4a11e9"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aad51907d74fc183033ad796dd4c2e080d1adcc4fd3c0fd4fd499f30c03011cd"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0385e73da22363778ef2324950e08b689abdf0b108a7d8decb403ad7f5191938"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c6a57b742133830eec44d9b2290daf5cbe0a2f1d6acee1b3c7b1c7b2f3606df7"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3e5219bf9e75993d73ab3d25985c857c77e614525fac9ae02b1bebd92f7cecac"}, - {file = 
"regex-2023.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e5087a3c59eef624a4591ef9eaa6e9a8d8a94c779dade95d27c0bc24650261cd"}, - {file = "regex-2023.6.3-cp39-cp39-win32.whl", hash = "sha256:20326216cc2afe69b6e98528160b225d72f85ab080cbdf0b11528cbbaba2248f"}, - {file = "regex-2023.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:bdff5eab10e59cf26bc479f565e25ed71a7d041d1ded04ccf9aee1d9f208487a"}, - {file = "regex-2023.6.3.tar.gz", hash = "sha256:72d1a25bf36d2050ceb35b517afe13864865268dfb45910e2e17a84be6cbfeb0"}, -] -requests = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] -requests-oauthlib = [ - {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, - {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, -] -rfc3339-validator = [ - {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, - {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, -] -rfc3986-validator = [ - {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, - {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, -] -rpds-py = [ - {file = "rpds_py-0.8.10-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711"}, - {file = "rpds_py-0.8.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181"}, - {file = 
"rpds_py-0.8.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1"}, - {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8"}, - {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad"}, - {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b"}, - {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c"}, - {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8"}, - {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0"}, - {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451"}, - {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0"}, - {file = "rpds_py-0.8.10-cp310-none-win32.whl", hash = "sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84"}, - {file = "rpds_py-0.8.10-cp310-none-win_amd64.whl", hash = "sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e"}, - {file = "rpds_py-0.8.10-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7"}, - {file = "rpds_py-0.8.10-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb"}, - {file = "rpds_py-0.8.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe"}, - {file = "rpds_py-0.8.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0"}, - {file = "rpds_py-0.8.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8"}, - {file = "rpds_py-0.8.10-cp311-none-win32.whl", hash = "sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9"}, - {file = "rpds_py-0.8.10-cp311-none-win_amd64.whl", hash = "sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055"}, - {file = "rpds_py-0.8.10-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2"}, - {file = 
"rpds_py-0.8.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58"}, - {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346"}, - {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7"}, - {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4"}, - {file = "rpds_py-0.8.10-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7"}, - {file = "rpds_py-0.8.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873"}, - {file = "rpds_py-0.8.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52"}, - {file = "rpds_py-0.8.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38"}, - {file = "rpds_py-0.8.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b"}, - {file = "rpds_py-0.8.10-cp38-none-win32.whl", hash = "sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6"}, - {file = "rpds_py-0.8.10-cp38-none-win_amd64.whl", hash = "sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6"}, - {file = "rpds_py-0.8.10-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8"}, - {file = "rpds_py-0.8.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8"}, - {file = "rpds_py-0.8.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d"}, - {file = "rpds_py-0.8.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a"}, - {file = "rpds_py-0.8.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2"}, - {file = "rpds_py-0.8.10-cp39-none-win32.whl", hash = "sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49"}, - {file = "rpds_py-0.8.10-cp39-none-win_amd64.whl", hash = "sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0"}, - {file = 
"rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd"}, - {file = 
"rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5"}, - {file = 
"rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734"}, - {file = "rpds_py-0.8.10.tar.gz", hash = "sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4"}, -] -rsa = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, -] -ruamel-yaml = [ - {file = "ruamel.yaml-0.17.17-py3-none-any.whl", hash = "sha256:9af3ec5d7f8065582f3aa841305465025d0afd26c5fb54e15b964e11838fc74f"}, - {file = "ruamel.yaml-0.17.17.tar.gz", hash = "sha256:9751de4cbb57d4bfbf8fc394e125ed4a2f170fbff3dc3d78abf50be85924f8be"}, -] -ruamel-yaml-clib = [ - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71"}, - {file = 
"ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win32.whl", hash = "sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_12_6_arm64.whl", hash = "sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_12_0_arm64.whl", hash = "sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win32.whl", hash = "sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win_amd64.whl", hash = 
"sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win32.whl", hash = "sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win32.whl", hash = "sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f"}, - {file = 
"ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win32.whl", hash = "sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5"}, - {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"}, -] -schematic-db = [ - {file = "schematic_db-0.0.29-py3-none-any.whl", hash = "sha256:e43f1d7c06d877d47036c5a480ac8f22333daa967df67c4d8316091ff4ddc0a5"}, - {file = "schematic_db-0.0.29.tar.gz", hash = "sha256:77d338b34dd8f1e75b9df5b9b3f20de35087285079019d48d162de0d131f3ffb"}, -] -scipy = [ - {file = "scipy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aec8c62fbe52914f9cf28d846cf0401dd80ab80788bbab909434eb336ed07c04"}, - {file = "scipy-1.11.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:3b9963798df1d8a52db41a6fc0e6fa65b1c60e85d73da27ae8bb754de4792481"}, - {file = "scipy-1.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e8eb42db36526b130dfbc417609498a6192381abc1975b91e3eb238e0b41c1a"}, - {file = "scipy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:366a6a937110d80dca4f63b3f5b00cc89d36f678b2d124a01067b154e692bab1"}, - {file = 
"scipy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:08d957ca82d3535b3b9ba6c8ff355d78fe975271874e2af267cb5add5bd78625"}, - {file = "scipy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:e866514bc2d660608447b6ba95c8900d591f2865c07cca0aa4f7ff3c4ca70f30"}, - {file = "scipy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba94eeef3c9caa4cea7b402a35bb02a5714ee1ee77eb98aca1eed4543beb0f4c"}, - {file = "scipy-1.11.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:512fdc18c65f76dadaca139348e525646d440220d8d05f6d21965b8d4466bccd"}, - {file = "scipy-1.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cce154372f0ebe88556ed06d7b196e9c2e0c13080ecb58d0f35062dc7cc28b47"}, - {file = "scipy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4bb943010203465ac81efa392e4645265077b4d9e99b66cf3ed33ae12254173"}, - {file = "scipy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:249cfa465c379c9bb2c20123001e151ff5e29b351cbb7f9c91587260602c58d0"}, - {file = "scipy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:ffb28e3fa31b9c376d0fb1f74c1f13911c8c154a760312fbee87a21eb21efe31"}, - {file = "scipy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:39154437654260a52871dfde852adf1b93b1d1bc5dc0ffa70068f16ec0be2624"}, - {file = "scipy-1.11.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:b588311875c58d1acd4ef17c983b9f1ab5391755a47c3d70b6bd503a45bfaf71"}, - {file = "scipy-1.11.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d51565560565a0307ed06fa0ec4c6f21ff094947d4844d6068ed04400c72d0c3"}, - {file = "scipy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b41a0f322b4eb51b078cb3441e950ad661ede490c3aca66edef66f4b37ab1877"}, - {file = "scipy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:396fae3f8c12ad14c5f3eb40499fd06a6fef8393a6baa352a652ecd51e74e029"}, - {file = "scipy-1.11.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:be8c962a821957fdde8c4044efdab7a140c13294997a407eaee777acf63cbf0c"}, - {file = "scipy-1.11.1.tar.gz", hash = "sha256:fb5b492fa035334fd249f0973cc79ecad8b09c604b42a127a677b45a9a3d4289"}, -] -secretstorage = [ - {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, - {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, -] -send2trash = [ - {file = "Send2Trash-1.8.2-py3-none-any.whl", hash = "sha256:a384719d99c07ce1eefd6905d2decb6f8b7ed054025bb0e618919f945de4f679"}, - {file = "Send2Trash-1.8.2.tar.gz", hash = "sha256:c132d59fa44b9ca2b1699af5c86f57ce9f4c5eb56629d5d55fbb7a35f84e2312"}, -] -setuptools = [ - {file = "setuptools-66.1.1-py3-none-any.whl", hash = "sha256:6f590d76b713d5de4e49fe4fbca24474469f53c83632d5d0fd056f7ff7e8112b"}, - {file = "setuptools-66.1.1.tar.gz", hash = "sha256:ac4008d396bc9cd983ea483cb7139c0240a07bbc74ffb6232fceffedc6cf03a8"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -sniffio = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, -] -snowballstemmer = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] -soupsieve = [ - {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, - {file = "soupsieve-2.4.1.tar.gz", hash 
= "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, -] -sphinx = [ - {file = "Sphinx-7.0.1.tar.gz", hash = "sha256:61e025f788c5977d9412587e733733a289e2b9fdc2fef8868ddfbfc4ccfe881d"}, - {file = "sphinx-7.0.1-py3-none-any.whl", hash = "sha256:60c5e04756c1709a98845ed27a2eed7a556af3993afb66e77fec48189f742616"}, -] -sphinx-click = [ - {file = "sphinx-click-4.4.0.tar.gz", hash = "sha256:cc67692bd28f482c7f01531c61b64e9d2f069bfcf3d24cbbb51d4a84a749fa48"}, - {file = "sphinx_click-4.4.0-py3-none-any.whl", hash = "sha256:2821c10a68fc9ee6ce7c92fad26540d8d8c8f45e6d7258f0e4fb7529ae8fab49"}, -] -sphinxcontrib-applehelp = [ - {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, - {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, -] -sphinxcontrib-devhelp = [ - {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, - {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, -] -sphinxcontrib-htmlhelp = [ - {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, - {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, -] -sphinxcontrib-jsmath = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] -sphinxcontrib-qthelp = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file 
= "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, -] -sphinxcontrib-serializinghtml = [ - {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, - {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, -] -sqlalchemy = [ - {file = "SQLAlchemy-1.4.49-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2e126cf98b7fd38f1e33c64484406b78e937b1a280e078ef558b95bf5b6895f6"}, - {file = "SQLAlchemy-1.4.49-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:03db81b89fe7ef3857b4a00b63dedd632d6183d4ea5a31c5d8a92e000a41fc71"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:95b9df9afd680b7a3b13b38adf6e3a38995da5e162cc7524ef08e3be4e5ed3e1"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a63e43bf3f668c11bb0444ce6e809c1227b8f067ca1068898f3008a273f52b09"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f835c050ebaa4e48b18403bed2c0fda986525896efd76c245bdd4db995e51a4c"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c21b172dfb22e0db303ff6419451f0cac891d2e911bb9fbf8003d717f1bcf91"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-win32.whl", hash = "sha256:5fb1ebdfc8373b5a291485757bd6431de8d7ed42c27439f543c81f6c8febd729"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-win_amd64.whl", hash = "sha256:f8a65990c9c490f4651b5c02abccc9f113a7f56fa482031ac8cb88b70bc8ccaa"}, - {file = "SQLAlchemy-1.4.49-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8923dfdf24d5aa8a3adb59723f54118dd4fe62cf59ed0d0d65d940579c1170a4"}, 
- {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9ab2c507a7a439f13ca4499db6d3f50423d1d65dc9b5ed897e70941d9e135b0"}, - {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5debe7d49b8acf1f3035317e63d9ec8d5e4d904c6e75a2a9246a119f5f2fdf3d"}, - {file = "SQLAlchemy-1.4.49-cp311-cp311-win32.whl", hash = "sha256:82b08e82da3756765c2e75f327b9bf6b0f043c9c3925fb95fb51e1567fa4ee87"}, - {file = "SQLAlchemy-1.4.49-cp311-cp311-win_amd64.whl", hash = "sha256:171e04eeb5d1c0d96a544caf982621a1711d078dbc5c96f11d6469169bd003f1"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:36e58f8c4fe43984384e3fbe6341ac99b6b4e083de2fe838f0fdb91cebe9e9cb"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b31e67ff419013f99ad6f8fc73ee19ea31585e1e9fe773744c0f3ce58c039c30"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c14b29d9e1529f99efd550cd04dbb6db6ba5d690abb96d52de2bff4ed518bc95"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40f3470e084d31247aea228aa1c39bbc0904c2b9ccbf5d3cfa2ea2dac06f26d"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-win32.whl", hash = "sha256:706bfa02157b97c136547c406f263e4c6274a7b061b3eb9742915dd774bbc264"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-win_amd64.whl", hash = "sha256:a7f7b5c07ae5c0cfd24c2db86071fb2a3d947da7bd487e359cc91e67ac1c6d2e"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:4afbbf5ef41ac18e02c8dc1f86c04b22b7a2125f2a030e25bbb4aff31abb224b"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:24e300c0c2147484a002b175f4e1361f102e82c345bf263242f0449672a4bccf"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:201de072b818f8ad55c80d18d1a788729cccf9be6d9dc3b9d8613b053cd4836d"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653ed6817c710d0c95558232aba799307d14ae084cc9b1f4c389157ec50df5c"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-win32.whl", hash = "sha256:647e0b309cb4512b1f1b78471fdaf72921b6fa6e750b9f891e09c6e2f0e5326f"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-win_amd64.whl", hash = "sha256:ab73ed1a05ff539afc4a7f8cf371764cdf79768ecb7d2ec691e3ff89abbc541e"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:37ce517c011560d68f1ffb28af65d7e06f873f191eb3a73af5671e9c3fada08a"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1878ce508edea4a879015ab5215546c444233881301e97ca16fe251e89f1c55"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e8e608983e6f85d0852ca61f97e521b62e67969e6e640fe6c6b575d4db68557"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccf956da45290df6e809ea12c54c02ace7f8ff4d765d6d3dfb3655ee876ce58d"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-win32.whl", hash = "sha256:f167c8175ab908ce48bd6550679cc6ea20ae169379e73c7720a28f89e53aa532"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-win_amd64.whl", hash = "sha256:45806315aae81a0c202752558f0df52b42d11dd7ba0097bf71e253b4215f34f4"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b6d0c4b15d65087738a6e22e0ff461b407533ff65a73b818089efc8eb2b3e1de"}, - {file = 
"SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a843e34abfd4c797018fd8d00ffffa99fd5184c421f190b6ca99def4087689bd"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1c890421651b45a681181301b3497e4d57c0d01dc001e10438a40e9a9c25ee77"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d26f280b8f0a8f497bc10573849ad6dc62e671d2468826e5c748d04ed9e670d5"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-win32.whl", hash = "sha256:ec2268de67f73b43320383947e74700e95c6770d0c68c4e615e9897e46296294"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-win_amd64.whl", hash = "sha256:bbdf16372859b8ed3f4d05f925a984771cd2abd18bd187042f24be4886c2a15f"}, - {file = "SQLAlchemy-1.4.49.tar.gz", hash = "sha256:06ff25cbae30c396c4b7737464f2a7fc37a67b7da409993b182b024cec80aed9"}, -] -sqlalchemy-utils = [ - {file = "SQLAlchemy-Utils-0.38.3.tar.gz", hash = "sha256:9f9afba607a40455cf703adfa9846584bf26168a0c5a60a70063b70d65051f4d"}, - {file = "SQLAlchemy_Utils-0.38.3-py3-none-any.whl", hash = "sha256:5c13b5d08adfaa85f3d4e8ec09a75136216fad41346980d02974a70a77988bf9"}, -] -stack-data = [ - {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, - {file = "stack_data-0.6.2.tar.gz", hash = "sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, -] -swagger-ui-bundle = [ - {file = "swagger_ui_bundle-0.0.9-py3-none-any.whl", hash = "sha256:cea116ed81147c345001027325c1ddc9ca78c1ee7319935c3c75d3669279d575"}, - {file = "swagger_ui_bundle-0.0.9.tar.gz", hash = "sha256:b462aa1460261796ab78fd4663961a7f6f347ce01760f1303bbbdf630f11f516"}, -] -synapseclient = [ - {file = "synapseclient-2.7.2-py3-none-any.whl", hash = "sha256:dd8b1a1b4667d08311bb651469431f43fe2eeab83c0ef1fe5a03c2929aeb26cd"}, - {file = 
"synapseclient-2.7.2.tar.gz", hash = "sha256:dc5a61f9f495109a0c89aa7d42b641b6ff278280d7961fb450dd5015704fe15b"}, -] -tabulate = [ - {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, - {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, -] -tenacity = [ - {file = "tenacity-8.2.2-py3-none-any.whl", hash = "sha256:2f277afb21b851637e8f52e6a613ff08734c347dc19ade928e519d7d2d8569b0"}, - {file = "tenacity-8.2.2.tar.gz", hash = "sha256:43af037822bd0029025877f3b2d97cc4d7bb0c2991000a3d59d71517c5c969e0"}, -] -terminado = [ - {file = "terminado-0.17.1-py3-none-any.whl", hash = "sha256:8650d44334eba354dd591129ca3124a6ba42c3d5b70df5051b6921d506fdaeae"}, - {file = "terminado-0.17.1.tar.gz", hash = "sha256:6ccbbcd3a4f8a25a5ec04991f39a0b8db52dfcd487ea0e578d977e6752380333"}, -] -tinycss2 = [ +description = "A tiny CSS parser" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, ] -toml = [ + +[package.dependencies] +webencodings = ">=0.4" + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["flake8", "isort", "pytest"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] -tomli = [ + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -tomlkit = [ + +[[package]] +name = "tomlkit" +version = "0.11.8" +description = "Style preserving TOML library" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "tomlkit-0.11.8-py3-none-any.whl", hash = "sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171"}, {file = "tomlkit-0.11.8.tar.gz", hash = "sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3"}, ] -toolz = [ + +[[package]] +name = "toolz" +version = "0.12.0" +description = "List processing tools and functional utilities" +category = "main" +optional = false +python-versions = ">=3.5" +files = [ {file = "toolz-0.12.0-py3-none-any.whl", hash = "sha256:2059bd4148deb1884bb0eb770a3cde70e7f954cfbbdc2285f1f2de01fd21eb6f"}, {file = "toolz-0.12.0.tar.gz", hash = "sha256:88c570861c440ee3f2f6037c4654613228ff40c93a6c25e0eba70d17282c6194"}, ] -tornado = [ + +[[package]] +name = "tornado" +version = "6.3.2" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
+category = "main" +optional = false +python-versions = ">= 3.8" +files = [ {file = "tornado-6.3.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:c367ab6c0393d71171123ca5515c61ff62fe09024fa6bf299cd1339dc9456829"}, {file = "tornado-6.3.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b46a6ab20f5c7c1cb949c72c1994a4585d2eaa0be4853f50a03b5031e964fc7c"}, {file = "tornado-6.3.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2de14066c4a38b4ecbbcd55c5cc4b5340eb04f1c5e81da7451ef555859c833f"}, @@ -4442,69 +4278,266 @@ tornado = [ {file = "tornado-6.3.2-cp38-abi3-win_amd64.whl", hash = "sha256:0c325e66c8123c606eea33084976c832aa4e766b7dff8aedd7587ea44a604cdf"}, {file = "tornado-6.3.2.tar.gz", hash = "sha256:4b927c4f19b71e627b13f3db2324e4ae660527143f9e1f2e2fb404f3a187e2ba"}, ] -tqdm = [ + +[[package]] +name = "tqdm" +version = "4.65.0" +description = "Fast, Extensible Progress Meter" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ {file = "tqdm-4.65.0-py3-none-any.whl", hash = "sha256:c4f53a17fe37e132815abceec022631be8ffe1b9381c2e6e30aa70edc99e9671"}, {file = "tqdm-4.65.0.tar.gz", hash = "sha256:1871fb68a86b8fb3b59ca4cdd3dcccbc7e6d613eeed31f4c332531977b89beb5"}, ] -traitlets = [ + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["py-make (>=0.1.0)", "twine", "wheel"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "traitlets" +version = "5.9.0" +description = "Traitlets Python configuration system" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ {file = "traitlets-5.9.0-py3-none-any.whl", hash = "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8"}, {file = "traitlets-5.9.0.tar.gz", hash = "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9"}, ] -typing-extensions = [ + +[package.extras] +docs = ["myst-parser", 
"pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] + +[[package]] +name = "typing-extensions" +version = "4.5.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, ] -tzdata = [ + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "tzdata" +version = "2023.3" +description = "Provider of IANA time zone data" +category = "main" +optional = false +python-versions = ">=2" +files = [ {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, ] -tzlocal = [ + +[[package]] +name = "tzlocal" +version = "5.0.1" +description = "tzinfo object for the local timezone" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ {file = "tzlocal-5.0.1-py3-none-any.whl", hash = "sha256:f3596e180296aaf2dbd97d124fe76ae3a0e3d32b258447de7b939b3fd4be992f"}, {file = "tzlocal-5.0.1.tar.gz", hash = "sha256:46eb99ad4bdb71f3f72b7d24f4267753e240944ecfc16f25d2719ba89827a803"}, ] -uri-template 
= [ + +[package.dependencies] +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["black", "check-manifest", "flake8", "pyroma", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] + +[[package]] +name = "uri-template" +version = "1.3.0" +description = "RFC 6570 URI Template Processor" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, ] -uritemplate = [ + +[package.extras] +dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] + +[[package]] +name = "uritemplate" +version = "4.1.1" +description = "Implementation of RFC 6570 URI Templates" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, ] -urllib3 = [ + +[[package]] +name = "urllib3" +version = "1.26.16" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, ] -uwsgi = [ + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "uwsgi" +version = "2.0.21" +description = "The uWSGI server" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "uwsgi-2.0.21.tar.gz", hash = "sha256:35a30d83791329429bc04fe44183ce4ab512fcf6968070a7bfba42fc5a0552a9"}, ] -validators = [ + +[[package]] +name = "validators" +version = "0.20.0" +description = "Python Data Validation for Humans™." 
+category = "main" +optional = false +python-versions = ">=3.4" +files = [ {file = "validators-0.20.0.tar.gz", hash = "sha256:24148ce4e64100a2d5e267233e23e7afeb55316b47d30faae7eb6e7292bc226a"}, ] -wcwidth = [ + +[package.dependencies] +decorator = ">=3.4.0" + +[package.extras] +test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] + +[[package]] +name = "wcwidth" +version = "0.2.6" +description = "Measures the displayed width of unicode strings in a terminal" +category = "main" +optional = false +python-versions = "*" +files = [ {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, ] -webcolors = [ + +[[package]] +name = "webcolors" +version = "1.13" +description = "A library for working with the color formats defined by HTML and CSS." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ {file = "webcolors-1.13-py3-none-any.whl", hash = "sha256:29bc7e8752c0a1bd4a1f03c14d6e6a72e93d82193738fa860cbff59d0fcc11bf"}, {file = "webcolors-1.13.tar.gz", hash = "sha256:c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a"}, ] -webencodings = [ + +[package.extras] +docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] +tests = ["pytest", "pytest-cov"] + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +category = "main" +optional = false +python-versions = "*" +files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] -websocket-client = [ + +[[package]] +name = "websocket-client" +version = "1.6.1" 
+description = "WebSocket client for Python with low level API options" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ {file = "websocket-client-1.6.1.tar.gz", hash = "sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd"}, {file = "websocket_client-1.6.1-py3-none-any.whl", hash = "sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d"}, ] -werkzeug = [ + +[package.extras] +docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "werkzeug" +version = "2.1.2" +description = "The comprehensive WSGI web application library." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ {file = "Werkzeug-2.1.2-py3-none-any.whl", hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"}, {file = "Werkzeug-2.1.2.tar.gz", hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6"}, ] -widgetsnbextension = [ + +[package.extras] +watchdog = ["watchdog"] + +[[package]] +name = "widgetsnbextension" +version = "4.0.8" +description = "Jupyter interactive widgets for Jupyter Notebook" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ {file = "widgetsnbextension-4.0.8-py3-none-any.whl", hash = "sha256:2e37f0ce9da11651056280c7efe96f2db052fe8fc269508e3724f5cbd6c93018"}, {file = "widgetsnbextension-4.0.8.tar.gz", hash = "sha256:9ec291ba87c2dfad42c3d5b6f68713fa18be1acd7476569516b2431682315c17"}, ] -wrapt = [ + +[[package]] +name = "wrapt" +version = "1.15.0" +description = "Module for decorators, wrappers and monkey patching." 
+category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, @@ -4581,7 +4614,24 @@ wrapt = [ {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, ] -zipp = [ + +[[package]] +name = "zipp" +version = "3.16.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ {file = "zipp-3.16.1-py3-none-any.whl", hash = "sha256:0b37c326d826d5ca35f2b9685cd750292740774ef16190008b00a0227c256fe0"}, {file = "zipp-3.16.1.tar.gz", hash = "sha256:857b158da2cbf427b376da1c24fd11faecbac5a4ac7523c3607f8a01f94c2ec0"}, ] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.9.0,<3.11" +content-hash = "c7fbaf60049ed6f4a60971720b8761423fa94647c1e428d8a04f994d0b1095d7" diff --git a/pyproject.toml b/pyproject.toml index 9b32bdb5a..0d437af61 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -73,6 +73,7 @@ pandarallel = "^1.6.4" schematic-db = {version = "^0.0.29", extras = 
["synapse"]} pyopenssl = "^23.0.0" typing-extensions = "<4.6.0" +dataclasses-json = "^0.6.1" [tool.poetry.dev-dependencies] pytest = "^7.0.0" diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 4b5bc2447..defaaf31c 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -1,3 +1,5 @@ +from dataclasses import dataclass, field, asdict +from dataclasses_json import config, dataclass_json from functools import wraps from typing import Any, Dict, Optional, Text, List import networkx as nx @@ -6,6 +8,47 @@ from schematic.schemas.data_model_relationships import DataModelRelationships from schematic.utils.schema_utils import get_label_from_display_name, convert_bool_to_str +@dataclass_json +@dataclass +class BaseTemplate: + magic_context: str = field(default_factory=lambda: {"bts": "http://schema.biothings.io/", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + "rdfs": "http://www.w3.org/2000/01/rdf-schema#", + "schema": "http://schema.org/", + "xsd": "http://www.w3.org/2001/XMLSchema#", + }, + metadata=config(field_name="@context")) + magic_graph: str = field(default_factory=list, metadata=config(field_name="@graph")) + magic_id: str = field(default="http://schema.biothings.io/#0.1", metadata=config(field_name="@id")) + +@dataclass_json +@dataclass +class PropertyTemplate: + magic_id: str = field(default="", metadata=config(field_name="@id")) + magic_type: str = field(default="rdf:Property", metadata=config(field_name="@type")) + magic_comment: str = field(default="", metadata=config(field_name="rdfs:comment")) + magic_label: str = field(default="", metadata=config(field_name="rdfs:label")) + magic_domain_includes: list = field(default_factory=list, metadata=config(field_name="schema:domainIncludes")) + magic_range_includes: list = field(default_factory=list, metadata=config(field_name="schema:rangeIncludes")) + magic_isPartOf: dict = field(default_factory=dict, 
metadata=config(field_name="schema:isPartOf")) + magic_displayName:str = field(default="", metadata=config(field_name="sms:displayName")) + magic_required: str = field(default="sms:false", metadata=config(field_name="sms:required")) + magic_validationRules: list = field(default_factory=list, metadata=config(field_name="sms:validationRules")) + +@dataclass_json +@dataclass +class ClassTemplate: + magic_id: str = field(default="", metadata=config(field_name="@id")) + magic_type: str = field(default="rdfs:Class", metadata=config(field_name="@type")) + magic_comment: str = field(default="", metadata=config(field_name="rdfs:comment")) + magic_label: str = field(default="", metadata=config(field_name="rdfs:label")) + magic_subClassOf: list = field(default_factory=list, metadata=config(field_name="rdfs:subClassOf")) + magic_range_includes: list = field(default_factory=list, metadata=config(field_name="schema:rangeIncludes")) + magic_isPartOf: dict = field(default_factory=dict, metadata=config(field_name="schema:isPartOf")) + magic_displayName:str = field(default="", metadata=config(field_name="sms:displayName")) + magic_requiresDependency: list = field(default_factory=list, metadata=config(field_name="sms:requiresDependency")) + magic_requiresComponent: list = field(default_factory=list, metadata=config(field_name="sms:requiresComponent")) + magic_validationRules: list = field(default_factory=list, metadata=config(field_name="sms:validationRules")) class DataModelJsonLD(object): ''' @@ -20,28 +63,18 @@ def __init__(self, Graph: nx.MultiDiGraph, output_path:str = ''): self.DME = DataModelGraphExplorer(self.graph) self.output_path = output_path - - def base_jsonld_template(self) -> dict: - """Base starter JSONLD template, to be filled out with model. For entire file. - Returns: - base_template, dict: base JSONLD template - TODO: when done adding contexts fill out this section here. 
- """ - base_template = { - "@context": { - "bts": "http://schema.biothings.io/", - "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", - "rdfs": "http://www.w3.org/2000/01/rdf-schema#", - "schema": "http://schema.org/", - "xsd": "http://www.w3.org/2001/XMLSchema#", - }, - "@graph": [], - "@id": "http://schema.biothings.io/#0.1", - } - return base_template + # Gather the templates + base_template = BaseTemplate() + self.base_jsonld_template = base_template.to_json() - def create_object(self, template:dict, node:str)->dict: - """ Fill in a blank JSONLD template with information for each node. All relationships are filled from the graph, based on the type of information (node or edge) + property_template = PropertyTemplate() + self.property_template = property_template.to_json() + + class_template = ClassTemplate() + self.class_template = class_template.to_json() + + def fill_entry_template(self, template:dict, node:str)->dict: + """ Fill in a blank JSONLD entry template with information for each node. All relationships are filled from the graph, based on the type of information (node or edge) Args: template, dict: empty class or property template to be filled with information for the given node. node, str: target node to fill the template out for. 
@@ -215,47 +248,6 @@ def reorder_template_entries(self, template:dict) -> dict: template[jsonld_key] = ordered_edges return template - def property_template(self): - '''Generate a template for schema property - Returns: - property_template, dict: template for property schema - ''' - property_template = { - "@id": "", - "@type": "rdf:Property", - "rdfs:comment": "", - "rdfs:label": "", - "schema:domainIncludes": [], - "schema:rangeIncludes": [], - "schema:isPartOf": {}, - "sms:displayName": "", - "sms:required": "sms:false", - "sms:validationRules": [], - } - return property_template - - def class_template(self): - """Generate a template for schema class - Returns: - class_template, dict: template for class schema - """ - class_template = { - "@id": "", - "@type": "rdfs:Class", - "rdfs:comment": "", - "rdfs:label": "", - "rdfs:subClassOf": [], - "schema:isPartOf": {}, - "schema:rangeIncludes": [], - "sms:displayName": "", - "sms:required": "sms:false", - "sms:requiresDependency": [], - "sms:requiresComponent": [], - "sms:validationRules": [], - } - return class_template - - def generate_jsonld_object(self): '''Create the JSONLD object. 
Returns: @@ -265,16 +257,17 @@ def generate_jsonld_object(self): properties = self.DME.find_properties() # Get JSONLD Template - json_ld_object = self.base_jsonld_template() + json_ld_template = self.base_jsonld_template - # Iterativly add graph nodes to json_ld_object as properties or classes + # Iterativly add graph nodes to json_ld_template as properties or classes for node in self.graph.nodes: if node in properties: - obj = self.create_object(template = self.property_template(), node = node) + obj = self.fill_entry_template(template = self.property_template, node = node) else: - obj = self.create_object(template = self.class_template(), node = node) - json_ld_object['@graph'].append(obj) - return json_ld_object + obj = self.fill_entry_template(template = self.class_template, node = node) + + json_ld_template['@graph'].append(obj) + return json_ld_template def convert_graph_to_jsonld(Graph): # Make the JSONLD object From c89594b856bb7cbdc8987b22d87199bfbefb3305 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 12 Oct 2023 19:22:31 -0700 Subject: [PATCH 156/239] fix issue with convert jsonstr to dict --- schematic/schemas/data_model_jsonld.py | 35 +++++++++----------------- 1 file changed, 12 insertions(+), 23 deletions(-) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index defaaf31c..ab2d36c65 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -1,12 +1,13 @@ from dataclasses import dataclass, field, asdict from dataclasses_json import config, dataclass_json +import json from functools import wraps from typing import Any, Dict, Optional, Text, List import networkx as nx from schematic.schemas.data_model_graph import DataModelGraphExplorer from schematic.schemas.data_model_relationships import DataModelRelationships -from schematic.utils.schema_utils import get_label_from_display_name, convert_bool_to_str +from schematic.utils.schema_utils import 
get_label_from_display_name, convert_bool_to_str, strip_context @dataclass_json @dataclass @@ -65,13 +66,13 @@ def __init__(self, Graph: nx.MultiDiGraph, output_path:str = ''): # Gather the templates base_template = BaseTemplate() - self.base_jsonld_template = base_template.to_json() + self.base_jsonld_template = json.loads(base_template.to_json()) property_template = PropertyTemplate() - self.property_template = property_template.to_json() + self.property_template = json.loads(property_template.to_json()) class_template = ClassTemplate() - self.class_template = class_template.to_json() + self.class_template = json.loads(class_template.to_json()) def fill_entry_template(self, template:dict, node:str)->dict: """ Fill in a blank JSONLD entry template with information for each node. All relationships are filled from the graph, based on the type of information (node or edge) @@ -86,7 +87,7 @@ def fill_entry_template(self, template:dict, node:str)->dict: # For each field in template fill out with information from the graph for rel, rel_vals in data_model_relationships.items(): - key_context, key_rel = self.strip_context(context_value=rel_vals['jsonld_key']) + key_context, key_rel = strip_context(context_value=rel_vals['jsonld_key']) # Fill edge information (done per edge type) if rel_vals['edge_rel']: @@ -138,10 +139,12 @@ def fill_entry_template(self, template:dict, node:str)->dict: # Get recorded info for current node, and the attribute type node_info = nx.get_node_attributes(self.graph, node_label)[node] - - # Add this information to the template - template[rel_vals['jsonld_key']] = node_info - + try: + # Add this information to the template + template[rel_vals['jsonld_key']] = node_info + except: + breakpoint() + # Clean up template template = self.clean_template(template=template, data_model_relationships=data_model_relationships, @@ -200,20 +203,6 @@ def clean_template(self, template: dict, data_model_relationships: dict) -> dict del template[rels['jsonld_key']] 
return template - def strip_context(self, context_value: str) -> tuple[str]: - """Strip contexts from str entry. - Args: - context_value, str: string from which to strip context from - Returns: - context, str: the original context - v, str: value separated from context - """ - if ':' in context_value: - context, v = context_value.split(':') - elif '@' in context_value: - context, v = context_value.split('@') - return context, v - def reorder_template_entries(self, template:dict) -> dict: '''In JSONLD some classes or property keys have list values. We want to make sure these lists are ordered according to the order supplied by the user. This will look specically in lists and reorder those. From a7cfa94d05e2a58616778a5bde6ebbded4d4c505 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 12 Oct 2023 19:29:01 -0700 Subject: [PATCH 157/239] add strip_contex to utils --- schematic/utils/schema_utils.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index 52112d7a3..b8cab8e66 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -110,3 +110,17 @@ def export_schema(schema: dict, file_path: str) -> None: """ with open(file_path, "w") as f: json.dump(schema, f, sort_keys=True, indent=4, ensure_ascii=False) + +def strip_context(context_value: str) -> tuple[str]: + """Strip contexts from str entry. 
+ Args: + context_value, str: string from which to strip context from + Returns: + context, str: the original context + v, str: value separated from context + """ + if ':' in context_value: + context, v = context_value.split(':') + elif '@' in context_value: + context, v = context_value.split('@') + return context, v From 0f30c7e4c26a2eeb7040dbf950274786a2f16e4b Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 12 Oct 2023 19:29:33 -0700 Subject: [PATCH 158/239] add additional check when adding back contexts --- schematic/schemas/data_model_jsonld.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index ab2d36c65..66c82a194 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -181,9 +181,9 @@ def add_contexts_to_entries(self, template:dict) -> dict: rel_func = self.rel_dict[key]['node_attr_dict']['default'] # Add appropritae contexts that have been removed in previous steps (for JSONLD) or did not exist to begin with (csv) - if key == 'id' and rel_func == get_label_from_display_name: + if key == 'id' and rel_func == get_label_from_display_name and 'bts' not in str(template[jsonld_key]).lower(): template[jsonld_key] = 'bts:' + template[jsonld_key] - elif key == 'required' and rel_func == convert_bool_to_str: + elif key == 'required' and rel_func == convert_bool_to_str and 'sms' not in str(template[jsonld_key]).lower(): template[jsonld_key] = 'sms:' + str(template[jsonld_key]).lower() return template From d90a7b5756c81caac53e5dc61f6e2111e868bb5f Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 13 Oct 2023 09:49:37 -0700 Subject: [PATCH 159/239] fix typo in api --- schematic_api/api/openapi/api.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic_api/api/openapi/api.yaml b/schematic_api/api/openapi/api.yaml index 5f234457e..0aca6a05d 100644 --- 
a/schematic_api/api/openapi/api.yaml +++ b/schematic_api/api/openapi/api.yaml @@ -768,7 +768,7 @@ paths: tags: - Schema Operation - /schmas/find_class_specific_properties: + /schemas/find_class_specific_properties: get: summary: Find properties specifically associated with a given class description: Find properties specifically associated with a given class From 4e998154c42776b420058afa49e7fcc5fe3c4894 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 13 Oct 2023 14:49:01 -0700 Subject: [PATCH 160/239] WIP: combine functions to look for value and edge rels --- schematic/schemas/data_model_graph.py | 2 +- schematic/schemas/data_model_nodes.py | 4 +- schematic/schemas/data_model_parser.py | 2 +- schematic/schemas/data_model_relationships.py | 23 ++++++++++- tests/test_schemas.py | 40 +++++++++---------- 5 files changed, 45 insertions(+), 26 deletions(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index 140c1cbf2..a77f0fbb7 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -96,7 +96,7 @@ def generate_data_model_graph(self) -> nx.MultiDiGraph: G: nx.MultiDiGraph, networkx graph representation of the data model ''' # Get all relationships with edges - edge_relationships = self.dmr.define_edge_relationships() + edge_relationships = self.dmr.retreive_rel_headers_dict(edge=True) # Find all nodes all_nodes = self.dmn.gather_all_nodes_in_model(attr_rel_dict=self.attribute_relationships_dict) diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index 99c82b3b6..65bb18b3f 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -16,8 +16,8 @@ class DataModelNodes(): def __init__(self, attribute_relationships_dict): self.namespaces = dict(rdf=Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#")) self.data_model_relationships = DataModelRelationships() - self.value_relationships = 
self.data_model_relationships.define_value_relationships() - self.edge_relationships_dictionary = self.data_model_relationships.define_edge_relationships() + self.value_relationships = self.data_model_relationships.retreive_rel_headers_dict(edge=False) + self.edge_relationships_dictionary = self.data_model_relationships.retreive_rel_headers_dict(edge=True) self.properties = self.get_data_model_properties(attr_rel_dict=attribute_relationships_dict) # retrieve a list of relationship types that will produce nodes. self.node_relationships =list(self.edge_relationships_dictionary.values()) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 2e5aafd71..f4283bab7 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -109,7 +109,7 @@ def __init__( # Load relationships dictionary. self.rel_dict = self.dmr.define_data_model_relationships() # Get edge relationships - self.edge_relationships_dictionary = self.dmr.define_edge_relationships() + self.edge_relationships_dictionary = self.dmr.retreive_rel_headers_dict(edge=True) # Load required csv headers self.required_headers = self.dmr.define_required_csv_headers() # Get the type for each value that needs to be submitted. diff --git a/schematic/schemas/data_model_relationships.py b/schematic/schemas/data_model_relationships.py index 133a292ea..0c2ab9132 100644 --- a/schematic/schemas/data_model_relationships.py +++ b/schematic/schemas/data_model_relationships.py @@ -189,6 +189,25 @@ def define_required_csv_headers(self): return required_headers + def retreive_rel_headers_dict(self, edge: bool) -> Dict[str, str]: + """Helper function to retrieve CSV headers for edge and non-edge relationships defined by edge_type. + Args: + edge, bool: True if looking for edge relationships + Returns: + rel_headers_dict: dict, key: csv_header if the key represents an edge relationship. 
+ """ + rel_headers_dict = {} + for rel, rel_dict in self.relationships_dictionary.items(): + if 'edge_rel' in rel_dict: + if rel_dict['edge_rel'] and edge: + rel_headers_dict.update({rel:rel_dict['csv_header']}) + elif rel_dict['edge_rel'] == False and edge == False: + rel_headers_dict.update({rel:rel_dict['csv_header']}) + else: + raise ValueError(f"Did not provide a 'edge_rel' for relationship {rel}") + + return rel_headers_dict + ''' def define_edge_relationships(self): """Helper function to retrieve CSV headers for edge relationships. Returns: @@ -200,7 +219,7 @@ def define_edge_relationships(self): if v['edge_rel']: edge_relationships.update({k:v['csv_header']}) except KeyError: - print(f"Did not provide a 'edge_rel' key, value pair for the nested dictionary {k} : {key}") + print(f"Did not provide a 'edge_rel' for relationship {k}") return edge_relationships @@ -218,5 +237,5 @@ def define_value_relationships(self): print(f"Did not provide a 'edge_rel' for key {k}") return value_relationships - + ''' diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 37e4ac993..d32008907 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -160,27 +160,27 @@ def test_define_required_csv_headers(self, dmr: DataModelRelationships): 'Source' ] - def test_define_edge_relationships(self, dmr: DataModelRelationships): + @pytest.mark.parametrize("edge", [True, False], ids=["True", "False"]) + def test_retreive_rel_headers_dict(self, dmr: DataModelRelationships, edge:bool): """Tests method returns correct values""" - assert dmr.define_edge_relationships() == { - 'rangeIncludes': 'Valid Values', - 'requiresDependency': 'DependsOn', - 'requiresComponent': 'DependsOn Component', - 'subClassOf': 'Parent', - 'domainIncludes': 'Properties' - } - - def test_define_value_relationships(self, dmr: DataModelRelationships): - """Tests method returns correct values""" - assert dmr.define_value_relationships() == { - 'displayName': 'Attribute', - 'label': None, - 
'comment': 'Description', - 'required': 'Required', - 'validationRules': 'Validation Rules', - 'isPartOf': None, - 'id': 'Source' - } + if edge: + assert dmr.retreive_rel_headers_dict(edge=edge) == { + 'rangeIncludes': 'Valid Values', + 'requiresDependency': 'DependsOn', + 'requiresComponent': 'DependsOn Component', + 'subClassOf': 'Parent', + 'domainIncludes': 'Properties' + } + else: + assert dmr.retreive_rel_headers_dict(edge=edge) == { + 'displayName': 'Attribute', + 'label': None, + 'comment': 'Description', + 'required': 'Required', + 'validationRules': 'Validation Rules', + 'isPartOf': None, + 'id': 'Source' + } class TestDataModelGraph: From 48b8236e4bc044480bad1b2fcb4fec1d59bd866a Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 13 Oct 2023 15:17:46 -0700 Subject: [PATCH 161/239] Removed commented relationship functions --- schematic/schemas/data_model_relationships.py | 32 ------------------- 1 file changed, 32 deletions(-) diff --git a/schematic/schemas/data_model_relationships.py b/schematic/schemas/data_model_relationships.py index 0c2ab9132..c9ff0b944 100644 --- a/schematic/schemas/data_model_relationships.py +++ b/schematic/schemas/data_model_relationships.py @@ -207,35 +207,3 @@ def retreive_rel_headers_dict(self, edge: bool) -> Dict[str, str]: raise ValueError(f"Did not provide a 'edge_rel' for relationship {rel}") return rel_headers_dict - ''' - def define_edge_relationships(self): - """Helper function to retrieve CSV headers for edge relationships. - Returns: - edge_relationships: dict, key: csv_header if the key represents an edge relationship. - """ - edge_relationships = {} - for k, v in self.relationships_dictionary.items(): - try: - if v['edge_rel']: - edge_relationships.update({k:v['csv_header']}) - except KeyError: - print(f"Did not provide a 'edge_rel' for relationship {k}") - - return edge_relationships - - def define_value_relationships(self): - """Helper function to retrieve CSV headers for non-edge (value) relationships. 
- Returns: - edge_relationships: dict, key: csv_header if the key represents a value relationship. - """ - value_relationships = {} - for k, v in self.relationships_dictionary.items(): - try: - if not v['edge_rel']: - value_relationships.update({k:v['csv_header']}) - except KeyError: - print(f"Did not provide a 'edge_rel' for key {k}") - - return value_relationships - ''' - From 8d238054c08cd04d19b492cbcf10485b52096fc5 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 16 Oct 2023 09:17:19 -0700 Subject: [PATCH 162/239] remake poetry.lock file --- poetry.lock | 2475 +++++++++++++++++++++++++++------------------------ 1 file changed, 1295 insertions(+), 1180 deletions(-) diff --git a/poetry.lock b/poetry.lock index b860f9153..5c71fe849 100644 --- a/poetry.lock +++ b/poetry.lock @@ -37,25 +37,25 @@ dev = ["black", "docutils", "flake8", "ipython", "m2r", "mistune (<2.0.0)", "pyt [[package]] name = "anyio" -version = "3.7.1" +version = "4.0.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, - {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, + {file = "anyio-4.0.0-py3-none-any.whl", hash = "sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f"}, + {file = "anyio-4.0.0.tar.gz", hash = "sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a"}, ] [package.dependencies] -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"] -test = 
["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (<0.22)"] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.22)"] [[package]] name = "appnope" @@ -71,23 +71,24 @@ files = [ [[package]] name = "argon2-cffi" -version = "21.3.0" -description = "The secure Argon2 password hashing algorithm." +version = "23.1.0" +description = "Argon2 for Python" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, - {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, + {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, + {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, ] [package.dependencies] argon2-cffi-bindings = "*" [package.extras] -dev = ["cogapp", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "pre-commit", "pytest", "sphinx", "sphinx-notfound-page", "tomli"] -docs = ["furo", "sphinx", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] +dev = ["argon2-cffi[tests,typing]", "tox (>4)"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] +tests = ["hypothesis", "pytest"] +typing = ["mypy"] [[package]] name = "argon2-cffi-bindings" @@ -129,29 +130,34 @@ tests = ["pytest"] [[package]] name = "arrow" -version = "1.2.3" +version = "1.3.0" description = "Better dates & times for Python" 
category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"}, - {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, + {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, + {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, ] [package.dependencies] python-dateutil = ">=2.7.0" +types-python-dateutil = ">=2.8.10" + +[package.extras] +doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] +test = ["dateparser (>=1.0.0,<2.0.0)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (>=3.0.0,<4.0.0)"] [[package]] name = "astroid" -version = "2.15.6" +version = "2.15.8" description = "An abstract syntax tree for Python with inference support." 
category = "dev" optional = false python-versions = ">=3.7.2" files = [ - {file = "astroid-2.15.6-py3-none-any.whl", hash = "sha256:389656ca57b6108f939cf5d2f9a2a825a3be50ba9d589670f393236e0a03b91c"}, - {file = "astroid-2.15.6.tar.gz", hash = "sha256:903f024859b7c7687d7a7f3a3f73b17301f8e42dfd9cc9df9d4418172d3e2dbd"}, + {file = "astroid-2.15.8-py3-none-any.whl", hash = "sha256:1aa149fc5c6589e3d0ece885b4491acd80af4f087baafa3fb5203b113e68cd3c"}, + {file = "astroid-2.15.8.tar.gz", hash = "sha256:6c107453dffee9055899705de3c9ead36e74119cee151e5a9aaf7f0b0e020a6a"}, ] [package.dependencies] @@ -161,22 +167,37 @@ wrapt = {version = ">=1.11,<2", markers = "python_version < \"3.11\""} [[package]] name = "asttokens" -version = "2.2.1" +version = "2.4.0" description = "Annotate AST trees with source code positions" category = "main" optional = false python-versions = "*" files = [ - {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, - {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, + {file = "asttokens-2.4.0-py2.py3-none-any.whl", hash = "sha256:cf8fc9e61a86461aa9fb161a14a0841a03c405fa829ac6b202670b3495d2ce69"}, + {file = "asttokens-2.4.0.tar.gz", hash = "sha256:2e0171b991b2c959acc6c49318049236844a5da1d65ba2672c4880c1c894834e"}, ] [package.dependencies] -six = "*" +six = ">=1.12.0" [package.extras] test = ["astroid", "pytest"] +[[package]] +name = "async-lru" +version = "2.0.4" +description = "Simple LRU cache for asyncio" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, + {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = 
"python_version < \"3.11\""} + [[package]] name = "attrs" version = "23.1.0" @@ -198,16 +219,19 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte [[package]] name = "babel" -version = "2.12.1" +version = "2.13.0" description = "Internationalization utilities" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, - {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, + {file = "Babel-2.13.0-py3-none-any.whl", hash = "sha256:fbfcae1575ff78e26c7449136f1abbefc3c13ce542eeb13d43d50d8b047216ec"}, + {file = "Babel-2.13.0.tar.gz", hash = "sha256:04c3e2d28d2b7681644508f836be388ae49e0cfe91465095340395b60d00f210"}, ] +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + [[package]] name = "backcall" version = "0.2.0" @@ -241,34 +265,34 @@ lxml = ["lxml"] [[package]] name = "black" -version = "23.7.0" +version = "23.9.1" description = "The uncompromising code formatter." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, - {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, - {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, - {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, - {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, - {file = 
"black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, - {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, - {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, - {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, - {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, - {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, + {file = "black-23.9.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:d6bc09188020c9ac2555a498949401ab35bb6bf76d4e0f8ee251694664df6301"}, + {file = "black-23.9.1-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:13ef033794029b85dfea8032c9d3b92b42b526f1ff4bf13b2182ce4e917f5100"}, + {file = "black-23.9.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:75a2dc41b183d4872d3a500d2b9c9016e67ed95738a3624f4751a0cb4818fe71"}, + {file = "black-23.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13a2e4a93bb8ca74a749b6974925c27219bb3df4d42fc45e948a5d9feb5122b7"}, + {file = "black-23.9.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:adc3e4442eef57f99b5590b245a328aad19c99552e0bdc7f0b04db6656debd80"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:8431445bf62d2a914b541da7ab3e2b4f3bc052d2ccbf157ebad18ea126efb91f"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:8fc1ddcf83f996247505db6b715294eba56ea9372e107fd54963c7553f2b6dfe"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:7d30ec46de88091e4316b17ae58bbbfc12b2de05e069030f6b747dfc649ad186"}, + {file = "black-23.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031e8c69f3d3b09e1aa471a926a1eeb0b9071f80b17689a655f7885ac9325a6f"}, + {file = "black-23.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:538efb451cd50f43aba394e9ec7ad55a37598faae3348d723b59ea8e91616300"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:638619a559280de0c2aa4d76f504891c9860bb8fa214267358f0a20f27c12948"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:a732b82747235e0542c03bf352c126052c0fbc458d8a239a94701175b17d4855"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:cf3a4d00e4cdb6734b64bf23cd4341421e8953615cba6b3670453737a72ec204"}, + {file = "black-23.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf99f3de8b3273a8317681d8194ea222f10e0133a24a7548c73ce44ea1679377"}, + {file = "black-23.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:14f04c990259576acd093871e7e9b14918eb28f1866f91968ff5524293f9c573"}, + {file = "black-23.9.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:c619f063c2d68f19b2d7270f4cf3192cb81c9ec5bc5ba02df91471d0b88c4c5c"}, + {file = "black-23.9.1-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:6a3b50e4b93f43b34a9d3ef00d9b6728b4a722c997c99ab09102fd5efdb88325"}, + {file = "black-23.9.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c46767e8df1b7beefb0899c4a95fb43058fa8500b6db144f4ff3ca38eb2f6393"}, + {file = 
"black-23.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50254ebfa56aa46a9fdd5d651f9637485068a1adf42270148cd101cdf56e0ad9"}, + {file = "black-23.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:403397c033adbc45c2bd41747da1f7fc7eaa44efbee256b53842470d4ac5a70f"}, + {file = "black-23.9.1-py3-none-any.whl", hash = "sha256:6ccd59584cc834b6d127628713e4b6b968e5f79572da66284532525a042549f9"}, + {file = "black-23.9.1.tar.gz", hash = "sha256:24b6b3ff5c6d9ea08a8888f6977eae858e1f340d7260cf56d70a49823236b62d"}, ] [package.dependencies] @@ -278,7 +302,7 @@ packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] @@ -288,14 +312,14 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "bleach" -version = "6.0.0" +version = "6.1.0" description = "An easy safelist-based HTML-sanitizing tool." 
category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "bleach-6.0.0-py3-none-any.whl", hash = "sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4"}, - {file = "bleach-6.0.0.tar.gz", hash = "sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414"}, + {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, + {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, ] [package.dependencies] @@ -303,7 +327,7 @@ six = ">=1.9.0" webencodings = "*" [package.extras] -css = ["tinycss2 (>=1.1.0,<1.2)"] +css = ["tinycss2 (>=1.1.0,<1.3)"] [[package]] name = "cachetools" @@ -319,88 +343,76 @@ files = [ [[package]] name = "certifi" -version = "2023.5.7" +version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, - {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, ] [[package]] name = "cffi" -version = "1.15.1" +version = "1.16.0" description = "Foreign Function Interface for Python calling C code." 
category = "main" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = 
"cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = 
"cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = 
"cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = 
"sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, ] [package.dependencies] @@ -408,99 +420,114 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.2.0" +version = "3.3.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = 
"charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = 
"sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash 
= "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, + {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, + {file = 
"charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, + {file = 
"charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, + {file = 
"charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, + {file 
= "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, + {file = 
"charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, + {file = 
"charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, + {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, ] [[package]] name = "click" -version = "8.1.5" +version = 
"8.1.7" description = "Composable command line interface toolkit" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.5-py3-none-any.whl", hash = "sha256:e576aa487d679441d7d30abb87e1b43d24fc53bffb8758443b1a9e1cee504548"}, - {file = "click-8.1.5.tar.gz", hash = "sha256:4be4b1af8d665c6d942909916d31a213a106800c47d0eeba73d34da3cbc11367"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -551,18 +578,18 @@ files = [ [[package]] name = "comm" -version = "0.1.3" +version = "0.1.4" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "comm-0.1.3-py3-none-any.whl", hash = "sha256:16613c6211e20223f215fc6d3b266a247b6e2641bf4e0a3ad34cb1aff2aa3f37"}, - {file = "comm-0.1.3.tar.gz", hash = "sha256:a61efa9daffcfbe66fd643ba966f846a624e4e6d6767eda9cf6e993aadaab93e"}, + {file = "comm-0.1.4-py3-none-any.whl", hash = "sha256:6d52794cba11b36ed9860999cd10fd02d6b2eac177068fdd585e1e2f8a96e67a"}, + {file = "comm-0.1.4.tar.gz", hash = "sha256:354e40a59c9dd6db50c5cc6b4acc887d82e9603787f83b68c01a80a923984d15"}, ] [package.dependencies] -traitlets = ">=5.3" +traitlets = ">=4" [package.extras] lint = ["black (>=22.6.0)", "mdformat (>0.7)", "mdformat-gfm (>=0.3.5)", "ruff (>=0.0.156)"] @@ -602,72 +629,64 @@ tests = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14 [[package]] name = "coverage" -version = "7.2.7" +version = "7.3.2" description = "Code coverage measurement for Python" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, - {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, - {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, - {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, - {file = 
"coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, - {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, - {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, - {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, - {file = 
"coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, - {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, - {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, - {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, - {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, - {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, - {file = 
"coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, - {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, - {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, - {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, - {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, - {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, - {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, + {file = 
"coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, + {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, + {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, + {file = 
"coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, + {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, + {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, + {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, + {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, + {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, + {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, + {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, + {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, + {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, + {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, ] [package.dependencies] @@ -678,35 +697,35 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.2" +version = "41.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python 
developers." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711"}, - {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83"}, - {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5"}, - {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58"}, - {file = "cryptography-41.0.2-cp37-abi3-win32.whl", hash = "sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76"}, - {file = "cryptography-41.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd"}, - {file = 
"cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0"}, - {file = "cryptography-41.0.2.tar.gz", hash = "sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c"}, + {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"}, + {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f"}, + {file = 
"cryptography-41.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143"}, + {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397"}, + {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860"}, + {file = "cryptography-41.0.4-cp37-abi3-win32.whl", hash = "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd"}, + {file = "cryptography-41.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311"}, + {file = "cryptography-41.0.4.tar.gz", hash = "sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a"}, ] [package.dependencies] @@ -763,30 +782,30 @@ langdetect = ["langdetect"] [[package]] name = "debugpy" -version = "1.6.7" +version = "1.8.0" description = "An implementation of the Debug Adapter Protocol for Python" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "debugpy-1.6.7-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b3e7ac809b991006ad7f857f016fa92014445085711ef111fdc3f74f66144096"}, - {file = "debugpy-1.6.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3876611d114a18aafef6383695dfc3f1217c98a9168c1aaf1a02b01ec7d8d1e"}, - {file = "debugpy-1.6.7-cp310-cp310-win32.whl", hash = 
"sha256:33edb4afa85c098c24cc361d72ba7c21bb92f501104514d4ffec1fb36e09c01a"}, - {file = "debugpy-1.6.7-cp310-cp310-win_amd64.whl", hash = "sha256:ed6d5413474e209ba50b1a75b2d9eecf64d41e6e4501977991cdc755dc83ab0f"}, - {file = "debugpy-1.6.7-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:38ed626353e7c63f4b11efad659be04c23de2b0d15efff77b60e4740ea685d07"}, - {file = "debugpy-1.6.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:279d64c408c60431c8ee832dfd9ace7c396984fd7341fa3116aee414e7dcd88d"}, - {file = "debugpy-1.6.7-cp37-cp37m-win32.whl", hash = "sha256:dbe04e7568aa69361a5b4c47b4493d5680bfa3a911d1e105fbea1b1f23f3eb45"}, - {file = "debugpy-1.6.7-cp37-cp37m-win_amd64.whl", hash = "sha256:f90a2d4ad9a035cee7331c06a4cf2245e38bd7c89554fe3b616d90ab8aab89cc"}, - {file = "debugpy-1.6.7-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:5224eabbbeddcf1943d4e2821876f3e5d7d383f27390b82da5d9558fd4eb30a9"}, - {file = "debugpy-1.6.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae1123dff5bfe548ba1683eb972329ba6d646c3a80e6b4c06cd1b1dd0205e9b"}, - {file = "debugpy-1.6.7-cp38-cp38-win32.whl", hash = "sha256:9cd10cf338e0907fdcf9eac9087faa30f150ef5445af5a545d307055141dd7a4"}, - {file = "debugpy-1.6.7-cp38-cp38-win_amd64.whl", hash = "sha256:aaf6da50377ff4056c8ed470da24632b42e4087bc826845daad7af211e00faad"}, - {file = "debugpy-1.6.7-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:0679b7e1e3523bd7d7869447ec67b59728675aadfc038550a63a362b63029d2c"}, - {file = "debugpy-1.6.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de86029696e1b3b4d0d49076b9eba606c226e33ae312a57a46dca14ff370894d"}, - {file = "debugpy-1.6.7-cp39-cp39-win32.whl", hash = "sha256:d71b31117779d9a90b745720c0eab54ae1da76d5b38c8026c654f4a066b0130a"}, - {file = "debugpy-1.6.7-cp39-cp39-win_amd64.whl", hash = "sha256:c0ff93ae90a03b06d85b2c529eca51ab15457868a377c4cc40a23ab0e4e552a3"}, - {file = "debugpy-1.6.7-py2.py3-none-any.whl", hash = 
"sha256:53f7a456bc50706a0eaabecf2d3ce44c4d5010e46dfc65b6b81a518b42866267"}, - {file = "debugpy-1.6.7.zip", hash = "sha256:c4c2f0810fa25323abfdfa36cbbbb24e5c3b1a42cb762782de64439c575d67f2"}, + {file = "debugpy-1.8.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7fb95ca78f7ac43393cd0e0f2b6deda438ec7c5e47fa5d38553340897d2fbdfb"}, + {file = "debugpy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef9ab7df0b9a42ed9c878afd3eaaff471fce3fa73df96022e1f5c9f8f8c87ada"}, + {file = "debugpy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:a8b7a2fd27cd9f3553ac112f356ad4ca93338feadd8910277aff71ab24d8775f"}, + {file = "debugpy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5d9de202f5d42e62f932507ee8b21e30d49aae7e46d5b1dd5c908db1d7068637"}, + {file = "debugpy-1.8.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ef54404365fae8d45cf450d0544ee40cefbcb9cb85ea7afe89a963c27028261e"}, + {file = "debugpy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60009b132c91951354f54363f8ebdf7457aeb150e84abba5ae251b8e9f29a8a6"}, + {file = "debugpy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:8cd0197141eb9e8a4566794550cfdcdb8b3db0818bdf8c49a8e8f8053e56e38b"}, + {file = "debugpy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:a64093656c4c64dc6a438e11d59369875d200bd5abb8f9b26c1f5f723622e153"}, + {file = "debugpy-1.8.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:b05a6b503ed520ad58c8dc682749113d2fd9f41ffd45daec16e558ca884008cd"}, + {file = "debugpy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c6fb41c98ec51dd010d7ed650accfd07a87fe5e93eca9d5f584d0578f28f35f"}, + {file = "debugpy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:46ab6780159eeabb43c1495d9c84cf85d62975e48b6ec21ee10c95767c0590aa"}, + {file = "debugpy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:bdc5ef99d14b9c0fcb35351b4fbfc06ac0ee576aeab6b2511702e5a648a2e595"}, + {file = "debugpy-1.8.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = 
"sha256:61eab4a4c8b6125d41a34bad4e5fe3d2cc145caecd63c3fe953be4cc53e65bf8"}, + {file = "debugpy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125b9a637e013f9faac0a3d6a82bd17c8b5d2c875fb6b7e2772c5aba6d082332"}, + {file = "debugpy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:57161629133113c97b387382045649a2b985a348f0c9366e22217c87b68b73c6"}, + {file = "debugpy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:e3412f9faa9ade82aa64a50b602544efcba848c91384e9f93497a458767e6926"}, + {file = "debugpy-1.8.0-py2.py3-none-any.whl", hash = "sha256:9c9b0ac1ce2a42888199df1a1906e45e6f3c9555497643a85e0bf2406e3ffbc4"}, + {file = "debugpy-1.8.0.zip", hash = "sha256:12af2c55b419521e33d5fb21bd022df0b5eb267c3e178f1d374a63a2a6bdccd0"}, ] [[package]] @@ -848,14 +867,14 @@ packaging = "*" [[package]] name = "dill" -version = "0.3.6" -description = "serialize all of python" +version = "0.3.7" +description = "serialize all of Python" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, - {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"}, + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, + {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, ] [package.extras] @@ -899,14 +918,14 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.1.2" +version = "1.1.3" description = "Backport of PEP 654 (exception groups)" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, - {file = "exceptiongroup-1.1.2.tar.gz", hash = 
"sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, ] [package.extras] @@ -914,29 +933,29 @@ test = ["pytest (>=6)"] [[package]] name = "executing" -version = "1.2.0" +version = "2.0.0" description = "Get the currently executing AST node of a frame, and other information" category = "main" optional = false python-versions = "*" files = [ - {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, - {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, + {file = "executing-2.0.0-py2.py3-none-any.whl", hash = "sha256:06df6183df67389625f4e763921c6cf978944721abf3e714000200aab95b0657"}, + {file = "executing-2.0.0.tar.gz", hash = "sha256:0ff053696fdeef426cda5bd18eacd94f82c91f49823a2e9090124212ceea9b08"}, ] [package.extras] -tests = ["asttokens", "littleutils", "pytest", "rich"] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] [[package]] name = "fastjsonschema" -version = "2.17.1" +version = "2.18.1" description = "Fastest Python implementation of JSON schema" category = "main" optional = false python-versions = "*" files = [ - {file = "fastjsonschema-2.17.1-py3-none-any.whl", hash = "sha256:4b90b252628ca695280924d863fe37234eebadc29c5360d322571233dc9746e0"}, - {file = "fastjsonschema-2.17.1.tar.gz", hash = "sha256:f4eeb8a77cef54861dbf7424ac8ce71306f12cbb086c45131bcba2c6a4f726e3"}, + {file = "fastjsonschema-2.18.1-py3-none-any.whl", hash = "sha256:aec6a19e9f66e9810ab371cc913ad5f4e9e479b63a7072a2cd060a9369e329a8"}, + {file = "fastjsonschema-2.18.1.tar.gz", hash = 
"sha256:06dc8680d937628e993fa0cd278f196d20449a1adc087640710846b324d422ea"}, ] [package.extras] @@ -944,20 +963,20 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc [[package]] name = "flake8" -version = "6.0.0" +version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" category = "dev" optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, - {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.10.0,<2.11.0" -pyflakes = ">=3.0.0,<3.1.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" [[package]] name = "flask" @@ -1012,14 +1031,14 @@ files = [ [[package]] name = "google-api-core" -version = "2.11.1" +version = "2.12.0" description = "Google API client core library" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.11.1.tar.gz", hash = "sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a"}, - {file = "google_api_core-2.11.1-py3-none-any.whl", hash = "sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a"}, + {file = "google-api-core-2.12.0.tar.gz", hash = "sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553"}, + {file = "google_api_core-2.12.0-py3-none-any.whl", hash = "sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160"}, ] [package.dependencies] @@ -1035,14 +1054,14 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" 
-version = "2.93.0" +version = "2.103.0" description = "Google API Client Library for Python" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.93.0.tar.gz", hash = "sha256:62ee28e96031a10a1c341f226a75ac6a4f16bdb1d888dc8222b2cdca133d0031"}, - {file = "google_api_python_client-2.93.0-py2.py3-none-any.whl", hash = "sha256:f34abb671afd488bd19d30721ea20fb30d3796ddd825d6f91f26d8c718a9f07d"}, + {file = "google-api-python-client-2.103.0.tar.gz", hash = "sha256:5b48dc23913b9a1b447991add03f27c335831559b5a870c522316eae671caf44"}, + {file = "google_api_python_client-2.103.0-py2.py3-none-any.whl", hash = "sha256:5d6cf80cc34598a85b73e7e689e6eb1ba34f342095aeab9ec408f94521382a7c"}, ] [package.dependencies] @@ -1054,22 +1073,20 @@ uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.22.0" +version = "2.23.3" description = "Google Authentication Library" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "google-auth-2.22.0.tar.gz", hash = "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"}, - {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"}, + {file = "google-auth-2.23.3.tar.gz", hash = "sha256:6864247895eea5d13b9c57c9e03abb49cb94ce2dc7c58e91cba3248c7477c9e3"}, + {file = "google_auth-2.23.3-py2.py3-none-any.whl", hash = "sha256:a8f4608e65c244ead9e0538f181a96c6e11199ec114d41f1d7b1bffa96937bda"}, ] [package.dependencies] cachetools = ">=2.0.0,<6.0" pyasn1-modules = ">=0.2.1" rsa = ">=3.1.4,<5" -six = ">=1.9.0" -urllib3 = "<2.0" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] @@ -1080,20 +1097,19 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "google-auth-httplib2" -version = "0.1.0" +version = "0.1.1" description = "Google Authentication Library: httplib2 transport" 
category = "main" optional = false python-versions = "*" files = [ - {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, - {file = "google_auth_httplib2-0.1.0-py2.py3-none-any.whl", hash = "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10"}, + {file = "google-auth-httplib2-0.1.1.tar.gz", hash = "sha256:c64bc555fdc6dd788ea62ecf7bccffcf497bf77244887a3f3d7a5a02f8e3fc29"}, + {file = "google_auth_httplib2-0.1.1-py2.py3-none-any.whl", hash = "sha256:42c50900b8e4dcdf8222364d1f0efe32b8421fb6ed72f2613f12f75cc933478c"}, ] [package.dependencies] google-auth = "*" -httplib2 = ">=0.15.0" -six = "*" +httplib2 = ">=0.19.0" [[package]] name = "google-auth-oauthlib" @@ -1116,14 +1132,14 @@ tool = ["click (>=6.0.0)"] [[package]] name = "googleapis-common-protos" -version = "1.59.1" +version = "1.61.0" description = "Common protobufs used in Google APIs" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.59.1.tar.gz", hash = "sha256:b35d530fe825fb4227857bc47ad84c33c809ac96f312e13182bdeaa2abe1178a"}, - {file = "googleapis_common_protos-1.59.1-py2.py3-none-any.whl", hash = "sha256:0cbedb6fb68f1c07e18eb4c48256320777707e7d0c55063ae56c15db3224a61e"}, + {file = "googleapis-common-protos-1.61.0.tar.gz", hash = "sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b"}, + {file = "googleapis_common_protos-1.61.0-py2.py3-none-any.whl", hash = "sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0"}, ] [package.dependencies] @@ -1226,80 +1242,78 @@ vertica = ["sqlalchemy (>=1.3.18,<2.0.0)", "sqlalchemy-vertica-python (>=0.5.10) [[package]] name = "greenlet" -version = "2.0.2" +version = "3.0.0" description = "Lightweight in-process concurrent programming" category = "main" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" -files = [ - {file = 
"greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, - {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, - {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, - {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, - {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, - {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, - {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, - {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, - {file = 
"greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, - {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, - {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, - {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, - {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, - {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, - {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, - {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, - {file = 
"greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, - {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, - {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, - {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, - {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, - {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, - {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = 
"sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, - {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, - {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, - {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, - {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, - {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, - {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:e09dea87cc91aea5500262993cbd484b41edf8af74f976719dd83fe724644cd6"}, + {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47932c434a3c8d3c86d865443fadc1fbf574e9b11d6650b656e602b1797908a"}, + {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bdfaeecf8cc705d35d8e6de324bf58427d7eafb55f67050d8f28053a3d57118c"}, + {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a68d670c8f89ff65c82b936275369e532772eebc027c3be68c6b87ad05ca695"}, + {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ad562a104cd41e9d4644f46ea37167b93190c6d5e4048fcc4b80d34ecb278f"}, + {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02a807b2a58d5cdebb07050efe3d7deaf915468d112dfcf5e426d0564aa3aa4a"}, + {file = "greenlet-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b1660a15a446206c8545edc292ab5c48b91ff732f91b3d3b30d9a915d5ec4779"}, + {file = "greenlet-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:813720bd57e193391dfe26f4871186cf460848b83df7e23e6bef698a7624b4c9"}, + {file = "greenlet-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:aa15a2ec737cb609ed48902b45c5e4ff6044feb5dcdfcf6fa8482379190330d7"}, + {file = "greenlet-3.0.0-cp310-universal2-macosx_11_0_x86_64.whl", hash = "sha256:7709fd7bb02b31908dc8fd35bfd0a29fc24681d5cc9ac1d64ad07f8d2b7db62f"}, + {file = "greenlet-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:211ef8d174601b80e01436f4e6905aca341b15a566f35a10dd8d1e93f5dbb3b7"}, + {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6512592cc49b2c6d9b19fbaa0312124cd4c4c8a90d28473f86f92685cc5fef8e"}, + {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:871b0a8835f9e9d461b7fdaa1b57e3492dd45398e87324c047469ce2fc9f516c"}, + {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b505fcfc26f4148551826a96f7317e02c400665fa0883fe505d4fcaab1dabfdd"}, + {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123910c58234a8d40eaab595bc56a5ae49bdd90122dde5bdc012c20595a94c14"}, + {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:96d9ea57292f636ec851a9bb961a5cc0f9976900e16e5d5647f19aa36ba6366b"}, + {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b72b802496cccbd9b31acea72b6f87e7771ccfd7f7927437d592e5c92ed703c"}, + {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:527cd90ba3d8d7ae7dceb06fda619895768a46a1b4e423bdb24c1969823b8362"}, + {file = "greenlet-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:37f60b3a42d8b5499be910d1267b24355c495064f271cfe74bf28b17b099133c"}, + {file = "greenlet-3.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1482fba7fbed96ea7842b5a7fc11d61727e8be75a077e603e8ab49d24e234383"}, + {file = "greenlet-3.0.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:be557119bf467d37a8099d91fbf11b2de5eb1fd5fc5b91598407574848dc910f"}, + {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73b2f1922a39d5d59cc0e597987300df3396b148a9bd10b76a058a2f2772fc04"}, + {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1e22c22f7826096ad503e9bb681b05b8c1f5a8138469b255eb91f26a76634f2"}, + {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d363666acc21d2c204dd8705c0e0457d7b2ee7a76cb16ffc099d6799744ac99"}, + {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:334ef6ed8337bd0b58bb0ae4f7f2dcc84c9f116e474bb4ec250a8bb9bd797a66"}, + 
{file = "greenlet-3.0.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6672fdde0fd1a60b44fb1751a7779c6db487e42b0cc65e7caa6aa686874e79fb"}, + {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:952256c2bc5b4ee8df8dfc54fc4de330970bf5d79253c863fb5e6761f00dda35"}, + {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:269d06fa0f9624455ce08ae0179430eea61085e3cf6457f05982b37fd2cefe17"}, + {file = "greenlet-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9adbd8ecf097e34ada8efde9b6fec4dd2a903b1e98037adf72d12993a1c80b51"}, + {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b5ce7f40f0e2f8b88c28e6691ca6806814157ff05e794cdd161be928550f4c"}, + {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf94aa539e97a8411b5ea52fc6ccd8371be9550c4041011a091eb8b3ca1d810"}, + {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80dcd3c938cbcac986c5c92779db8e8ce51a89a849c135172c88ecbdc8c056b7"}, + {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e52a712c38e5fb4fd68e00dc3caf00b60cb65634d50e32281a9d6431b33b4af1"}, + {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5539f6da3418c3dc002739cb2bb8d169056aa66e0c83f6bacae0cd3ac26b423"}, + {file = "greenlet-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:343675e0da2f3c69d3fb1e894ba0a1acf58f481f3b9372ce1eb465ef93cf6fed"}, + {file = "greenlet-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:abe1ef3d780de56defd0c77c5ba95e152f4e4c4e12d7e11dd8447d338b85a625"}, + {file = "greenlet-3.0.0-cp37-cp37m-win32.whl", hash = "sha256:e693e759e172fa1c2c90d35dea4acbdd1d609b6936115d3739148d5e4cd11947"}, + {file = "greenlet-3.0.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:bdd696947cd695924aecb3870660b7545a19851f93b9d327ef8236bfc49be705"}, + {file = "greenlet-3.0.0-cp37-universal2-macosx_11_0_x86_64.whl", hash = "sha256:cc3e2679ea13b4de79bdc44b25a0c4fcd5e94e21b8f290791744ac42d34a0353"}, + {file = "greenlet-3.0.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:63acdc34c9cde42a6534518e32ce55c30f932b473c62c235a466469a710bfbf9"}, + {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a1a6244ff96343e9994e37e5b4839f09a0207d35ef6134dce5c20d260d0302c"}, + {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b822fab253ac0f330ee807e7485769e3ac85d5eef827ca224feaaefa462dc0d0"}, + {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8060b32d8586e912a7b7dac2d15b28dbbd63a174ab32f5bc6d107a1c4143f40b"}, + {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:621fcb346141ae08cb95424ebfc5b014361621b8132c48e538e34c3c93ac7365"}, + {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6bb36985f606a7c49916eff74ab99399cdfd09241c375d5a820bb855dfb4af9f"}, + {file = "greenlet-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10b5582744abd9858947d163843d323d0b67be9432db50f8bf83031032bc218d"}, + {file = "greenlet-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f351479a6914fd81a55c8e68963609f792d9b067fb8a60a042c585a621e0de4f"}, + {file = "greenlet-3.0.0-cp38-cp38-win32.whl", hash = "sha256:9de687479faec7db5b198cc365bc34addd256b0028956501f4d4d5e9ca2e240a"}, + {file = "greenlet-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:3fd2b18432e7298fcbec3d39e1a0aa91ae9ea1c93356ec089421fabc3651572b"}, + {file = "greenlet-3.0.0-cp38-universal2-macosx_11_0_x86_64.whl", hash = "sha256:3c0d36f5adc6e6100aedbc976d7428a9f7194ea79911aa4bf471f44ee13a9464"}, + {file = "greenlet-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:4cd83fb8d8e17633ad534d9ac93719ef8937568d730ef07ac3a98cb520fd93e4"}, + {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a5b2d4cdaf1c71057ff823a19d850ed5c6c2d3686cb71f73ae4d6382aaa7a06"}, + {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e7dcdfad252f2ca83c685b0fa9fba00e4d8f243b73839229d56ee3d9d219314"}, + {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c94e4e924d09b5a3e37b853fe5924a95eac058cb6f6fb437ebb588b7eda79870"}, + {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad6fb737e46b8bd63156b8f59ba6cdef46fe2b7db0c5804388a2d0519b8ddb99"}, + {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d55db1db455c59b46f794346efce896e754b8942817f46a1bada2d29446e305a"}, + {file = "greenlet-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:56867a3b3cf26dc8a0beecdb4459c59f4c47cdd5424618c08515f682e1d46692"}, + {file = "greenlet-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a812224a5fb17a538207e8cf8e86f517df2080c8ee0f8c1ed2bdaccd18f38f4"}, + {file = "greenlet-3.0.0-cp39-cp39-win32.whl", hash = "sha256:0d3f83ffb18dc57243e0151331e3c383b05e5b6c5029ac29f754745c800f8ed9"}, + {file = "greenlet-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:831d6f35037cf18ca5e80a737a27d822d87cd922521d18ed3dbc8a6967be50ce"}, + {file = "greenlet-3.0.0-cp39-universal2-macosx_11_0_x86_64.whl", hash = "sha256:a048293392d4e058298710a54dfaefcefdf49d287cd33fb1f7d63d55426e4355"}, + {file = "greenlet-3.0.0.tar.gz", hash = "sha256:19834e3f91f485442adc1ee440171ec5d9a4840a1f7bd5ed97833544719ce10b"}, ] [package.extras] -docs = ["Sphinx", "docutils (<0.18)"] +docs = ["Sphinx"] test = ["objgraph", "psutil"] [[package]] @@ -1413,14 +1427,14 @@ tests = ["pytest", "pytest-cov", "pytest-mock"] [[package]] name = "ipykernel" -version = "6.24.0" +version = 
"6.25.2" description = "IPython Kernel for Jupyter" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.24.0-py3-none-any.whl", hash = "sha256:2f5fffc7ad8f1fd5aadb4e171ba9129d9668dbafa374732cf9511ada52d6547f"}, - {file = "ipykernel-6.24.0.tar.gz", hash = "sha256:29cea0a716b1176d002a61d0b0c851f34536495bc4ef7dd0222c88b41b816123"}, + {file = "ipykernel-6.25.2-py3-none-any.whl", hash = "sha256:2e2ee359baba19f10251b99415bb39de1e97d04e1fab385646f24f0596510b77"}, + {file = "ipykernel-6.25.2.tar.gz", hash = "sha256:f468ddd1f17acb48c8ce67fcfa49ba6d46d4f9ac0438c1f441be7c3d1372230b"}, ] [package.dependencies] @@ -1447,14 +1461,14 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" [[package]] name = "ipython" -version = "8.14.0" +version = "8.16.1" description = "IPython: Productive Interactive Computing" category = "main" optional = false python-versions = ">=3.9" files = [ - {file = "ipython-8.14.0-py3-none-any.whl", hash = "sha256:248aca623f5c99a6635bc3857677b7320b9b8039f99f070ee0d20a5ca5a8e6bf"}, - {file = "ipython-8.14.0.tar.gz", hash = "sha256:1d197b907b6ba441b692c48cf2a3a2de280dc0ac91a3405b39349a50272ca0a1"}, + {file = "ipython-8.16.1-py3-none-any.whl", hash = "sha256:0852469d4d579d9cd613c220af7bf0c9cc251813e12be647cb9d463939db9b1e"}, + {file = "ipython-8.16.1.tar.gz", hash = "sha256:ad52f58fca8f9f848e256c629eff888efc0528c12fe0f8ec14f33205f23ef938"}, ] [package.dependencies] @@ -1462,6 +1476,7 @@ appnope = {version = "*", markers = "sys_platform == \"darwin\""} backcall = "*" colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} jedi = ">=0.16" matplotlib-inline = "*" pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} @@ -1473,9 +1488,9 @@ traitlets = ">=5" typing-extensions = {version = "*", markers = "python_version < \"3.10\""} [package.extras] -all = ["black", "curio", 
"docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] black = ["black"] -doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] @@ -1485,36 +1500,24 @@ qtconsole = ["qtconsole"] test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] -[[package]] -name = "ipython-genutils" -version = "0.2.0" -description = "Vestigial utilities from IPython" -category = "main" -optional = false -python-versions = "*" -files = [ - {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, - {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, -] - [[package]] name = "ipywidgets" -version = "8.0.7" +version = "8.1.1" 
description = "Jupyter interactive widgets" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "ipywidgets-8.0.7-py3-none-any.whl", hash = "sha256:e0aed0c95a1e55b6a123f64305245578bdc09e52965a34941c2b6a578b8c64a0"}, - {file = "ipywidgets-8.0.7.tar.gz", hash = "sha256:50ace0a8886e9a0d68b980db82f94c25d55d21ff2340ed36f802dd9365e94acf"}, + {file = "ipywidgets-8.1.1-py3-none-any.whl", hash = "sha256:2b88d728656aea3bbfd05d32c747cfd0078f9d7e159cf982433b58ad717eed7f"}, + {file = "ipywidgets-8.1.1.tar.gz", hash = "sha256:40211efb556adec6fa450ccc2a77d59ca44a060f4f9f136833df59c9f538e6e8"}, ] [package.dependencies] -ipykernel = ">=4.5.1" +comm = ">=0.1.3" ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0.7,<3.1.0" +jupyterlab-widgets = ">=3.0.9,<3.1.0" traitlets = ">=4.3.1" -widgetsnbextension = ">=4.0.7,<4.1.0" +widgetsnbextension = ">=4.0.9,<4.1.0" [package.extras] test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] @@ -1581,23 +1584,23 @@ files = [ [[package]] name = "jedi" -version = "0.18.2" +version = "0.19.1" description = "An autocompletion tool for Python that can be used for text editors." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, - {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"}, + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, ] [package.dependencies] -parso = ">=0.8.0,<0.9.0" +parso = ">=0.8.3,<0.9.0" [package.extras] docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] name = "jeepney" @@ -1633,6 +1636,21 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "json5" +version = "0.9.14" +description = "A Python implementation of the JSON5 data format." 
+category = "main" +optional = false +python-versions = "*" +files = [ + {file = "json5-0.9.14-py2.py3-none-any.whl", hash = "sha256:740c7f1b9e584a468dbb2939d8d458db3427f2c93ae2139d05f47e453eae964f"}, + {file = "json5-0.9.14.tar.gz", hash = "sha256:9ed66c3a6ca3510a976a9ef9b8c0787de24802724ab1860bc0153c7fdd589b02"}, +] + +[package.extras] +dev = ["hypothesis"] + [[package]] name = "jsonpatch" version = "1.33" @@ -1662,14 +1680,14 @@ files = [ [[package]] name = "jsonschema" -version = "4.18.3" +version = "4.19.1" description = "An implementation of JSON Schema validation for Python" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.18.3-py3-none-any.whl", hash = "sha256:aab78b34c2de001c6b692232f08c21a97b436fe18e0b817bf0511046924fceef"}, - {file = "jsonschema-4.18.3.tar.gz", hash = "sha256:64b7104d72efe856bea49ca4af37a14a9eba31b40bb7238179f3803130fd34d9"}, + {file = "jsonschema-4.19.1-py3-none-any.whl", hash = "sha256:cd5f1f9ed9444e554b38ba003af06c0a8c2868131e56bfbef0550fb450c0330e"}, + {file = "jsonschema-4.19.1.tar.gz", hash = "sha256:ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf"}, ] [package.dependencies] @@ -1692,14 +1710,14 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-specifications" -version = "2023.6.1" +version = "2023.7.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema_specifications-2023.6.1-py3-none-any.whl", hash = "sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7"}, - {file = "jsonschema_specifications-2023.6.1.tar.gz", hash = "sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28"}, + {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"}, + {file = 
"jsonschema_specifications-2023.7.1.tar.gz", hash = "sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"}, ] [package.dependencies] @@ -1707,14 +1725,14 @@ referencing = ">=0.28.0" [[package]] name = "jupyter-client" -version = "8.3.0" +version = "8.4.0" description = "Jupyter protocol implementation and client libraries" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_client-8.3.0-py3-none-any.whl", hash = "sha256:7441af0c0672edc5d28035e92ba5e32fadcfa8a4e608a434c228836a89df6158"}, - {file = "jupyter_client-8.3.0.tar.gz", hash = "sha256:3af69921fe99617be1670399a0b857ad67275eefcfa291e2c81a160b7b650f5f"}, + {file = "jupyter_client-8.4.0-py3-none-any.whl", hash = "sha256:6a2a950ec23a8f62f9e4c66acec7f0ea6c7d1f80ba0992e747b10c56ce2e6dbe"}, + {file = "jupyter_client-8.4.0.tar.gz", hash = "sha256:dc1b857d5d7d76ac101766c6e9b646bf18742721126e72e5d484c75a993cada2"}, ] [package.dependencies] @@ -1731,14 +1749,14 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt [[package]] name = "jupyter-core" -version = "5.3.1" +version = "5.4.0" description = "Jupyter core package. A base package on which Jupyter projects rely." 
category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_core-5.3.1-py3-none-any.whl", hash = "sha256:ae9036db959a71ec1cac33081eeb040a79e681f08ab68b0883e9a676c7a90dce"}, - {file = "jupyter_core-5.3.1.tar.gz", hash = "sha256:5ba5c7938a7f97a6b0481463f7ff0dbac7c15ba48cf46fa4035ca6e838aa1aba"}, + {file = "jupyter_core-5.4.0-py3-none-any.whl", hash = "sha256:66e252f675ac04dcf2feb6ed4afb3cd7f68cf92f483607522dc251f32d471571"}, + {file = "jupyter_core-5.4.0.tar.gz", hash = "sha256:e4b98344bb94ee2e3e6c4519a97d001656009f9cb2b7f2baf15b3c205770011d"}, ] [package.dependencies] @@ -1752,20 +1770,21 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] [[package]] name = "jupyter-events" -version = "0.6.3" +version = "0.7.0" description = "Jupyter Event System library" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "jupyter_events-0.6.3-py3-none-any.whl", hash = "sha256:57a2749f87ba387cd1bfd9b22a0875b889237dbf2edc2121ebb22bde47036c17"}, - {file = "jupyter_events-0.6.3.tar.gz", hash = "sha256:9a6e9995f75d1b7146b436ea24d696ce3a35bfa8bfe45e0c33c334c79464d0b3"}, + {file = "jupyter_events-0.7.0-py3-none-any.whl", hash = "sha256:4753da434c13a37c3f3c89b500afa0c0a6241633441421f6adafe2fb2e2b924e"}, + {file = "jupyter_events-0.7.0.tar.gz", hash = "sha256:7be27f54b8388c03eefea123a4f79247c5b9381c49fb1cd48615ee191eb12615"}, ] [package.dependencies] -jsonschema = {version = ">=3.2.0", extras = ["format-nongpl"]} +jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} python-json-logger = ">=2.0.4" pyyaml = ">=5.3" +referencing = "*" rfc3339-validator = "*" rfc3986-validator = ">=0.1.1" traitlets = ">=5.3" @@ -1773,18 +1792,34 @@ traitlets = ">=5.3" [package.extras] cli = ["click", "rich"] docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] -test = ["click", "coverage", "pre-commit", "pytest (>=7.0)", "pytest-asyncio 
(>=0.19.0)", "pytest-console-scripts", "pytest-cov", "rich"] +test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] + +[[package]] +name = "jupyter-lsp" +version = "2.2.0" +description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter-lsp-2.2.0.tar.gz", hash = "sha256:8ebbcb533adb41e5d635eb8fe82956b0aafbf0fd443b6c4bfa906edeeb8635a1"}, + {file = "jupyter_lsp-2.2.0-py3-none-any.whl", hash = "sha256:9e06b8b4f7dd50300b70dd1a78c0c3b0c3d8fa68e0f2d8a5d1fbab62072aca3f"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +jupyter-server = ">=1.1.2" [[package]] name = "jupyter-server" -version = "2.7.0" +version = "2.7.3" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server-2.7.0-py3-none-any.whl", hash = "sha256:6a77912aff643e53fa14bdb2634884b52b784a4be77ce8e93f7283faed0f0849"}, - {file = "jupyter_server-2.7.0.tar.gz", hash = "sha256:36da0a266d31a41ac335a366c88933c17dfa5bb817a48f5c02c16d303bc9477f"}, + {file = "jupyter_server-2.7.3-py3-none-any.whl", hash = "sha256:8e4b90380b59d7a1e31086c4692231f2a2ea4cb269f5516e60aba72ce8317fc9"}, + {file = "jupyter_server-2.7.3.tar.gz", hash = "sha256:d4916c8581c4ebbc534cebdaa8eca2478d9f3bfdd88eae29fcab0120eac57649"}, ] [package.dependencies] @@ -1802,7 +1837,7 @@ packaging = "*" prometheus-client = "*" pywinpty = {version = "*", markers = "os_name == \"nt\""} pyzmq = ">=24" -send2trash = "*" +send2trash = ">=1.8.2" terminado = ">=0.8.3" tornado = ">=6.2.0" traitlets = ">=5.6.0" @@ -1832,6 +1867,39 @@ terminado = ">=0.8.3" docs = ["jinja2", "jupyter-server", "mistune (<3.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", 
"sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] +[[package]] +name = "jupyterlab" +version = "4.0.7" +description = "JupyterLab computational environment" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyterlab-4.0.7-py3-none-any.whl", hash = "sha256:08683045117cc495531fdb39c22ababb9aaac6977a45e67cfad20046564c9c7c"}, + {file = "jupyterlab-4.0.7.tar.gz", hash = "sha256:48792efd9f962b2bcda1f87d72168ff122c288b1d97d32109e4a11b33dc862be"}, +] + +[package.dependencies] +async-lru = ">=1.0.0" +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +ipykernel = "*" +jinja2 = ">=3.0.3" +jupyter-core = "*" +jupyter-lsp = ">=2.0.0" +jupyter-server = ">=2.4.0,<3" +jupyterlab-server = ">=2.19.0,<3" +notebook-shim = ">=0.2" +packaging = "*" +tomli = {version = "*", markers = "python_version < \"3.11\""} +tornado = ">=6.2.0" +traitlets = "*" + +[package.extras] +dev = ["black[jupyter] (==23.7.0)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.0.286)"] +docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-tornasync", "sphinx (>=1.8,<7.2.0)", "sphinx-copybutton"] +docs-screenshots = ["altair (==5.0.1)", "ipython (==8.14.0)", "ipywidgets (==8.0.6)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post0)", "matplotlib (==3.7.1)", "nbconvert (>=7.0.0)", "pandas (==2.0.2)", "scipy (==1.10.1)", "vega-datasets (==0.9.0)"] +test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] + [[package]] name = "jupyterlab-pygments" version = "0.2.2" @@ -1844,16 +1912,43 
@@ files = [ {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, ] +[[package]] +name = "jupyterlab-server" +version = "2.25.0" +description = "A set of server components for JupyterLab and JupyterLab like applications." +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyterlab_server-2.25.0-py3-none-any.whl", hash = "sha256:c9f67a98b295c5dee87f41551b0558374e45d449f3edca153dd722140630dcb2"}, + {file = "jupyterlab_server-2.25.0.tar.gz", hash = "sha256:77c2f1f282d610f95e496e20d5bf1d2a7706826dfb7b18f3378ae2870d272fb7"}, +] + +[package.dependencies] +babel = ">=2.10" +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +jinja2 = ">=3.0.3" +json5 = ">=0.9.0" +jsonschema = ">=4.18.0" +jupyter-server = ">=1.21,<3" +packaging = ">=21.3" +requests = ">=2.31" + +[package.extras] +docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"] +openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"] +test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.7.0)", "pytest (>=7.0)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] + [[package]] name = "jupyterlab-widgets" -version = "3.0.8" +version = "3.0.9" description = "Jupyter interactive widgets for JupyterLab" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "jupyterlab_widgets-3.0.8-py3-none-any.whl", hash = "sha256:4715912d6ceab839c9db35953c764b3214ebbc9161c809f6e0510168845dfdf5"}, - {file = "jupyterlab_widgets-3.0.8.tar.gz", hash = "sha256:d428ab97b8d87cc7c54cbf37644d6e0f0e662f23876e05fa460a73ec3257252a"}, + {file = "jupyterlab_widgets-3.0.9-py3-none-any.whl", hash = 
"sha256:3cf5bdf5b897bf3bccf1c11873aa4afd776d7430200f765e0686bd352487b58d"}, + {file = "jupyterlab_widgets-3.0.9.tar.gz", hash = "sha256:6005a4e974c7beee84060fdfba341a3218495046de8ae3ec64888e5fe19fdb4c"}, ] [[package]] @@ -2007,23 +2102,23 @@ files = [ [[package]] name = "marshmallow" -version = "3.19.0" +version = "3.20.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "marshmallow-3.19.0-py3-none-any.whl", hash = "sha256:93f0958568da045b0021ec6aeb7ac37c81bfcccbb9a0e7ed8559885070b3a19b"}, - {file = "marshmallow-3.19.0.tar.gz", hash = "sha256:90032c0fd650ce94b6ec6dc8dfeb0e3ff50c144586462c389b81a07205bedb78"}, + {file = "marshmallow-3.20.1-py3-none-any.whl", hash = "sha256:684939db93e80ad3561392f47be0230743131560a41c5110684c16e21ade0a5c"}, + {file = "marshmallow-3.20.1.tar.gz", hash = "sha256:5d2371bbe42000f2b3fb5eaa065224df7d8f8597bc19a1bbfa5bfe7fba8da889"}, ] [package.dependencies] packaging = ">=17.0" [package.extras] -dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"] -docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.3.0)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] -lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)"] +dev = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"] +docs = ["alabaster (==0.7.13)", "autodocsumm (==0.2.11)", "sphinx (==7.0.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] +lint = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)"] tests = ["pytest", "pytz", "simplejson"] [[package]] @@ -2055,50 +2150,51 @@ files = [ [[package]] name = "mistune" 
-version = "3.0.1" +version = "3.0.2" description = "A sane and fast Markdown parser with useful plugins and renderers" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "mistune-3.0.1-py3-none-any.whl", hash = "sha256:b9b3e438efbb57c62b5beb5e134dab664800bdf1284a7ee09e8b12b13eb1aac6"}, - {file = "mistune-3.0.1.tar.gz", hash = "sha256:e912116c13aa0944f9dc530db38eb88f6a77087ab128f49f84a48f4c05ea163c"}, + {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, + {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, ] [[package]] name = "mypy" -version = "1.4.1" +version = "1.6.0" description = "Optional static typing for Python" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, - {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, - {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, - {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, - {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, - {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, - {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, - {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, - {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, - {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, - {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, - {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, - {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, - {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, - {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, - {file = 
"mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, - {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, - {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, - {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, - {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, + {file = "mypy-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:091f53ff88cb093dcc33c29eee522c087a438df65eb92acd371161c1f4380ff0"}, + {file = "mypy-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb7ff4007865833c470a601498ba30462b7374342580e2346bf7884557e40531"}, + {file = "mypy-1.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49499cf1e464f533fc45be54d20a6351a312f96ae7892d8e9f1708140e27ce41"}, + {file = "mypy-1.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4c192445899c69f07874dabda7e931b0cc811ea055bf82c1ababf358b9b2a72c"}, + {file = "mypy-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:3df87094028e52766b0a59a3e46481bb98b27986ed6ded6a6cc35ecc75bb9182"}, + {file = "mypy-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c8835a07b8442da900db47ccfda76c92c69c3a575872a5b764332c4bacb5a0a"}, + {file = "mypy-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:24f3de8b9e7021cd794ad9dfbf2e9fe3f069ff5e28cb57af6f873ffec1cb0425"}, + {file = "mypy-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:856bad61ebc7d21dbc019b719e98303dc6256cec6dcc9ebb0b214b81d6901bd8"}, + {file = "mypy-1.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:89513ddfda06b5c8ebd64f026d20a61ef264e89125dc82633f3c34eeb50e7d60"}, + {file = 
"mypy-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:9f8464ed410ada641c29f5de3e6716cbdd4f460b31cf755b2af52f2d5ea79ead"}, + {file = "mypy-1.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:971104bcb180e4fed0d7bd85504c9036346ab44b7416c75dd93b5c8c6bb7e28f"}, + {file = "mypy-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab98b8f6fdf669711f3abe83a745f67f50e3cbaea3998b90e8608d2b459fd566"}, + {file = "mypy-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a69db3018b87b3e6e9dd28970f983ea6c933800c9edf8c503c3135b3274d5ad"}, + {file = "mypy-1.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dccd850a2e3863891871c9e16c54c742dba5470f5120ffed8152956e9e0a5e13"}, + {file = "mypy-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:f8598307150b5722854f035d2e70a1ad9cc3c72d392c34fffd8c66d888c90f17"}, + {file = "mypy-1.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fea451a3125bf0bfe716e5d7ad4b92033c471e4b5b3e154c67525539d14dc15a"}, + {file = "mypy-1.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e28d7b221898c401494f3b77db3bac78a03ad0a0fff29a950317d87885c655d2"}, + {file = "mypy-1.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4b7a99275a61aa22256bab5839c35fe8a6887781862471df82afb4b445daae6"}, + {file = "mypy-1.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7469545380dddce5719e3656b80bdfbb217cfe8dbb1438532d6abc754b828fed"}, + {file = "mypy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:7807a2a61e636af9ca247ba8494031fb060a0a744b9fee7de3a54bed8a753323"}, + {file = "mypy-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2dad072e01764823d4b2f06bc7365bb1d4b6c2f38c4d42fade3c8d45b0b4b67"}, + {file = "mypy-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b19006055dde8a5425baa5f3b57a19fa79df621606540493e5e893500148c72f"}, + {file = "mypy-1.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:31eba8a7a71f0071f55227a8057468b8d2eb5bf578c8502c7f01abaec8141b2f"}, + {file = "mypy-1.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e0db37ac4ebb2fee7702767dfc1b773c7365731c22787cb99f507285014fcaf"}, + {file = "mypy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:c69051274762cccd13498b568ed2430f8d22baa4b179911ad0c1577d336ed849"}, + {file = "mypy-1.6.0-py3-none-any.whl", hash = "sha256:9e1589ca150a51d9d00bb839bfeca2f7a04f32cd62fad87a847bc0818e15d7dc"}, + {file = "mypy-1.6.0.tar.gz", hash = "sha256:4f3d27537abde1be6d5f2c96c29a454da333a2a271ae7d5bc7110e6d4b7beb3f"}, ] [package.dependencies] @@ -2109,7 +2205,6 @@ typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] reports = ["lxml"] [[package]] @@ -2124,42 +2219,6 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] -[[package]] -name = "nbclassic" -version = "1.0.0" -description = "Jupyter Notebook as a Jupyter Server extension." 
-category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "nbclassic-1.0.0-py3-none-any.whl", hash = "sha256:f99e4769b4750076cd4235c044b61232110733322384a94a63791d2e7beacc66"}, - {file = "nbclassic-1.0.0.tar.gz", hash = "sha256:0ae11eb2319455d805596bf320336cda9554b41d99ab9a3c31bf8180bffa30e3"}, -] - -[package.dependencies] -argon2-cffi = "*" -ipykernel = "*" -ipython-genutils = "*" -jinja2 = "*" -jupyter-client = ">=6.1.1" -jupyter-core = ">=4.6.1" -jupyter-server = ">=1.8" -nbconvert = ">=5" -nbformat = "*" -nest-asyncio = ">=1.5" -notebook-shim = ">=0.2.3" -prometheus-client = "*" -pyzmq = ">=17" -Send2Trash = ">=1.8.0" -terminado = ">=0.8.3" -tornado = ">=6.1" -traitlets = ">=4.2.1" - -[package.extras] -docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] -json-logging = ["json-logging"] -test = ["coverage", "nbval", "pytest", "pytest-cov", "pytest-jupyter", "pytest-playwright", "pytest-tornasync", "requests", "requests-unixsocket", "testpath"] - [[package]] name = "nbclient" version = "0.8.0" @@ -2185,14 +2244,14 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= [[package]] name = "nbconvert" -version = "7.6.0" +version = "7.9.2" description = "Converting Jupyter Notebooks" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "nbconvert-7.6.0-py3-none-any.whl", hash = "sha256:5a445c6794b0791984bc5436608fe2c066cb43c83920c7bc91bde3b765e9a264"}, - {file = "nbconvert-7.6.0.tar.gz", hash = "sha256:24fcf27efdef2b51d7f090cc5ce5a9b178766a55be513c4ebab08c91899ab550"}, + {file = "nbconvert-7.9.2-py3-none-any.whl", hash = "sha256:39fe4b8bdd1b0104fdd86fc8a43a9077ba64c720bda4c6132690d917a0a154ee"}, + {file = "nbconvert-7.9.2.tar.gz", hash = "sha256:e56cc7588acc4f93e2bb5a34ec69028e4941797b2bfaf6462f18a41d1cc258c9"}, ] [package.dependencies] @@ -2219,19 +2278,19 @@ docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx 
(>=0.2.12)", "pydata-sp qtpdf = ["nbconvert[qtpng]"] qtpng = ["pyqtwebengine (>=5.15)"] serve = ["tornado (>=6.1)"] -test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pytest", "pytest-dependency"] -webpdf = ["pyppeteer (>=1,<1.1)"] +test = ["flaky", "ipykernel", "ipywidgets (>=7)", "pytest", "pytest-dependency"] +webpdf = ["playwright"] [[package]] name = "nbformat" -version = "5.9.1" +version = "5.9.2" description = "The Jupyter Notebook format" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "nbformat-5.9.1-py3-none-any.whl", hash = "sha256:b7968ebf4811178a4108ee837eae1442e3f054132100f0359219e9ed1ce3ca45"}, - {file = "nbformat-5.9.1.tar.gz", hash = "sha256:3a7f52d040639cbd8a3890218c8b0ffb93211588c57446c90095e32ba5881b5d"}, + {file = "nbformat-5.9.2-py3-none-any.whl", hash = "sha256:1c5172d786a41b82bcfd0c23f9e6b6f072e8fb49c39250219e4acfff1efe89e9"}, + {file = "nbformat-5.9.2.tar.gz", hash = "sha256:5f98b5ba1997dff175e77e0c17d5c10a96eaed2cbd1de3533d1fc35d5e111192"}, ] [package.dependencies] @@ -2246,14 +2305,14 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] [[package]] name = "nest-asyncio" -version = "1.5.6" +version = "1.5.8" description = "Patch asyncio to allow nested event loops" category = "main" optional = false python-versions = ">=3.5" files = [ - {file = "nest_asyncio-1.5.6-py3-none-any.whl", hash = "sha256:b9a953fb40dceaa587d109609098db21900182b16440652454a146cffb06e8b8"}, - {file = "nest_asyncio-1.5.6.tar.gz", hash = "sha256:d267cc1ff794403f7df692964d1d2a3fa9418ffea2a3f6859a439ff482fef290"}, + {file = "nest_asyncio-1.5.8-py3-none-any.whl", hash = "sha256:accda7a339a70599cb08f9dd09a67e0c2ef8d8d6f4c07f96ab203f2ae254e48d"}, + {file = "nest_asyncio-1.5.8.tar.gz", hash = "sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"}, ] [[package]] @@ -2277,38 +2336,27 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] [[package]] name = "notebook" -version = "6.5.4" -description 
= "A web-based notebook environment for interactive computing" +version = "7.0.5" +description = "Jupyter Notebook - A web-based notebook environment for interactive computing" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "notebook-6.5.4-py3-none-any.whl", hash = "sha256:dd17e78aefe64c768737b32bf171c1c766666a21cc79a44d37a1700771cab56f"}, - {file = "notebook-6.5.4.tar.gz", hash = "sha256:517209568bd47261e2def27a140e97d49070602eea0d226a696f42a7f16c9a4e"}, + {file = "notebook-7.0.5-py3-none-any.whl", hash = "sha256:f26bd66accd54fcd96cc6696fb6c2911f15843b1c524318fd7cbdb32a763e6a6"}, + {file = "notebook-7.0.5.tar.gz", hash = "sha256:9e7c7a91de138bc8b5ee50486a20e70fa4d82d407b5622ec8beac9e13e773181"}, ] [package.dependencies] -argon2-cffi = "*" -ipykernel = "*" -ipython-genutils = "*" -jinja2 = "*" -jupyter-client = ">=5.3.4" -jupyter-core = ">=4.6.1" -nbclassic = ">=0.4.7" -nbconvert = ">=5" -nbformat = "*" -nest-asyncio = ">=1.5" -prometheus-client = "*" -pyzmq = ">=17" -Send2Trash = ">=1.8.0" -terminado = ">=0.8.3" -tornado = ">=6.1" -traitlets = ">=4.2.1" +jupyter-server = ">=2.4.0,<3" +jupyterlab = ">=4.0.2,<5" +jupyterlab-server = ">=2.22.1,<3" +notebook-shim = ">=0.2,<0.3" +tornado = ">=6.2.0" [package.extras] -docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] -json-logging = ["json-logging"] -test = ["coverage", "nbval", "pytest", "pytest-cov", "requests", "requests-unixsocket", "selenium (==4.1.5)", "testpath"] +dev = ["hatch", "pre-commit"] +docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.22.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] [[package]] name = "notebook-shim" @@ -2330,37 +2378,44 
@@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" [[package]] name = "numpy" -version = "1.25.1" +version = "1.26.0" description = "Fundamental package for array computing in Python" category = "main" optional = false -python-versions = ">=3.9" +python-versions = "<3.13,>=3.9" files = [ - {file = "numpy-1.25.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77d339465dff3eb33c701430bcb9c325b60354698340229e1dff97745e6b3efa"}, - {file = "numpy-1.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d736b75c3f2cb96843a5c7f8d8ccc414768d34b0a75f466c05f3a739b406f10b"}, - {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a90725800caeaa160732d6b31f3f843ebd45d6b5f3eec9e8cc287e30f2805bf"}, - {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c6c9261d21e617c6dc5eacba35cb68ec36bb72adcff0dee63f8fbc899362588"}, - {file = "numpy-1.25.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0def91f8af6ec4bb94c370e38c575855bf1d0be8a8fbfba42ef9c073faf2cf19"}, - {file = "numpy-1.25.1-cp310-cp310-win32.whl", hash = "sha256:fd67b306320dcadea700a8f79b9e671e607f8696e98ec255915c0c6d6b818503"}, - {file = "numpy-1.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:c1516db588987450b85595586605742879e50dcce923e8973f79529651545b57"}, - {file = "numpy-1.25.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b82655dd8efeea69dbf85d00fca40013d7f503212bc5259056244961268b66e"}, - {file = "numpy-1.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e8f6049c4878cb16960fbbfb22105e49d13d752d4d8371b55110941fb3b17800"}, - {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41a56b70e8139884eccb2f733c2f7378af06c82304959e174f8e7370af112e09"}, - {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5154b1a25ec796b1aee12ac1b22f414f94752c5f94832f14d8d6c9ac40bcca6"}, - {file = 
"numpy-1.25.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38eb6548bb91c421261b4805dc44def9ca1a6eef6444ce35ad1669c0f1a3fc5d"}, - {file = "numpy-1.25.1-cp311-cp311-win32.whl", hash = "sha256:791f409064d0a69dd20579345d852c59822c6aa087f23b07b1b4e28ff5880fcb"}, - {file = "numpy-1.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:c40571fe966393b212689aa17e32ed905924120737194b5d5c1b20b9ed0fb171"}, - {file = "numpy-1.25.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3d7abcdd85aea3e6cdddb59af2350c7ab1ed764397f8eec97a038ad244d2d105"}, - {file = "numpy-1.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a180429394f81c7933634ae49b37b472d343cccb5bb0c4a575ac8bbc433722f"}, - {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d412c1697c3853c6fc3cb9751b4915859c7afe6a277c2bf00acf287d56c4e625"}, - {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20e1266411120a4f16fad8efa8e0454d21d00b8c7cee5b5ccad7565d95eb42dd"}, - {file = "numpy-1.25.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f76aebc3358ade9eacf9bc2bb8ae589863a4f911611694103af05346637df1b7"}, - {file = "numpy-1.25.1-cp39-cp39-win32.whl", hash = "sha256:247d3ffdd7775bdf191f848be8d49100495114c82c2bd134e8d5d075fb386a1c"}, - {file = "numpy-1.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:1d5d3c68e443c90b38fdf8ef40e60e2538a27548b39b12b73132456847f4b631"}, - {file = "numpy-1.25.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:35a9527c977b924042170a0887de727cd84ff179e478481404c5dc66b4170009"}, - {file = "numpy-1.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d3fe3dd0506a28493d82dc3cf254be8cd0d26f4008a417385cbf1ae95b54004"}, - {file = "numpy-1.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:012097b5b0d00a11070e8f2e261128c44157a8689f7dedcf35576e525893f4fe"}, - {file = "numpy-1.25.1.tar.gz", hash = 
"sha256:9a3a9f3a61480cc086117b426a8bd86869c213fc4072e606f01c4e4b66eb92bf"}, + {file = "numpy-1.26.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8db2f125746e44dce707dd44d4f4efeea8d7e2b43aace3f8d1f235cfa2733dd"}, + {file = "numpy-1.26.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0621f7daf973d34d18b4e4bafb210bbaf1ef5e0100b5fa750bd9cde84c7ac292"}, + {file = "numpy-1.26.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51be5f8c349fdd1a5568e72713a21f518e7d6707bcf8503b528b88d33b57dc68"}, + {file = "numpy-1.26.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:767254ad364991ccfc4d81b8152912e53e103ec192d1bb4ea6b1f5a7117040be"}, + {file = "numpy-1.26.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:436c8e9a4bdeeee84e3e59614d38c3dbd3235838a877af8c211cfcac8a80b8d3"}, + {file = "numpy-1.26.0-cp310-cp310-win32.whl", hash = "sha256:c2e698cb0c6dda9372ea98a0344245ee65bdc1c9dd939cceed6bb91256837896"}, + {file = "numpy-1.26.0-cp310-cp310-win_amd64.whl", hash = "sha256:09aaee96c2cbdea95de76ecb8a586cb687d281c881f5f17bfc0fb7f5890f6b91"}, + {file = "numpy-1.26.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:637c58b468a69869258b8ae26f4a4c6ff8abffd4a8334c830ffb63e0feefe99a"}, + {file = "numpy-1.26.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:306545e234503a24fe9ae95ebf84d25cba1fdc27db971aa2d9f1ab6bba19a9dd"}, + {file = "numpy-1.26.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6adc33561bd1d46f81131d5352348350fc23df4d742bb246cdfca606ea1208"}, + {file = "numpy-1.26.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e062aa24638bb5018b7841977c360d2f5917268d125c833a686b7cbabbec496c"}, + {file = "numpy-1.26.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:546b7dd7e22f3c6861463bebb000646fa730e55df5ee4a0224408b5694cc6148"}, + {file = "numpy-1.26.0-cp311-cp311-win32.whl", hash = 
"sha256:c0b45c8b65b79337dee5134d038346d30e109e9e2e9d43464a2970e5c0e93229"}, + {file = "numpy-1.26.0-cp311-cp311-win_amd64.whl", hash = "sha256:eae430ecf5794cb7ae7fa3808740b015aa80747e5266153128ef055975a72b99"}, + {file = "numpy-1.26.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:166b36197e9debc4e384e9c652ba60c0bacc216d0fc89e78f973a9760b503388"}, + {file = "numpy-1.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f042f66d0b4ae6d48e70e28d487376204d3cbf43b84c03bac57e28dac6151581"}, + {file = "numpy-1.26.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5e18e5b14a7560d8acf1c596688f4dfd19b4f2945b245a71e5af4ddb7422feb"}, + {file = "numpy-1.26.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f6bad22a791226d0a5c7c27a80a20e11cfe09ad5ef9084d4d3fc4a299cca505"}, + {file = "numpy-1.26.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4acc65dd65da28060e206c8f27a573455ed724e6179941edb19f97e58161bb69"}, + {file = "numpy-1.26.0-cp312-cp312-win32.whl", hash = "sha256:bb0d9a1aaf5f1cb7967320e80690a1d7ff69f1d47ebc5a9bea013e3a21faec95"}, + {file = "numpy-1.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:ee84ca3c58fe48b8ddafdeb1db87388dce2c3c3f701bf447b05e4cfcc3679112"}, + {file = "numpy-1.26.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4a873a8180479bc829313e8d9798d5234dfacfc2e8a7ac188418189bb8eafbd2"}, + {file = "numpy-1.26.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:914b28d3215e0c721dc75db3ad6d62f51f630cb0c277e6b3bcb39519bed10bd8"}, + {file = "numpy-1.26.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c78a22e95182fb2e7874712433eaa610478a3caf86f28c621708d35fa4fd6e7f"}, + {file = "numpy-1.26.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f737708b366c36b76e953c46ba5827d8c27b7a8c9d0f471810728e5a2fe57c"}, + {file = "numpy-1.26.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:b44e6a09afc12952a7d2a58ca0a2429ee0d49a4f89d83a0a11052da696440e49"}, + {file = "numpy-1.26.0-cp39-cp39-win32.whl", hash = "sha256:5671338034b820c8d58c81ad1dafc0ed5a00771a82fccc71d6438df00302094b"}, + {file = "numpy-1.26.0-cp39-cp39-win_amd64.whl", hash = "sha256:020cdbee66ed46b671429c7265cf00d8ac91c046901c55684954c3958525dab2"}, + {file = "numpy-1.26.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0792824ce2f7ea0c82ed2e4fecc29bb86bee0567a080dacaf2e0a01fe7654369"}, + {file = "numpy-1.26.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d484292eaeb3e84a51432a94f53578689ffdea3f90e10c8b203a99be5af57d8"}, + {file = "numpy-1.26.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:186ba67fad3c60dbe8a3abff3b67a91351100f2661c8e2a80364ae6279720299"}, + {file = "numpy-1.26.0.tar.gz", hash = "sha256:f93fc78fe8bf15afe2b8d6b6499f1c73953169fad1e9a8dd086cdff3190e7fdf"}, ] [[package]] @@ -2416,26 +2471,26 @@ et-xmlfile = "*" [[package]] name = "overrides" -version = "7.3.1" +version = "7.4.0" description = "A decorator to automatically detect mismatch when overriding a method." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "overrides-7.3.1-py3-none-any.whl", hash = "sha256:6187d8710a935d09b0bcef8238301d6ee2569d2ac1ae0ec39a8c7924e27f58ca"}, - {file = "overrides-7.3.1.tar.gz", hash = "sha256:8b97c6c1e1681b78cbc9424b138d880f0803c2254c5ebaabdde57bb6c62093f2"}, + {file = "overrides-7.4.0-py3-none-any.whl", hash = "sha256:3ad24583f86d6d7a49049695efe9933e67ba62f0c7625d53c59fa832ce4b8b7d"}, + {file = "overrides-7.4.0.tar.gz", hash = "sha256:9502a3cca51f4fac40b5feca985b6703a5c1f6ad815588a7ca9e285b9dca6757"}, ] [[package]] name = "packaging" -version = "23.1" +version = "23.2" description = "Core utilities for Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] @@ -2536,14 +2591,14 @@ testing = ["docopt", "pytest (<6.0.0)"] [[package]] name = "pathspec" -version = "0.11.1" +version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] [[package]] @@ -2595,30 +2650,30 @@ files = [ [[package]] name = "platformdirs" -version = "3.8.1" +version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.8.1-py3-none-any.whl", hash = "sha256:cec7b889196b9144d088e4c57d9ceef7374f6c39694ad1577a0aab50d27ea28c"}, - {file = "platformdirs-3.8.1.tar.gz", hash = "sha256:f87ca4fcff7d2b0f81c6a748a77973d7af0f4d526f98f308477c3c436c74d528"}, + {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, + {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, ] [package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] [[package]] name = "pluggy" -version = "1.2.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" 
category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -2657,25 +2712,25 @@ wcwidth = "*" [[package]] name = "protobuf" -version = "4.23.4" +version = "4.24.4" description = "" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "protobuf-4.23.4-cp310-abi3-win32.whl", hash = "sha256:5fea3c64d41ea5ecf5697b83e41d09b9589e6f20b677ab3c48e5f242d9b7897b"}, - {file = "protobuf-4.23.4-cp310-abi3-win_amd64.whl", hash = "sha256:7b19b6266d92ca6a2a87effa88ecc4af73ebc5cfde194dc737cf8ef23a9a3b12"}, - {file = "protobuf-4.23.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8547bf44fe8cec3c69e3042f5c4fb3e36eb2a7a013bb0a44c018fc1e427aafbd"}, - {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:fee88269a090ada09ca63551bf2f573eb2424035bcf2cb1b121895b01a46594a"}, - {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:effeac51ab79332d44fba74660d40ae79985901ac21bca408f8dc335a81aa597"}, - {file = "protobuf-4.23.4-cp37-cp37m-win32.whl", hash = "sha256:c3e0939433c40796ca4cfc0fac08af50b00eb66a40bbbc5dee711998fb0bbc1e"}, - {file = "protobuf-4.23.4-cp37-cp37m-win_amd64.whl", hash = "sha256:9053df6df8e5a76c84339ee4a9f5a2661ceee4a0dab019e8663c50ba324208b0"}, - {file = "protobuf-4.23.4-cp38-cp38-win32.whl", hash = "sha256:e1c915778d8ced71e26fcf43c0866d7499891bca14c4368448a82edc61fdbc70"}, - {file = "protobuf-4.23.4-cp38-cp38-win_amd64.whl", hash = 
"sha256:351cc90f7d10839c480aeb9b870a211e322bf05f6ab3f55fcb2f51331f80a7d2"}, - {file = "protobuf-4.23.4-cp39-cp39-win32.whl", hash = "sha256:6dd9b9940e3f17077e820b75851126615ee38643c2c5332aa7a359988820c720"}, - {file = "protobuf-4.23.4-cp39-cp39-win_amd64.whl", hash = "sha256:0a5759f5696895de8cc913f084e27fd4125e8fb0914bb729a17816a33819f474"}, - {file = "protobuf-4.23.4-py3-none-any.whl", hash = "sha256:e9d0be5bf34b275b9f87ba7407796556abeeba635455d036c7351f7c183ef8ff"}, - {file = "protobuf-4.23.4.tar.gz", hash = "sha256:ccd9430c0719dce806b93f89c91de7977304729e55377f872a92465d548329a9"}, + {file = "protobuf-4.24.4-cp310-abi3-win32.whl", hash = "sha256:ec9912d5cb6714a5710e28e592ee1093d68c5ebfeda61983b3f40331da0b1ebb"}, + {file = "protobuf-4.24.4-cp310-abi3-win_amd64.whl", hash = "sha256:1badab72aa8a3a2b812eacfede5020472e16c6b2212d737cefd685884c191085"}, + {file = "protobuf-4.24.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e61a27f362369c2f33248a0ff6896c20dcd47b5d48239cb9720134bef6082e4"}, + {file = "protobuf-4.24.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:bffa46ad9612e6779d0e51ae586fde768339b791a50610d85eb162daeb23661e"}, + {file = "protobuf-4.24.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:b493cb590960ff863743b9ff1452c413c2ee12b782f48beca77c8da3e2ffe9d9"}, + {file = "protobuf-4.24.4-cp37-cp37m-win32.whl", hash = "sha256:dbbed8a56e56cee8d9d522ce844a1379a72a70f453bde6243e3c86c30c2a3d46"}, + {file = "protobuf-4.24.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6b7d2e1c753715dcfe9d284a25a52d67818dd43c4932574307daf836f0071e37"}, + {file = "protobuf-4.24.4-cp38-cp38-win32.whl", hash = "sha256:02212557a76cd99574775a81fefeba8738d0f668d6abd0c6b1d3adcc75503dbe"}, + {file = "protobuf-4.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:2fa3886dfaae6b4c5ed2730d3bf47c7a38a72b3a1f0acb4d4caf68e6874b947b"}, + {file = "protobuf-4.24.4-cp39-cp39-win32.whl", hash = "sha256:b77272f3e28bb416e2071186cb39efd4abbf696d682cbb5dc731308ad37fa6dd"}, + {file = 
"protobuf-4.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:9fee5e8aa20ef1b84123bb9232b3f4a5114d9897ed89b4b8142d81924e05d79b"}, + {file = "protobuf-4.24.4-py3-none-any.whl", hash = "sha256:80797ce7424f8c8d2f2547e2d42bfbb6c08230ce5832d6c099a37335c9c90a92"}, + {file = "protobuf-4.24.4.tar.gz", hash = "sha256:5a70731910cd9104762161719c3d883c960151eea077134458503723b60e3667"}, ] [[package]] @@ -2773,14 +2828,14 @@ pyasn1 = ">=0.4.6,<0.6.0" [[package]] name = "pycodestyle" -version = "2.10.0" +version = "2.11.1" description = "Python style guide checker" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, - {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, ] [[package]] @@ -2797,48 +2852,48 @@ files = [ [[package]] name = "pydantic" -version = "1.10.11" +version = "1.10.13" description = "Data validation and settings management using python type hints" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff44c5e89315b15ff1f7fdaf9853770b810936d6b01a7bcecaa227d2f8fe444f"}, - {file = "pydantic-1.10.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c098d4ab5e2d5b3984d3cb2527e2d6099d3de85630c8934efcfdc348a9760e"}, - {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16928fdc9cb273c6af00d9d5045434c39afba5f42325fb990add2c241402d151"}, - {file = 
"pydantic-1.10.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0588788a9a85f3e5e9ebca14211a496409cb3deca5b6971ff37c556d581854e7"}, - {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9baf78b31da2dc3d3f346ef18e58ec5f12f5aaa17ac517e2ffd026a92a87588"}, - {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:373c0840f5c2b5b1ccadd9286782852b901055998136287828731868027a724f"}, - {file = "pydantic-1.10.11-cp310-cp310-win_amd64.whl", hash = "sha256:c3339a46bbe6013ef7bdd2844679bfe500347ac5742cd4019a88312aa58a9847"}, - {file = "pydantic-1.10.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:08a6c32e1c3809fbc49debb96bf833164f3438b3696abf0fbeceb417d123e6eb"}, - {file = "pydantic-1.10.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a451ccab49971af043ec4e0d207cbc8cbe53dbf148ef9f19599024076fe9c25b"}, - {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b02d24f7b2b365fed586ed73582c20f353a4c50e4be9ba2c57ab96f8091ddae"}, - {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f34739a89260dfa420aa3cbd069fbcc794b25bbe5c0a214f8fb29e363484b66"}, - {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e297897eb4bebde985f72a46a7552a7556a3dd11e7f76acda0c1093e3dbcf216"}, - {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d185819a7a059550ecb85d5134e7d40f2565f3dd94cfd870132c5f91a89cf58c"}, - {file = "pydantic-1.10.11-cp311-cp311-win_amd64.whl", hash = "sha256:4400015f15c9b464c9db2d5d951b6a780102cfa5870f2c036d37c23b56f7fc1b"}, - {file = "pydantic-1.10.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2417de68290434461a266271fc57274a138510dca19982336639484c73a07af6"}, - {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:331c031ba1554b974c98679bd0780d89670d6fd6f53f5d70b10bdc9addee1713"}, - {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8268a735a14c308923e8958363e3a3404f6834bb98c11f5ab43251a4e410170c"}, - {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:44e51ba599c3ef227e168424e220cd3e544288c57829520dc90ea9cb190c3248"}, - {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d7781f1d13b19700b7949c5a639c764a077cbbdd4322ed505b449d3ca8edcb36"}, - {file = "pydantic-1.10.11-cp37-cp37m-win_amd64.whl", hash = "sha256:7522a7666157aa22b812ce14c827574ddccc94f361237ca6ea8bb0d5c38f1629"}, - {file = "pydantic-1.10.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc64eab9b19cd794a380179ac0e6752335e9555d214cfcb755820333c0784cb3"}, - {file = "pydantic-1.10.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8dc77064471780262b6a68fe67e013298d130414d5aaf9b562c33987dbd2cf4f"}, - {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe429898f2c9dd209bd0632a606bddc06f8bce081bbd03d1c775a45886e2c1cb"}, - {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:192c608ad002a748e4a0bed2ddbcd98f9b56df50a7c24d9a931a8c5dd053bd3d"}, - {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ef55392ec4bb5721f4ded1096241e4b7151ba6d50a50a80a2526c854f42e6a2f"}, - {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e0bb6efe86281623abbeeb0be64eab740c865388ee934cd3e6a358784aca6e"}, - {file = "pydantic-1.10.11-cp38-cp38-win_amd64.whl", hash = "sha256:265a60da42f9f27e0b1014eab8acd3e53bd0bad5c5b4884e98a55f8f596b2c19"}, - {file = "pydantic-1.10.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:469adf96c8e2c2bbfa655fc7735a2a82f4c543d9fee97bd113a7fb509bf5e622"}, - {file = 
"pydantic-1.10.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6cbfbd010b14c8a905a7b10f9fe090068d1744d46f9e0c021db28daeb8b6de1"}, - {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abade85268cc92dff86d6effcd917893130f0ff516f3d637f50dadc22ae93999"}, - {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9738b0f2e6c70f44ee0de53f2089d6002b10c33264abee07bdb5c7f03038303"}, - {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:787cf23e5a0cde753f2eabac1b2e73ae3844eb873fd1f5bdbff3048d8dbb7604"}, - {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:174899023337b9fc685ac8adaa7b047050616136ccd30e9070627c1aaab53a13"}, - {file = "pydantic-1.10.11-cp39-cp39-win_amd64.whl", hash = "sha256:1954f8778489a04b245a1e7b8b22a9d3ea8ef49337285693cf6959e4b757535e"}, - {file = "pydantic-1.10.11-py3-none-any.whl", hash = "sha256:008c5e266c8aada206d0627a011504e14268a62091450210eda7c07fabe6963e"}, - {file = "pydantic-1.10.11.tar.gz", hash = "sha256:f66d479cf7eb331372c470614be6511eae96f1f120344c25f3f9bb59fb1b5528"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, + {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, + {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, + {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, + {file 
= "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, + {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, + {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, + {file = 
"pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, + {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, + {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, + {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, ] [package.dependencies] @@ -2850,26 +2905,26 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pyflakes" -version = "3.0.1" +version = "3.1.0" description = "passive checker of Python programs" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, - {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, ] [[package]] name = "pygments" -version = "2.15.1" +version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, - {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, ] [package.extras] @@ -2896,18 +2951,18 @@ pandas = ["pandas (>=0.14.0)"] [[package]] name = "pylint" -version = "2.17.4" +version = "2.17.7" description = "python code static checker" category = "dev" optional = false python-versions = ">=3.7.2" files = [ - {file = "pylint-2.17.4-py3-none-any.whl", hash = "sha256:7a1145fb08c251bdb5cca11739722ce64a63db479283d10ce718b2460e54123c"}, - {file = "pylint-2.17.4.tar.gz", hash = "sha256:5dcf1d9e19f41f38e4e85d10f511e5b9c35e1aa74251bf95cdd8cb23584e2db1"}, + {file = "pylint-2.17.7-py3-none-any.whl", hash = "sha256:27a8d4c7ddc8c2f8c18aa0050148f89ffc09838142193fdbe98f172781a3ff87"}, + {file = "pylint-2.17.7.tar.gz", hash = "sha256:f4fcac7ae74cfe36bc8451e931d8438e4a476c20314b1101c458ad0f05191fad"}, ] [package.dependencies] -astroid = ">=2.15.4,<=2.17.0-dev0" +astroid = ">=2.15.8,<=2.17.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = {version = ">=0.2", markers = "python_version < \"3.11\""} isort = ">=4.2.5,<6" @@ -2942,14 +2997,14 @@ test = ["flaky", "pretend", "pytest (>=3.0.1)"] [[package]] name = "pyparsing" -version = "3.1.0" +version = "3.1.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" category = "main" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.0-py3-none-any.whl", hash = 
"sha256:d554a96d1a7d3ddaf7183104485bc19fd80543ad6ac5bdb6426719d766fb06c1"}, - {file = "pyparsing-3.1.0.tar.gz", hash = "sha256:edb662d6fe322d6e990b1594b5feaeadf806803359e3d4d42f11e295e588f0ea"}, + {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, + {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, ] [package.extras] @@ -2957,14 +3012,14 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "7.4.0" +version = "7.4.2" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, + {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, + {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, ] [package.dependencies] @@ -3059,14 +3114,14 @@ files = [ [[package]] name = "pytz" -version = "2023.3" +version = "2023.3.post1" description = "World timezone definitions, modern and historical" category = "main" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] [[package]] @@ -3107,155 
+3162,181 @@ files = [ [[package]] name = "pywinpty" -version = "2.0.10" +version = "2.0.12" description = "Pseudo terminal support for Windows from Python." category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pywinpty-2.0.10-cp310-none-win_amd64.whl", hash = "sha256:4c7d06ad10f6e92bc850a467f26d98f4f30e73d2fe5926536308c6ae0566bc16"}, - {file = "pywinpty-2.0.10-cp311-none-win_amd64.whl", hash = "sha256:7ffbd66310b83e42028fc9df7746118978d94fba8c1ebf15a7c1275fdd80b28a"}, - {file = "pywinpty-2.0.10-cp37-none-win_amd64.whl", hash = "sha256:38cb924f2778b5751ef91a75febd114776b3af0ae411bc667be45dd84fc881d3"}, - {file = "pywinpty-2.0.10-cp38-none-win_amd64.whl", hash = "sha256:902d79444b29ad1833b8d5c3c9aabdfd428f4f068504430df18074007c8c0de8"}, - {file = "pywinpty-2.0.10-cp39-none-win_amd64.whl", hash = "sha256:3c46aef80dd50979aff93de199e4a00a8ee033ba7a03cadf0a91fed45f0c39d7"}, - {file = "pywinpty-2.0.10.tar.gz", hash = "sha256:cdbb5694cf8c7242c2ecfaca35c545d31fa5d5814c3d67a4e628f803f680ebea"}, + {file = "pywinpty-2.0.12-cp310-none-win_amd64.whl", hash = "sha256:21319cd1d7c8844fb2c970fb3a55a3db5543f112ff9cfcd623746b9c47501575"}, + {file = "pywinpty-2.0.12-cp311-none-win_amd64.whl", hash = "sha256:853985a8f48f4731a716653170cd735da36ffbdc79dcb4c7b7140bce11d8c722"}, + {file = "pywinpty-2.0.12-cp312-none-win_amd64.whl", hash = "sha256:1617b729999eb6713590e17665052b1a6ae0ad76ee31e60b444147c5b6a35dca"}, + {file = "pywinpty-2.0.12-cp38-none-win_amd64.whl", hash = "sha256:189380469ca143d06e19e19ff3fba0fcefe8b4a8cc942140a6b863aed7eebb2d"}, + {file = "pywinpty-2.0.12-cp39-none-win_amd64.whl", hash = "sha256:7520575b6546db23e693cbd865db2764097bd6d4ef5dc18c92555904cd62c3d4"}, + {file = "pywinpty-2.0.12.tar.gz", hash = "sha256:8197de460ae8ebb7f5d1701dfa1b5df45b157bb832e92acba316305e18ca00dd"}, ] [[package]] name = "pyyaml" -version = "6.0" +version = "6.0.1" description = "YAML parser and emitter for Python" category = "main" 
optional = false python-versions = ">=3.6" files = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = 
"sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", 
hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", 
hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, 
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = 
"PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] [[package]] name = "pyzmq" -version = "25.1.0" +version = "25.1.1" description = "Python bindings for 0MQ" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "pyzmq-25.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = 
"sha256:1a6169e69034eaa06823da6a93a7739ff38716142b3596c180363dee729d713d"}, - {file = "pyzmq-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:19d0383b1f18411d137d891cab567de9afa609b214de68b86e20173dc624c101"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1e931d9a92f628858a50f5bdffdfcf839aebe388b82f9d2ccd5d22a38a789dc"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97d984b1b2f574bc1bb58296d3c0b64b10e95e7026f8716ed6c0b86d4679843f"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:154bddda2a351161474b36dba03bf1463377ec226a13458725183e508840df89"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cb6d161ae94fb35bb518b74bb06b7293299c15ba3bc099dccd6a5b7ae589aee3"}, - {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:90146ab578931e0e2826ee39d0c948d0ea72734378f1898939d18bc9c823fcf9"}, - {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:831ba20b660b39e39e5ac8603e8193f8fce1ee03a42c84ade89c36a251449d80"}, - {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a522510e3434e12aff80187144c6df556bb06fe6b9d01b2ecfbd2b5bfa5c60c"}, - {file = "pyzmq-25.1.0-cp310-cp310-win32.whl", hash = "sha256:be24a5867b8e3b9dd5c241de359a9a5217698ff616ac2daa47713ba2ebe30ad1"}, - {file = "pyzmq-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:5693dcc4f163481cf79e98cf2d7995c60e43809e325b77a7748d8024b1b7bcba"}, - {file = "pyzmq-25.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:13bbe36da3f8aaf2b7ec12696253c0bf6ffe05f4507985a8844a1081db6ec22d"}, - {file = "pyzmq-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:69511d604368f3dc58d4be1b0bad99b61ee92b44afe1cd9b7bd8c5e34ea8248a"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4a983c8694667fd76d793ada77fd36c8317e76aa66eec75be2653cef2ea72883"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:332616f95eb400492103ab9d542b69d5f0ff628b23129a4bc0a2fd48da6e4e0b"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58416db767787aedbfd57116714aad6c9ce57215ffa1c3758a52403f7c68cff5"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cad9545f5801a125f162d09ec9b724b7ad9b6440151b89645241d0120e119dcc"}, - {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d6128d431b8dfa888bf51c22a04d48bcb3d64431caf02b3cb943269f17fd2994"}, - {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b15247c49d8cbea695b321ae5478d47cffd496a2ec5ef47131a9e79ddd7e46c"}, - {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:442d3efc77ca4d35bee3547a8e08e8d4bb88dadb54a8377014938ba98d2e074a"}, - {file = "pyzmq-25.1.0-cp311-cp311-win32.whl", hash = "sha256:65346f507a815a731092421d0d7d60ed551a80d9b75e8b684307d435a5597425"}, - {file = "pyzmq-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8b45d722046fea5a5694cba5d86f21f78f0052b40a4bbbbf60128ac55bfcc7b6"}, - {file = "pyzmq-25.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f45808eda8b1d71308c5416ef3abe958f033fdbb356984fabbfc7887bed76b3f"}, - {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b697774ea8273e3c0460cf0bba16cd85ca6c46dfe8b303211816d68c492e132"}, - {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b324fa769577fc2c8f5efcd429cef5acbc17d63fe15ed16d6dcbac2c5eb00849"}, - {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:5873d6a60b778848ce23b6c0ac26c39e48969823882f607516b91fb323ce80e5"}, - {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = 
"sha256:f0d9e7ba6a815a12c8575ba7887da4b72483e4cfc57179af10c9b937f3f9308f"}, - {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:414b8beec76521358b49170db7b9967d6974bdfc3297f47f7d23edec37329b00"}, - {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:01f06f33e12497dca86353c354461f75275a5ad9eaea181ac0dc1662da8074fa"}, - {file = "pyzmq-25.1.0-cp36-cp36m-win32.whl", hash = "sha256:b5a07c4f29bf7cb0164664ef87e4aa25435dcc1f818d29842118b0ac1eb8e2b5"}, - {file = "pyzmq-25.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:968b0c737797c1809ec602e082cb63e9824ff2329275336bb88bd71591e94a90"}, - {file = "pyzmq-25.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47b915ba666c51391836d7ed9a745926b22c434efa76c119f77bcffa64d2c50c"}, - {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5af31493663cf76dd36b00dafbc839e83bbca8a0662931e11816d75f36155897"}, - {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5489738a692bc7ee9a0a7765979c8a572520d616d12d949eaffc6e061b82b4d1"}, - {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1fc56a0221bdf67cfa94ef2d6ce5513a3d209c3dfd21fed4d4e87eca1822e3a3"}, - {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:75217e83faea9edbc29516fc90c817bc40c6b21a5771ecb53e868e45594826b0"}, - {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3830be8826639d801de9053cf86350ed6742c4321ba4236e4b5568528d7bfed7"}, - {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3575699d7fd7c9b2108bc1c6128641a9a825a58577775ada26c02eb29e09c517"}, - {file = "pyzmq-25.1.0-cp37-cp37m-win32.whl", hash = "sha256:95bd3a998d8c68b76679f6b18f520904af5204f089beebb7b0301d97704634dd"}, - {file = "pyzmq-25.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:dbc466744a2db4b7ca05589f21ae1a35066afada2f803f92369f5877c100ef62"}, - {file = 
"pyzmq-25.1.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:3bed53f7218490c68f0e82a29c92335daa9606216e51c64f37b48eb78f1281f4"}, - {file = "pyzmq-25.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eb52e826d16c09ef87132c6e360e1879c984f19a4f62d8a935345deac43f3c12"}, - {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ddbef8b53cd16467fdbfa92a712eae46dd066aa19780681a2ce266e88fbc7165"}, - {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9301cf1d7fc1ddf668d0abbe3e227fc9ab15bc036a31c247276012abb921b5ff"}, - {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e23a8c3b6c06de40bdb9e06288180d630b562db8ac199e8cc535af81f90e64b"}, - {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4a82faae00d1eed4809c2f18b37f15ce39a10a1c58fe48b60ad02875d6e13d80"}, - {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c8398a1b1951aaa330269c35335ae69744be166e67e0ebd9869bdc09426f3871"}, - {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d40682ac60b2a613d36d8d3a0cd14fbdf8e7e0618fbb40aa9fa7b796c9081584"}, - {file = "pyzmq-25.1.0-cp38-cp38-win32.whl", hash = "sha256:33d5c8391a34d56224bccf74f458d82fc6e24b3213fc68165c98b708c7a69325"}, - {file = "pyzmq-25.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:c66b7ff2527e18554030319b1376d81560ca0742c6e0b17ff1ee96624a5f1afd"}, - {file = "pyzmq-25.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:af56229ea6527a849ac9fb154a059d7e32e77a8cba27e3e62a1e38d8808cb1a5"}, - {file = "pyzmq-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bdca18b94c404af6ae5533cd1bc310c4931f7ac97c148bbfd2cd4bdd62b96253"}, - {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0b6b42f7055bbc562f63f3df3b63e3dd1ebe9727ff0f124c3aa7bcea7b3a00f9"}, - {file = 
"pyzmq-25.1.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c2fc7aad520a97d64ffc98190fce6b64152bde57a10c704b337082679e74f67"}, - {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be86a26415a8b6af02cd8d782e3a9ae3872140a057f1cadf0133de685185c02b"}, - {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:851fb2fe14036cfc1960d806628b80276af5424db09fe5c91c726890c8e6d943"}, - {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2a21fec5c3cea45421a19ccbe6250c82f97af4175bc09de4d6dd78fb0cb4c200"}, - {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bad172aba822444b32eae54c2d5ab18cd7dee9814fd5c7ed026603b8cae2d05f"}, - {file = "pyzmq-25.1.0-cp39-cp39-win32.whl", hash = "sha256:4d67609b37204acad3d566bb7391e0ecc25ef8bae22ff72ebe2ad7ffb7847158"}, - {file = "pyzmq-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:71c7b5896e40720d30cd77a81e62b433b981005bbff0cb2f739e0f8d059b5d99"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4cb27ef9d3bdc0c195b2dc54fcb8720e18b741624686a81942e14c8b67cc61a6"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0c4fc2741e0513b5d5a12fe200d6785bbcc621f6f2278893a9ca7bed7f2efb7d"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fc34fdd458ff77a2a00e3c86f899911f6f269d393ca5675842a6e92eea565bae"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8751f9c1442624da391bbd92bd4b072def6d7702a9390e4479f45c182392ff78"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:6581e886aec3135964a302a0f5eb68f964869b9efd1dbafdebceaaf2934f8a68"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5482f08d2c3c42b920e8771ae8932fbaa0a67dff925fc476996ddd8155a170f3"}, - {file = 
"pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7fbcafa3ea16d1de1f213c226005fea21ee16ed56134b75b2dede5a2129e62"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:adecf6d02b1beab8d7c04bc36f22bb0e4c65a35eb0b4750b91693631d4081c70"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6d39e42a0aa888122d1beb8ec0d4ddfb6c6b45aecb5ba4013c27e2f28657765"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7018289b402ebf2b2c06992813523de61d4ce17bd514c4339d8f27a6f6809492"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9e68ae9864d260b18f311b68d29134d8776d82e7f5d75ce898b40a88df9db30f"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e21cc00e4debe8f54c3ed7b9fcca540f46eee12762a9fa56feb8512fd9057161"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f666ae327a6899ff560d741681fdcdf4506f990595201ed39b44278c471ad98"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f5efcc29056dfe95e9c9db0dfbb12b62db9c4ad302f812931b6d21dd04a9119"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:48e5e59e77c1a83162ab3c163fc01cd2eebc5b34560341a67421b09be0891287"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:108c96ebbd573d929740d66e4c3d1bdf31d5cde003b8dc7811a3c8c5b0fc173b"}, - {file = "pyzmq-25.1.0.tar.gz", hash = "sha256:80c41023465d36280e801564a69cbfce8ae85ff79b080e1913f6e90481fb8957"}, + {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:381469297409c5adf9a0e884c5eb5186ed33137badcbbb0560b86e910a2f1e76"}, + {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:955215ed0604dac5b01907424dfa28b40f2b2292d6493445dd34d0dfa72586a8"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:985bbb1316192b98f32e25e7b9958088431d853ac63aca1d2c236f40afb17c83"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:afea96f64efa98df4da6958bae37f1cbea7932c35878b185e5982821bc883369"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76705c9325d72a81155bb6ab48d4312e0032bf045fb0754889133200f7a0d849"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77a41c26205d2353a4c94d02be51d6cbdf63c06fbc1295ea57dad7e2d3381b71"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:12720a53e61c3b99d87262294e2b375c915fea93c31fc2336898c26d7aed34cd"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:57459b68e5cd85b0be8184382cefd91959cafe79ae019e6b1ae6e2ba8a12cda7"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:292fe3fc5ad4a75bc8df0dfaee7d0babe8b1f4ceb596437213821f761b4589f9"}, + {file = "pyzmq-25.1.1-cp310-cp310-win32.whl", hash = "sha256:35b5ab8c28978fbbb86ea54958cd89f5176ce747c1fb3d87356cf698048a7790"}, + {file = "pyzmq-25.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:11baebdd5fc5b475d484195e49bae2dc64b94a5208f7c89954e9e354fc609d8f"}, + {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:d20a0ddb3e989e8807d83225a27e5c2eb2260eaa851532086e9e0fa0d5287d83"}, + {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e1c1be77bc5fb77d923850f82e55a928f8638f64a61f00ff18a67c7404faf008"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d89528b4943d27029a2818f847c10c2cecc79fa9590f3cb1860459a5be7933eb"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:90f26dc6d5f241ba358bef79be9ce06de58d477ca8485e3291675436d3827cf8"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2b92812bd214018e50b6380ea3ac0c8bb01ac07fcc14c5f86a5bb25e74026e9"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:2f957ce63d13c28730f7fd6b72333814221c84ca2421298f66e5143f81c9f91f"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:047a640f5c9c6ade7b1cc6680a0e28c9dd5a0825135acbd3569cc96ea00b2505"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7f7e58effd14b641c5e4dec8c7dab02fb67a13df90329e61c869b9cc607ef752"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c2910967e6ab16bf6fbeb1f771c89a7050947221ae12a5b0b60f3bca2ee19bca"}, + {file = "pyzmq-25.1.1-cp311-cp311-win32.whl", hash = "sha256:76c1c8efb3ca3a1818b837aea423ff8a07bbf7aafe9f2f6582b61a0458b1a329"}, + {file = "pyzmq-25.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:44e58a0554b21fc662f2712814a746635ed668d0fbc98b7cb9d74cb798d202e6"}, + {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:e1ffa1c924e8c72778b9ccd386a7067cddf626884fd8277f503c48bb5f51c762"}, + {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1af379b33ef33757224da93e9da62e6471cf4a66d10078cf32bae8127d3d0d4a"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cff084c6933680d1f8b2f3b4ff5bbb88538a4aac00d199ac13f49d0698727ecb"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2400a94f7dd9cb20cd012951a0cbf8249e3d554c63a9c0cdfd5cbb6c01d2dec"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d81f1ddae3858b8299d1da72dd7d19dd36aab654c19671aa8a7e7fb02f6638a"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = 
"sha256:255ca2b219f9e5a3a9ef3081512e1358bd4760ce77828e1028b818ff5610b87b"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a882ac0a351288dd18ecae3326b8a49d10c61a68b01419f3a0b9a306190baf69"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:724c292bb26365659fc434e9567b3f1adbdb5e8d640c936ed901f49e03e5d32e"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ca1ed0bb2d850aa8471387882247c68f1e62a4af0ce9c8a1dbe0d2bf69e41fb"}, + {file = "pyzmq-25.1.1-cp312-cp312-win32.whl", hash = "sha256:b3451108ab861040754fa5208bca4a5496c65875710f76789a9ad27c801a0075"}, + {file = "pyzmq-25.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:eadbefd5e92ef8a345f0525b5cfd01cf4e4cc651a2cffb8f23c0dd184975d787"}, + {file = "pyzmq-25.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:db0b2af416ba735c6304c47f75d348f498b92952f5e3e8bff449336d2728795d"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c133e93b405eb0d36fa430c94185bdd13c36204a8635470cccc200723c13bb"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:273bc3959bcbff3f48606b28229b4721716598d76b5aaea2b4a9d0ab454ec062"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cbc8df5c6a88ba5ae385d8930da02201165408dde8d8322072e3e5ddd4f68e22"}, + {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:18d43df3f2302d836f2a56f17e5663e398416e9dd74b205b179065e61f1a6edf"}, + {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:73461eed88a88c866656e08f89299720a38cb4e9d34ae6bf5df6f71102570f2e"}, + {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34c850ce7976d19ebe7b9d4b9bb8c9dfc7aac336c0958e2651b88cbd46682123"}, + {file = "pyzmq-25.1.1-cp36-cp36m-win32.whl", hash = "sha256:d2045d6d9439a0078f2a34b57c7b18c4a6aef0bee37f22e4ec9f32456c852c71"}, + {file = 
"pyzmq-25.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:458dea649f2f02a0b244ae6aef8dc29325a2810aa26b07af8374dc2a9faf57e3"}, + {file = "pyzmq-25.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7cff25c5b315e63b07a36f0c2bab32c58eafbe57d0dce61b614ef4c76058c115"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1579413ae492b05de5a6174574f8c44c2b9b122a42015c5292afa4be2507f28"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3d0a409d3b28607cc427aa5c30a6f1e4452cc44e311f843e05edb28ab5e36da0"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21eb4e609a154a57c520e3d5bfa0d97e49b6872ea057b7c85257b11e78068222"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:034239843541ef7a1aee0c7b2cb7f6aafffb005ede965ae9cbd49d5ff4ff73cf"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f8115e303280ba09f3898194791a153862cbf9eef722ad8f7f741987ee2a97c7"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1a5d26fe8f32f137e784f768143728438877d69a586ddeaad898558dc971a5ae"}, + {file = "pyzmq-25.1.1-cp37-cp37m-win32.whl", hash = "sha256:f32260e556a983bc5c7ed588d04c942c9a8f9c2e99213fec11a031e316874c7e"}, + {file = "pyzmq-25.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:abf34e43c531bbb510ae7e8f5b2b1f2a8ab93219510e2b287a944432fad135f3"}, + {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:87e34f31ca8f168c56d6fbf99692cc8d3b445abb5bfd08c229ae992d7547a92a"}, + {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c9c6c9b2c2f80747a98f34ef491c4d7b1a8d4853937bb1492774992a120f475d"}, + {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5619f3f5a4db5dbb572b095ea3cb5cc035335159d9da950830c9c4db2fbb6995"}, + {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:5a34d2395073ef862b4032343cf0c32a712f3ab49d7ec4f42c9661e0294d106f"}, + {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25f0e6b78220aba09815cd1f3a32b9c7cb3e02cb846d1cfc526b6595f6046618"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3669cf8ee3520c2f13b2e0351c41fea919852b220988d2049249db10046a7afb"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2d163a18819277e49911f7461567bda923461c50b19d169a062536fffe7cd9d2"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:df27ffddff4190667d40de7beba4a950b5ce78fe28a7dcc41d6f8a700a80a3c0"}, + {file = "pyzmq-25.1.1-cp38-cp38-win32.whl", hash = "sha256:a382372898a07479bd34bda781008e4a954ed8750f17891e794521c3e21c2e1c"}, + {file = "pyzmq-25.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:52533489f28d62eb1258a965f2aba28a82aa747202c8fa5a1c7a43b5db0e85c1"}, + {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:03b3f49b57264909aacd0741892f2aecf2f51fb053e7d8ac6767f6c700832f45"}, + {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:330f9e188d0d89080cde66dc7470f57d1926ff2fb5576227f14d5be7ab30b9fa"}, + {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2ca57a5be0389f2a65e6d3bb2962a971688cbdd30b4c0bd188c99e39c234f414"}, + {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d457aed310f2670f59cc5b57dcfced452aeeed77f9da2b9763616bd57e4dbaae"}, + {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c56d748ea50215abef7030c72b60dd723ed5b5c7e65e7bc2504e77843631c1a6"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8f03d3f0d01cb5a018debeb412441996a517b11c5c17ab2001aa0597c6d6882c"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:820c4a08195a681252f46926de10e29b6bbf3e17b30037bd4250d72dd3ddaab8"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17ef5f01d25b67ca8f98120d5fa1d21efe9611604e8eb03a5147360f517dd1e2"}, + {file = "pyzmq-25.1.1-cp39-cp39-win32.whl", hash = "sha256:04ccbed567171579ec2cebb9c8a3e30801723c575601f9a990ab25bcac6b51e2"}, + {file = "pyzmq-25.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:e61f091c3ba0c3578411ef505992d356a812fb200643eab27f4f70eed34a29ef"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ade6d25bb29c4555d718ac6d1443a7386595528c33d6b133b258f65f963bb0f6"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0c95ddd4f6e9fca4e9e3afaa4f9df8552f0ba5d1004e89ef0a68e1f1f9807c7"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48e466162a24daf86f6b5ca72444d2bf39a5e58da5f96370078be67c67adc978"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abc719161780932c4e11aaebb203be3d6acc6b38d2f26c0f523b5b59d2fc1996"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ccf825981640b8c34ae54231b7ed00271822ea1c6d8ba1090ebd4943759abf5"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c2f20ce161ebdb0091a10c9ca0372e023ce24980d0e1f810f519da6f79c60800"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:deee9ca4727f53464daf089536e68b13e6104e84a37820a88b0a057b97bba2d2"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:aa8d6cdc8b8aa19ceb319aaa2b660cdaccc533ec477eeb1309e2a291eaacc43a"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:019e59ef5c5256a2c7378f2fb8560fc2a9ff1d315755204295b2eab96b254d0a"}, + {file = 
"pyzmq-25.1.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b9af3757495c1ee3b5c4e945c1df7be95562277c6e5bccc20a39aec50f826cd0"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:548d6482dc8aadbe7e79d1b5806585c8120bafa1ef841167bc9090522b610fa6"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:057e824b2aae50accc0f9a0570998adc021b372478a921506fddd6c02e60308e"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2243700cc5548cff20963f0ca92d3e5e436394375ab8a354bbea2b12911b20b0"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79986f3b4af059777111409ee517da24a529bdbd46da578b33f25580adcff728"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:11d58723d44d6ed4dd677c5615b2ffb19d5c426636345567d6af82be4dff8a55"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:49d238cf4b69652257db66d0c623cd3e09b5d2e9576b56bc067a396133a00d4a"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fedbdc753827cf014c01dbbee9c3be17e5a208dcd1bf8641ce2cd29580d1f0d4"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc16ac425cc927d0a57d242589f87ee093884ea4804c05a13834d07c20db203c"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11c1d2aed9079c6b0c9550a7257a836b4a637feb334904610f06d70eb44c56d2"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e8a701123029cc240cea61dd2d16ad57cab4691804143ce80ecd9286b464d180"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:61706a6b6c24bdece85ff177fec393545a3191eeda35b07aaa1458a027ad1304"}, + {file = "pyzmq-25.1.1.tar.gz", hash = 
"sha256:259c22485b71abacdfa8bf79720cd7bcf4b9d128b30ea554f01ae71fdbfdaa23"}, ] [package.dependencies] @@ -3285,14 +3366,14 @@ networkx = ["networkx (>=2.0.0,<3.0.0)"] [[package]] name = "referencing" -version = "0.29.1" +version = "0.30.2" description = "JSON Referencing + Python" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.29.1-py3-none-any.whl", hash = "sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f"}, - {file = "referencing-0.29.1.tar.gz", hash = "sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e"}, + {file = "referencing-0.30.2-py3-none-any.whl", hash = "sha256:449b6669b6121a9e96a7f9e410b245d471e8d48964c67113ce9afe50c8dd7bdf"}, + {file = "referencing-0.30.2.tar.gz", hash = "sha256:794ad8003c65938edcdbc027f1933215e0d0ccc0291e3ce20a4d87432b59efc0"}, ] [package.dependencies] @@ -3301,100 +3382,100 @@ rpds-py = ">=0.7.0" [[package]] name = "regex" -version = "2023.6.3" +version = "2023.10.3" description = "Alternative regular expression module, to replace re." 
category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "regex-2023.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:824bf3ac11001849aec3fa1d69abcb67aac3e150a933963fb12bda5151fe1bfd"}, - {file = "regex-2023.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05ed27acdf4465c95826962528f9e8d41dbf9b1aa8531a387dee6ed215a3e9ef"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b49c764f88a79160fa64f9a7b425620e87c9f46095ef9c9920542ab2495c8bc"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e3f1316c2293e5469f8f09dc2d76efb6c3982d3da91ba95061a7e69489a14ef"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43e1dd9d12df9004246bacb79a0e5886b3b6071b32e41f83b0acbf293f820ee8"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4959e8bcbfda5146477d21c3a8ad81b185cd252f3d0d6e4724a5ef11c012fb06"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4dd387354dc83a3bff67127a124c21116feb0d2ef536805c454721c5d7993d"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2239d95d8e243658b8dbb36b12bd10c33ad6e6933a54d36ff053713f129aa536"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:890e5a11c97cf0d0c550eb661b937a1e45431ffa79803b942a057c4fb12a2da2"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a8105e9af3b029f243ab11ad47c19b566482c150c754e4c717900a798806b222"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:25be746a8ec7bc7b082783216de8e9473803706723b3f6bef34b3d0ed03d57e2"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:3676f1dd082be28b1266c93f618ee07741b704ab7b68501a173ce7d8d0d0ca18"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10cb847aeb1728412c666ab2e2000ba6f174f25b2bdc7292e7dd71b16db07568"}, - {file = "regex-2023.6.3-cp310-cp310-win32.whl", hash = "sha256:dbbbfce33cd98f97f6bffb17801b0576e653f4fdb1d399b2ea89638bc8d08ae1"}, - {file = "regex-2023.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:c5f8037000eb21e4823aa485149f2299eb589f8d1fe4b448036d230c3f4e68e0"}, - {file = "regex-2023.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c123f662be8ec5ab4ea72ea300359023a5d1df095b7ead76fedcd8babbedf969"}, - {file = "regex-2023.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9edcbad1f8a407e450fbac88d89e04e0b99a08473f666a3f3de0fd292badb6aa"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcba6dae7de533c876255317c11f3abe4907ba7d9aa15d13e3d9710d4315ec0e"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29cdd471ebf9e0f2fb3cac165efedc3c58db841d83a518b082077e612d3ee5df"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b74fbbf6cbbf9dbce20eb9b5879469e97aeeaa874145517563cca4029db65c"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c29ca1bd61b16b67be247be87390ef1d1ef702800f91fbd1991f5c4421ebae8"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77f09bc4b55d4bf7cc5eba785d87001d6757b7c9eec237fe2af57aba1a071d9"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ea353ecb6ab5f7e7d2f4372b1e779796ebd7b37352d290096978fea83c4dba0c"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:10590510780b7541969287512d1b43f19f965c2ece6c9b1c00fc367b29d8dce7"}, - {file = 
"regex-2023.6.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2fbd6236aae3b7f9d514312cdb58e6494ee1c76a9948adde6eba33eb1c4264f"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:6b2675068c8b56f6bfd5a2bda55b8accbb96c02fd563704732fd1c95e2083461"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74419d2b50ecb98360cfaa2974da8689cb3b45b9deff0dcf489c0d333bcc1477"}, - {file = "regex-2023.6.3-cp311-cp311-win32.whl", hash = "sha256:fb5ec16523dc573a4b277663a2b5a364e2099902d3944c9419a40ebd56a118f9"}, - {file = "regex-2023.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:09e4a1a6acc39294a36b7338819b10baceb227f7f7dbbea0506d419b5a1dd8af"}, - {file = "regex-2023.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0654bca0cdf28a5956c83839162692725159f4cda8d63e0911a2c0dc76166525"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b6a3ceb5ca952e66550a4532cef94c9a0c80dc156c4cc343041951aec1697"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87b2a5bb5e78ee0ad1de71c664d6eb536dc3947a46a69182a90f4410f5e3f7dd"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6343c6928282c1f6a9db41f5fd551662310e8774c0e5ebccb767002fcf663ca9"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6192d5af2ccd2a38877bfef086d35e6659566a335b1492786ff254c168b1693"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74390d18c75054947e4194019077e243c06fbb62e541d8817a0fa822ea310c14"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:742e19a90d9bb2f4a6cf2862b8b06dea5e09b96c9f2df1779e53432d7275331f"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_aarch64.whl", 
hash = "sha256:8abbc5d54ea0ee80e37fef009e3cec5dafd722ed3c829126253d3e22f3846f1e"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c2b867c17a7a7ae44c43ebbeb1b5ff406b3e8d5b3e14662683e5e66e6cc868d3"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d831c2f8ff278179705ca59f7e8524069c1a989e716a1874d6d1aab6119d91d1"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ee2d1a9a253b1729bb2de27d41f696ae893507c7db224436abe83ee25356f5c1"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:61474f0b41fe1a80e8dfa70f70ea1e047387b7cd01c85ec88fa44f5d7561d787"}, - {file = "regex-2023.6.3-cp36-cp36m-win32.whl", hash = "sha256:0b71e63226e393b534105fcbdd8740410dc6b0854c2bfa39bbda6b0d40e59a54"}, - {file = "regex-2023.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bbb02fd4462f37060122e5acacec78e49c0fbb303c30dd49c7f493cf21fc5b27"}, - {file = "regex-2023.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b862c2b9d5ae38a68b92e215b93f98d4c5e9454fa36aae4450f61dd33ff48487"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:976d7a304b59ede34ca2921305b57356694f9e6879db323fd90a80f865d355a3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:83320a09188e0e6c39088355d423aa9d056ad57a0b6c6381b300ec1a04ec3d16"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9427a399501818a7564f8c90eced1e9e20709ece36be701f394ada99890ea4b3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178bbc1b2ec40eaca599d13c092079bf529679bf0371c602edaa555e10b41c3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:837328d14cde912af625d5f303ec29f7e28cdab588674897baafaf505341f2fc"}, - {file = 
"regex-2023.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d44dc13229905ae96dd2ae2dd7cebf824ee92bc52e8cf03dcead37d926da019"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d54af539295392611e7efbe94e827311eb8b29668e2b3f4cadcfe6f46df9c777"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7117d10690c38a622e54c432dfbbd3cbd92f09401d622902c32f6d377e2300ee"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bb60b503ec8a6e4e3e03a681072fa3a5adcbfa5479fa2d898ae2b4a8e24c4591"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:65ba8603753cec91c71de423a943ba506363b0e5c3fdb913ef8f9caa14b2c7e0"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:271f0bdba3c70b58e6f500b205d10a36fb4b58bd06ac61381b68de66442efddb"}, - {file = "regex-2023.6.3-cp37-cp37m-win32.whl", hash = "sha256:9beb322958aaca059f34975b0df135181f2e5d7a13b84d3e0e45434749cb20f7"}, - {file = "regex-2023.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fea75c3710d4f31389eed3c02f62d0b66a9da282521075061ce875eb5300cf23"}, - {file = "regex-2023.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f56fcb7ff7bf7404becdfc60b1e81a6d0561807051fd2f1860b0d0348156a07"}, - {file = "regex-2023.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d2da3abc88711bce7557412310dfa50327d5769a31d1c894b58eb256459dc289"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99b50300df5add73d307cf66abea093304a07eb017bce94f01e795090dea87c"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5708089ed5b40a7b2dc561e0c8baa9535b77771b64a8330b684823cfd5116036"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:687ea9d78a4b1cf82f8479cab23678aff723108df3edeac098e5b2498879f4a7"}, - 
{file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3850beab9f527f06ccc94b446c864059c57651b3f911fddb8d9d3ec1d1b25d"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8915cc96abeb8983cea1df3c939e3c6e1ac778340c17732eb63bb96247b91d2"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:841d6e0e5663d4c7b4c8099c9997be748677d46cbf43f9f471150e560791f7ff"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9edce5281f965cf135e19840f4d93d55b3835122aa76ccacfd389e880ba4cf82"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b956231ebdc45f5b7a2e1f90f66a12be9610ce775fe1b1d50414aac1e9206c06"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:36efeba71c6539d23c4643be88295ce8c82c88bbd7c65e8a24081d2ca123da3f"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:cf67ca618b4fd34aee78740bea954d7c69fdda419eb208c2c0c7060bb822d747"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b4598b1897837067a57b08147a68ac026c1e73b31ef6e36deeeb1fa60b2933c9"}, - {file = "regex-2023.6.3-cp38-cp38-win32.whl", hash = "sha256:f415f802fbcafed5dcc694c13b1292f07fe0befdb94aa8a52905bd115ff41e88"}, - {file = "regex-2023.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:d4f03bb71d482f979bda92e1427f3ec9b220e62a7dd337af0aa6b47bf4498f72"}, - {file = "regex-2023.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccf91346b7bd20c790310c4147eee6ed495a54ddb6737162a36ce9dbef3e4751"}, - {file = "regex-2023.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b28f5024a3a041009eb4c333863d7894d191215b39576535c6734cd88b0fcb68"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e0bb18053dfcfed432cc3ac632b5e5e5c5b7e55fb3f8090e867bfd9b054dbcbf"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5bfb3004f2144a084a16ce19ca56b8ac46e6fd0651f54269fc9e230edb5e4a"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c6b48d0fa50d8f4df3daf451be7f9689c2bde1a52b1225c5926e3f54b6a9ed1"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051da80e6eeb6e239e394ae60704d2b566aa6a7aed6f2890a7967307267a5dc6"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4c3b7fa4cdaa69268748665a1a6ff70c014d39bb69c50fda64b396c9116cf77"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:457b6cce21bee41ac292d6753d5e94dcbc5c9e3e3a834da285b0bde7aa4a11e9"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aad51907d74fc183033ad796dd4c2e080d1adcc4fd3c0fd4fd499f30c03011cd"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0385e73da22363778ef2324950e08b689abdf0b108a7d8decb403ad7f5191938"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c6a57b742133830eec44d9b2290daf5cbe0a2f1d6acee1b3c7b1c7b2f3606df7"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3e5219bf9e75993d73ab3d25985c857c77e614525fac9ae02b1bebd92f7cecac"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e5087a3c59eef624a4591ef9eaa6e9a8d8a94c779dade95d27c0bc24650261cd"}, - {file = "regex-2023.6.3-cp39-cp39-win32.whl", hash = "sha256:20326216cc2afe69b6e98528160b225d72f85ab080cbdf0b11528cbbaba2248f"}, - {file = "regex-2023.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:bdff5eab10e59cf26bc479f565e25ed71a7d041d1ded04ccf9aee1d9f208487a"}, - {file = 
"regex-2023.6.3.tar.gz", hash = "sha256:72d1a25bf36d2050ceb35b517afe13864865268dfb45910e2e17a84be6cbfeb0"}, + {file = "regex-2023.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c34d4f73ea738223a094d8e0ffd6d2c1a1b4c175da34d6b0de3d8d69bee6bcc"}, + {file = "regex-2023.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f4e49fc3ce020f65411432183e6775f24e02dff617281094ba6ab079ef0915"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cd1bccf99d3ef1ab6ba835308ad85be040e6a11b0977ef7ea8c8005f01a3c29"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81dce2ddc9f6e8f543d94b05d56e70d03a0774d32f6cca53e978dc01e4fc75b8"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c6b4d23c04831e3ab61717a707a5d763b300213db49ca680edf8bf13ab5d91b"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15ad0aee158a15e17e0495e1e18741573d04eb6da06d8b84af726cfc1ed02ee"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6239d4e2e0b52c8bd38c51b760cd870069f0bdf99700a62cd509d7a031749a55"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4a8bf76e3182797c6b1afa5b822d1d5802ff30284abe4599e1247be4fd6b03be"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9c727bbcf0065cbb20f39d2b4f932f8fa1631c3e01fcedc979bd4f51fe051c5"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3ccf2716add72f80714b9a63899b67fa711b654be3fcdd34fa391d2d274ce767"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:107ac60d1bfdc3edb53be75e2a52aff7481b92817cfdddd9b4519ccf0e54a6ff"}, + {file = 
"regex-2023.10.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:00ba3c9818e33f1fa974693fb55d24cdc8ebafcb2e4207680669d8f8d7cca79a"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0a47efb1dbef13af9c9a54a94a0b814902e547b7f21acb29434504d18f36e3a"}, + {file = "regex-2023.10.3-cp310-cp310-win32.whl", hash = "sha256:36362386b813fa6c9146da6149a001b7bd063dabc4d49522a1f7aa65b725c7ec"}, + {file = "regex-2023.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:c65a3b5330b54103e7d21cac3f6bf3900d46f6d50138d73343d9e5b2900b2353"}, + {file = "regex-2023.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90a79bce019c442604662d17bf69df99090e24cdc6ad95b18b6725c2988a490e"}, + {file = "regex-2023.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c7964c2183c3e6cce3f497e3a9f49d182e969f2dc3aeeadfa18945ff7bdd7051"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef80829117a8061f974b2fda8ec799717242353bff55f8a29411794d635d964"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5addc9d0209a9afca5fc070f93b726bf7003bd63a427f65ef797a931782e7edc"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c148bec483cc4b421562b4bcedb8e28a3b84fcc8f0aa4418e10898f3c2c0eb9b"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d1f21af4c1539051049796a0f50aa342f9a27cde57318f2fc41ed50b0dbc4ac"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b9ac09853b2a3e0d0082104036579809679e7715671cfbf89d83c1cb2a30f58"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ebedc192abbc7fd13c5ee800e83a6df252bec691eb2c4bedc9f8b2e2903f5e2a"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:d8a993c0a0ffd5f2d3bda23d0cd75e7086736f8f8268de8a82fbc4bd0ac6791e"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:be6b7b8d42d3090b6c80793524fa66c57ad7ee3fe9722b258aec6d0672543fd0"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4023e2efc35a30e66e938de5aef42b520c20e7eda7bb5fb12c35e5d09a4c43f6"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54"}, + {file = "regex-2023.10.3-cp311-cp311-win32.whl", hash = "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2"}, + {file = "regex-2023.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c"}, + {file = "regex-2023.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037"}, + {file = "regex-2023.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9"}, + {file = 
"regex-2023.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a"}, + {file = "regex-2023.10.3-cp312-cp312-win32.whl", hash = "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a"}, + {file = "regex-2023.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b"}, + {file = "regex-2023.10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a3ee019a9befe84fa3e917a2dd378807e423d013377a884c1970a3c2792d293"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76066d7ff61ba6bf3cb5efe2428fc82aac91802844c022d849a1f0f53820502d"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe50b61bab1b1ec260fa7cd91106fa9fece57e6beba05630afe27c71259c59b"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fd88f373cb71e6b59b7fa597e47e518282455c2734fd4306a05ca219a1991b0"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ab05a182c7937fb374f7e946f04fb23a0c0699c0450e9fb02ef567412d2fa3"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:dac37cf08fcf2094159922edc7a2784cfcc5c70f8354469f79ed085f0328ebdf"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54ddd0bb8fb626aa1f9ba7b36629564544954fff9669b15da3610c22b9a0991"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3367007ad1951fde612bf65b0dffc8fd681a4ab98ac86957d16491400d661302"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:16f8740eb6dbacc7113e3097b0a36065a02e37b47c936b551805d40340fb9971"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f4f2ca6df64cbdd27f27b34f35adb640b5d2d77264228554e68deda54456eb11"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:39807cbcbe406efca2a233884e169d056c35aa7e9f343d4e78665246a332f597"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7eece6fbd3eae4a92d7c748ae825cbc1ee41a89bb1c3db05b5578ed3cfcfd7cb"}, + {file = "regex-2023.10.3-cp37-cp37m-win32.whl", hash = "sha256:ce615c92d90df8373d9e13acddd154152645c0dc060871abf6bd43809673d20a"}, + {file = "regex-2023.10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f649fa32fe734c4abdfd4edbb8381c74abf5f34bc0b3271ce687b23729299ed"}, + {file = "regex-2023.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b98b7681a9437262947f41c7fac567c7e1f6eddd94b0483596d320092004533"}, + {file = "regex-2023.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:91dc1d531f80c862441d7b66c4505cd6ea9d312f01fb2f4654f40c6fdf5cc37a"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82fcc1f1cc3ff1ab8a57ba619b149b907072e750815c5ba63e7aa2e1163384a4"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7979b834ec7a33aafae34a90aad9f914c41fd6eaa8474e66953f3f6f7cbd4368"}, + {file = 
"regex-2023.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef71561f82a89af6cfcbee47f0fabfdb6e63788a9258e913955d89fdd96902ab"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd829712de97753367153ed84f2de752b86cd1f7a88b55a3a775eb52eafe8a94"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00e871d83a45eee2f8688d7e6849609c2ca2a04a6d48fba3dff4deef35d14f07"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:706e7b739fdd17cb89e1fbf712d9dc21311fc2333f6d435eac2d4ee81985098c"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cc3f1c053b73f20c7ad88b0d1d23be7e7b3901229ce89f5000a8399746a6e039"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f85739e80d13644b981a88f529d79c5bdf646b460ba190bffcaf6d57b2a9863"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:741ba2f511cc9626b7561a440f87d658aabb3d6b744a86a3c025f866b4d19e7f"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e77c90ab5997e85901da85131fd36acd0ed2221368199b65f0d11bca44549711"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:979c24cbefaf2420c4e377ecd1f165ea08cc3d1fbb44bdc51bccbbf7c66a2cb4"}, + {file = "regex-2023.10.3-cp38-cp38-win32.whl", hash = "sha256:58837f9d221744d4c92d2cf7201c6acd19623b50c643b56992cbd2b745485d3d"}, + {file = "regex-2023.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:c55853684fe08d4897c37dfc5faeff70607a5f1806c8be148f1695be4a63414b"}, + {file = "regex-2023.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c54e23836650bdf2c18222c87f6f840d4943944146ca479858404fedeb9f9af"}, + {file = "regex-2023.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:69c0771ca5653c7d4b65203cbfc5e66db9375f1078689459fe196fe08b7b4930"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ac965a998e1388e6ff2e9781f499ad1eaa41e962a40d11c7823c9952c77123e"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c0e8fae5b27caa34177bdfa5a960c46ff2f78ee2d45c6db15ae3f64ecadde14"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c56c3d47da04f921b73ff9415fbaa939f684d47293f071aa9cbb13c94afc17d"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef1e014eed78ab650bef9a6a9cbe50b052c0aebe553fb2881e0453717573f52"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d29338556a59423d9ff7b6eb0cb89ead2b0875e08fe522f3e068b955c3e7b59b"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9c6d0ced3c06d0f183b73d3c5920727268d2201aa0fe6d55c60d68c792ff3588"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:994645a46c6a740ee8ce8df7911d4aee458d9b1bc5639bc968226763d07f00fa"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:66e2fe786ef28da2b28e222c89502b2af984858091675044d93cb50e6f46d7af"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:11175910f62b2b8c055f2b089e0fedd694fe2be3941b3e2633653bc51064c528"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:06e9abc0e4c9ab4779c74ad99c3fc10d3967d03114449acc2c2762ad4472b8ca"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fb02e4257376ae25c6dd95a5aec377f9b18c09be6ebdefa7ad209b9137b73d48"}, + {file = "regex-2023.10.3-cp39-cp39-win32.whl", hash = 
"sha256:3b2c3502603fab52d7619b882c25a6850b766ebd1b18de3df23b2f939360e1bd"}, + {file = "regex-2023.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:adbccd17dcaff65704c856bd29951c58a1bd4b2b0f8ad6b826dbd543fe740988"}, + {file = "regex-2023.10.3.tar.gz", hash = "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f"}, ] [[package]] @@ -3467,109 +3548,111 @@ files = [ [[package]] name = "rpds-py" -version = "0.8.10" +version = "0.10.6" description = "Python bindings to Rust's persistent data structures (rpds)" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.8.10-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711"}, - {file = "rpds_py-0.8.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181"}, - {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1"}, - {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8"}, - {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad"}, - {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b"}, - {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c"}, - {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8"}, - {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0"}, - {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451"}, - {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0"}, - {file = "rpds_py-0.8.10-cp310-none-win32.whl", hash = "sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84"}, - {file = "rpds_py-0.8.10-cp310-none-win_amd64.whl", hash = "sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e"}, - {file = "rpds_py-0.8.10-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7"}, - {file = "rpds_py-0.8.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb"}, - {file = 
"rpds_py-0.8.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe"}, - {file = "rpds_py-0.8.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0"}, - {file = "rpds_py-0.8.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8"}, - {file = "rpds_py-0.8.10-cp311-none-win32.whl", hash = "sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9"}, - {file = "rpds_py-0.8.10-cp311-none-win_amd64.whl", hash = "sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055"}, - {file = "rpds_py-0.8.10-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2"}, - {file = "rpds_py-0.8.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58"}, - {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346"}, - {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7"}, - {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4"}, - {file = "rpds_py-0.8.10-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7"}, - {file = "rpds_py-0.8.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873"}, - {file = "rpds_py-0.8.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52"}, - {file = 
"rpds_py-0.8.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38"}, - {file = "rpds_py-0.8.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b"}, - {file = "rpds_py-0.8.10-cp38-none-win32.whl", hash = "sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6"}, - {file = "rpds_py-0.8.10-cp38-none-win_amd64.whl", hash = "sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6"}, - {file = "rpds_py-0.8.10-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8"}, - {file = "rpds_py-0.8.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8"}, - {file = "rpds_py-0.8.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d"}, - {file = 
"rpds_py-0.8.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a"}, - {file = "rpds_py-0.8.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2"}, - {file = "rpds_py-0.8.10-cp39-none-win32.whl", hash = "sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49"}, - {file = "rpds_py-0.8.10-cp39-none-win_amd64.whl", hash = "sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = 
"sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = 
"sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734"}, - {file = "rpds_py-0.8.10.tar.gz", hash = "sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4"}, + {file = "rpds_py-0.10.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:6bdc11f9623870d75692cc33c59804b5a18d7b8a4b79ef0b00b773a27397d1f6"}, + {file = "rpds_py-0.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:26857f0f44f0e791f4a266595a7a09d21f6b589580ee0585f330aaccccb836e3"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7f5e15c953ace2e8dde9824bdab4bec50adb91a5663df08d7d994240ae6fa31"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61fa268da6e2e1cd350739bb61011121fa550aa2545762e3dc02ea177ee4de35"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c48f3fbc3e92c7dd6681a258d22f23adc2eb183c8cb1557d2fcc5a024e80b094"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0503c5b681566e8b722fe8c4c47cce5c7a51f6935d5c7012c4aefe952a35eed"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:734c41f9f57cc28658d98270d3436dba65bed0cfc730d115b290e970150c540d"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a5d7ed104d158c0042a6a73799cf0eb576dfd5fc1ace9c47996e52320c37cb7c"}, + {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e3df0bc35e746cce42579826b89579d13fd27c3d5319a6afca9893a9b784ff1b"}, + {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:73e0a78a9b843b8c2128028864901f55190401ba38aae685350cf69b98d9f7c9"}, + {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5ed505ec6305abd2c2c9586a7b04fbd4baf42d4d684a9c12ec6110deefe2a063"}, + {file = "rpds_py-0.10.6-cp310-none-win32.whl", hash = 
"sha256:d97dd44683802000277bbf142fd9f6b271746b4846d0acaf0cefa6b2eaf2a7ad"}, + {file = "rpds_py-0.10.6-cp310-none-win_amd64.whl", hash = "sha256:b455492cab07107bfe8711e20cd920cc96003e0da3c1f91297235b1603d2aca7"}, + {file = "rpds_py-0.10.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:e8cdd52744f680346ff8c1ecdad5f4d11117e1724d4f4e1874f3a67598821069"}, + {file = "rpds_py-0.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66414dafe4326bca200e165c2e789976cab2587ec71beb80f59f4796b786a238"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc435d059f926fdc5b05822b1be4ff2a3a040f3ae0a7bbbe672babb468944722"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8e7f2219cb72474571974d29a191714d822e58be1eb171f229732bc6fdedf0ac"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3953c6926a63f8ea5514644b7afb42659b505ece4183fdaaa8f61d978754349e"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2bb2e4826be25e72013916eecd3d30f66fd076110de09f0e750163b416500721"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bf347b495b197992efc81a7408e9a83b931b2f056728529956a4d0858608b80"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:102eac53bb0bf0f9a275b438e6cf6904904908562a1463a6fc3323cf47d7a532"}, + {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40f93086eef235623aa14dbddef1b9fb4b22b99454cb39a8d2e04c994fb9868c"}, + {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e22260a4741a0e7a206e175232867b48a16e0401ef5bce3c67ca5b9705879066"}, + {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f4e56860a5af16a0fcfa070a0a20c42fbb2012eed1eb5ceeddcc7f8079214281"}, + {file = 
"rpds_py-0.10.6-cp311-none-win32.whl", hash = "sha256:0774a46b38e70fdde0c6ded8d6d73115a7c39d7839a164cc833f170bbf539116"}, + {file = "rpds_py-0.10.6-cp311-none-win_amd64.whl", hash = "sha256:4a5ee600477b918ab345209eddafde9f91c0acd931f3776369585a1c55b04c57"}, + {file = "rpds_py-0.10.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:5ee97c683eaface61d38ec9a489e353d36444cdebb128a27fe486a291647aff6"}, + {file = "rpds_py-0.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0713631d6e2d6c316c2f7b9320a34f44abb644fc487b77161d1724d883662e31"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5a53f5998b4bbff1cb2e967e66ab2addc67326a274567697379dd1e326bded7"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a555ae3d2e61118a9d3e549737bb4a56ff0cec88a22bd1dfcad5b4e04759175"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:945eb4b6bb8144909b203a88a35e0a03d22b57aefb06c9b26c6e16d72e5eb0f0"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:52c215eb46307c25f9fd2771cac8135d14b11a92ae48d17968eda5aa9aaf5071"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1b3cd23d905589cb205710b3988fc8f46d4a198cf12862887b09d7aaa6bf9b9"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64ccc28683666672d7c166ed465c09cee36e306c156e787acef3c0c62f90da5a"}, + {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:516a611a2de12fbea70c78271e558f725c660ce38e0006f75139ba337d56b1f6"}, + {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9ff93d3aedef11f9c4540cf347f8bb135dd9323a2fc705633d83210d464c579d"}, + {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:d858532212f0650be12b6042ff4378dc2efbb7792a286bee4489eaa7ba010586"}, + {file = "rpds_py-0.10.6-cp312-none-win32.whl", hash = "sha256:3c4eff26eddac49d52697a98ea01b0246e44ca82ab09354e94aae8823e8bda02"}, + {file = "rpds_py-0.10.6-cp312-none-win_amd64.whl", hash = "sha256:150eec465dbc9cbca943c8e557a21afdcf9bab8aaabf386c44b794c2f94143d2"}, + {file = "rpds_py-0.10.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:cf693eb4a08eccc1a1b636e4392322582db2a47470d52e824b25eca7a3977b53"}, + {file = "rpds_py-0.10.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4134aa2342f9b2ab6c33d5c172e40f9ef802c61bb9ca30d21782f6e035ed0043"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e782379c2028a3611285a795b89b99a52722946d19fc06f002f8b53e3ea26ea9"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f6da6d842195fddc1cd34c3da8a40f6e99e4a113918faa5e60bf132f917c247"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4a9fe992887ac68256c930a2011255bae0bf5ec837475bc6f7edd7c8dfa254e"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b788276a3c114e9f51e257f2a6f544c32c02dab4aa7a5816b96444e3f9ffc336"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa1afc70a02645809c744eefb7d6ee8fef7e2fad170ffdeacca267fd2674f13"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bddd4f91eede9ca5275e70479ed3656e76c8cdaaa1b354e544cbcf94c6fc8ac4"}, + {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:775049dfa63fb58293990fc59473e659fcafd953bba1d00fc5f0631a8fd61977"}, + {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c6c45a2d2b68c51fe3d9352733fe048291e483376c94f7723458cfd7b473136b"}, + {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:0699ab6b8c98df998c3eacf51a3b25864ca93dab157abe358af46dc95ecd9801"}, + {file = "rpds_py-0.10.6-cp38-none-win32.whl", hash = "sha256:ebdab79f42c5961682654b851f3f0fc68e6cc7cd8727c2ac4ffff955154123c1"}, + {file = "rpds_py-0.10.6-cp38-none-win_amd64.whl", hash = "sha256:24656dc36f866c33856baa3ab309da0b6a60f37d25d14be916bd3e79d9f3afcf"}, + {file = "rpds_py-0.10.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:0898173249141ee99ffcd45e3829abe7bcee47d941af7434ccbf97717df020e5"}, + {file = "rpds_py-0.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e9184fa6c52a74a5521e3e87badbf9692549c0fcced47443585876fcc47e469"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5752b761902cd15073a527b51de76bbae63d938dc7c5c4ad1e7d8df10e765138"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99a57006b4ec39dbfb3ed67e5b27192792ffb0553206a107e4aadb39c5004cd5"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09586f51a215d17efdb3a5f090d7cbf1633b7f3708f60a044757a5d48a83b393"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e225a6a14ecf44499aadea165299092ab0cba918bb9ccd9304eab1138844490b"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2039f8d545f20c4e52713eea51a275e62153ee96c8035a32b2abb772b6fc9e5"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:34ad87a831940521d462ac11f1774edf867c34172010f5390b2f06b85dcc6014"}, + {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dcdc88b6b01015da066da3fb76545e8bb9a6880a5ebf89e0f0b2e3ca557b3ab7"}, + {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:25860ed5c4e7f5e10c496ea78af46ae8d8468e0be745bd233bab9ca99bfd2647"}, + {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:7854a207ef77319ec457c1eb79c361b48807d252d94348305db4f4b62f40f7f3"}, + {file = "rpds_py-0.10.6-cp39-none-win32.whl", hash = "sha256:e6fcc026a3f27c1282c7ed24b7fcac82cdd70a0e84cc848c0841a3ab1e3dea2d"}, + {file = "rpds_py-0.10.6-cp39-none-win_amd64.whl", hash = "sha256:e98c4c07ee4c4b3acf787e91b27688409d918212dfd34c872201273fdd5a0e18"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:68fe9199184c18d997d2e4293b34327c0009a78599ce703e15cd9a0f47349bba"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3339eca941568ed52d9ad0f1b8eb9fe0958fa245381747cecf2e9a78a5539c42"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a360cfd0881d36c6dc271992ce1eda65dba5e9368575663de993eeb4523d895f"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:031f76fc87644a234883b51145e43985aa2d0c19b063e91d44379cd2786144f8"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f36a9d751f86455dc5278517e8b65580eeee37d61606183897f122c9e51cef3"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:052a832078943d2b2627aea0d19381f607fe331cc0eb5df01991268253af8417"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023574366002bf1bd751ebaf3e580aef4a468b3d3c216d2f3f7e16fdabd885ed"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:defa2c0c68734f4a82028c26bcc85e6b92cced99866af118cd6a89b734ad8e0d"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:879fb24304ead6b62dbe5034e7b644b71def53c70e19363f3c3be2705c17a3b4"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:53c43e10d398e365da2d4cc0bcaf0854b79b4c50ee9689652cdc72948e86f487"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3777cc9dea0e6c464e4b24760664bd8831738cc582c1d8aacf1c3f546bef3f65"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:40578a6469e5d1df71b006936ce95804edb5df47b520c69cf5af264d462f2cbb"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:cf71343646756a072b85f228d35b1d7407da1669a3de3cf47f8bbafe0c8183a4"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10f32b53f424fc75ff7b713b2edb286fdbfc94bf16317890260a81c2c00385dc"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:81de24a1c51cfb32e1fbf018ab0bdbc79c04c035986526f76c33e3f9e0f3356c"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac17044876e64a8ea20ab132080ddc73b895b4abe9976e263b0e30ee5be7b9c2"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e8a78bd4879bff82daef48c14d5d4057f6856149094848c3ed0ecaf49f5aec2"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78ca33811e1d95cac8c2e49cb86c0fb71f4d8409d8cbea0cb495b6dbddb30a55"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c63c3ef43f0b3fb00571cff6c3967cc261c0ebd14a0a134a12e83bdb8f49f21f"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:7fde6d0e00b2fd0dbbb40c0eeec463ef147819f23725eda58105ba9ca48744f4"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:79edd779cfc46b2e15b0830eecd8b4b93f1a96649bcb502453df471a54ce7977"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = 
"sha256:9164ec8010327ab9af931d7ccd12ab8d8b5dc2f4c6a16cbdd9d087861eaaefa1"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d29ddefeab1791e3c751e0189d5f4b3dbc0bbe033b06e9c333dca1f99e1d523e"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:30adb75ecd7c2a52f5e76af50644b3e0b5ba036321c390b8e7ec1bb2a16dd43c"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd609fafdcdde6e67a139898196698af37438b035b25ad63704fd9097d9a3482"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6eef672de005736a6efd565577101277db6057f65640a813de6c2707dc69f396"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cf4393c7b41abbf07c88eb83e8af5013606b1cdb7f6bc96b1b3536b53a574b8"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad857f42831e5b8d41a32437f88d86ead6c191455a3499c4b6d15e007936d4cf"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7360573f1e046cb3b0dceeb8864025aa78d98be4bb69f067ec1c40a9e2d9df"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d08f63561c8a695afec4975fae445245386d645e3e446e6f260e81663bfd2e38"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:f0f17f2ce0f3529177a5fff5525204fad7b43dd437d017dd0317f2746773443d"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:442626328600bde1d09dc3bb00434f5374948838ce75c41a52152615689f9403"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e9616f5bd2595f7f4a04b67039d890348ab826e943a9bfdbe4938d0eba606971"}, + {file = "rpds_py-0.10.6.tar.gz", hash = "sha256:4ce5a708d65a8dbf3748d2474b580d606b1b9f91b5c6ab2a316e0b0cf7a4ba50"}, ] 
[[package]] @@ -3608,49 +3691,48 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] [[package]] name = "ruamel-yaml-clib" -version = "0.2.7" +version = "0.2.8" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win32.whl", hash = "sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:1a6391a7cabb7641c32517539ca42cf84b87b667bad38b78d4d42dd23e957c81"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9c7617df90c1365638916b98cdd9be833d31d337dbcd722485597b43c4a215bf"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"}, - {file = 
"ruamel.yaml.clib-0.2.7-cp311-cp311-win32.whl", hash = "sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_12_0_arm64.whl", hash = "sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win32.whl", hash = "sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win_amd64.whl", hash = "sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win32.whl", hash = 
"sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win32.whl", hash = "sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win32.whl", hash = "sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8"}, - {file = 
"ruamel.yaml.clib-0.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5"}, - {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = 
"sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, ] [[package]] @@ -3686,31 +3768,37 @@ synapse = 
["synapseclient (>=2.7.0,<3.0.0)"] [[package]] name = "scipy" -version = "1.11.1" +version = "1.11.3" description = "Fundamental algorithms for scientific computing in Python" category = "main" optional = false python-versions = "<3.13,>=3.9" files = [ - {file = "scipy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aec8c62fbe52914f9cf28d846cf0401dd80ab80788bbab909434eb336ed07c04"}, - {file = "scipy-1.11.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:3b9963798df1d8a52db41a6fc0e6fa65b1c60e85d73da27ae8bb754de4792481"}, - {file = "scipy-1.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e8eb42db36526b130dfbc417609498a6192381abc1975b91e3eb238e0b41c1a"}, - {file = "scipy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:366a6a937110d80dca4f63b3f5b00cc89d36f678b2d124a01067b154e692bab1"}, - {file = "scipy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:08d957ca82d3535b3b9ba6c8ff355d78fe975271874e2af267cb5add5bd78625"}, - {file = "scipy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:e866514bc2d660608447b6ba95c8900d591f2865c07cca0aa4f7ff3c4ca70f30"}, - {file = "scipy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba94eeef3c9caa4cea7b402a35bb02a5714ee1ee77eb98aca1eed4543beb0f4c"}, - {file = "scipy-1.11.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:512fdc18c65f76dadaca139348e525646d440220d8d05f6d21965b8d4466bccd"}, - {file = "scipy-1.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cce154372f0ebe88556ed06d7b196e9c2e0c13080ecb58d0f35062dc7cc28b47"}, - {file = "scipy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4bb943010203465ac81efa392e4645265077b4d9e99b66cf3ed33ae12254173"}, - {file = "scipy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:249cfa465c379c9bb2c20123001e151ff5e29b351cbb7f9c91587260602c58d0"}, - {file = "scipy-1.11.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:ffb28e3fa31b9c376d0fb1f74c1f13911c8c154a760312fbee87a21eb21efe31"}, - {file = "scipy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:39154437654260a52871dfde852adf1b93b1d1bc5dc0ffa70068f16ec0be2624"}, - {file = "scipy-1.11.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:b588311875c58d1acd4ef17c983b9f1ab5391755a47c3d70b6bd503a45bfaf71"}, - {file = "scipy-1.11.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d51565560565a0307ed06fa0ec4c6f21ff094947d4844d6068ed04400c72d0c3"}, - {file = "scipy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b41a0f322b4eb51b078cb3441e950ad661ede490c3aca66edef66f4b37ab1877"}, - {file = "scipy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:396fae3f8c12ad14c5f3eb40499fd06a6fef8393a6baa352a652ecd51e74e029"}, - {file = "scipy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:be8c962a821957fdde8c4044efdab7a140c13294997a407eaee777acf63cbf0c"}, - {file = "scipy-1.11.1.tar.gz", hash = "sha256:fb5b492fa035334fd249f0973cc79ecad8b09c604b42a127a677b45a9a3d4289"}, + {file = "scipy-1.11.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:370f569c57e1d888304052c18e58f4a927338eafdaef78613c685ca2ea0d1fa0"}, + {file = "scipy-1.11.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:9885e3e4f13b2bd44aaf2a1a6390a11add9f48d5295f7a592393ceb8991577a3"}, + {file = "scipy-1.11.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e04aa19acc324a1a076abb4035dabe9b64badb19f76ad9c798bde39d41025cdc"}, + {file = "scipy-1.11.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e1a8a4657673bfae1e05e1e1d6e94b0cabe5ed0c7c144c8aa7b7dbb774ce5c1"}, + {file = "scipy-1.11.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7abda0e62ef00cde826d441485e2e32fe737bdddee3324e35c0e01dee65e2a88"}, + {file = "scipy-1.11.3-cp310-cp310-win_amd64.whl", hash = "sha256:033c3fd95d55012dd1148b201b72ae854d5086d25e7c316ec9850de4fe776929"}, + {file 
= "scipy-1.11.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:925c6f09d0053b1c0f90b2d92d03b261e889b20d1c9b08a3a51f61afc5f58165"}, + {file = "scipy-1.11.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5664e364f90be8219283eeb844323ff8cd79d7acbd64e15eb9c46b9bc7f6a42a"}, + {file = "scipy-1.11.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00f325434b6424952fbb636506f0567898dca7b0f7654d48f1c382ea338ce9a3"}, + {file = "scipy-1.11.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f290cf561a4b4edfe8d1001ee4be6da60c1c4ea712985b58bf6bc62badee221"}, + {file = "scipy-1.11.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:91770cb3b1e81ae19463b3c235bf1e0e330767dca9eb4cd73ba3ded6c4151e4d"}, + {file = "scipy-1.11.3-cp311-cp311-win_amd64.whl", hash = "sha256:e1f97cd89c0fe1a0685f8f89d85fa305deb3067d0668151571ba50913e445820"}, + {file = "scipy-1.11.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dfcc1552add7cb7c13fb70efcb2389d0624d571aaf2c80b04117e2755a0c5d15"}, + {file = "scipy-1.11.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:0d3a136ae1ff0883fffbb1b05b0b2fea251cb1046a5077d0b435a1839b3e52b7"}, + {file = "scipy-1.11.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bae66a2d7d5768eaa33008fa5a974389f167183c87bf39160d3fefe6664f8ddc"}, + {file = "scipy-1.11.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2f6dee6cbb0e263b8142ed587bc93e3ed5e777f1f75448d24fb923d9fd4dce6"}, + {file = "scipy-1.11.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:74e89dc5e00201e71dd94f5f382ab1c6a9f3ff806c7d24e4e90928bb1aafb280"}, + {file = "scipy-1.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:90271dbde4be191522b3903fc97334e3956d7cfb9cce3f0718d0ab4fd7d8bfd6"}, + {file = "scipy-1.11.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a63d1ec9cadecce838467ce0631c17c15c7197ae61e49429434ba01d618caa83"}, + {file = 
"scipy-1.11.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:5305792c7110e32ff155aed0df46aa60a60fc6e52cd4ee02cdeb67eaccd5356e"}, + {file = "scipy-1.11.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ea7f579182d83d00fed0e5c11a4aa5ffe01460444219dedc448a36adf0c3917"}, + {file = "scipy-1.11.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c77da50c9a91e23beb63c2a711ef9e9ca9a2060442757dffee34ea41847d8156"}, + {file = "scipy-1.11.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:15f237e890c24aef6891c7d008f9ff7e758c6ef39a2b5df264650eb7900403c0"}, + {file = "scipy-1.11.3-cp39-cp39-win_amd64.whl", hash = "sha256:4b4bb134c7aa457e26cc6ea482b016fef45db71417d55cc6d8f43d799cdf9ef2"}, + {file = "scipy-1.11.3.tar.gz", hash = "sha256:bba4d955f54edd61899776bad459bf7326e14b9fa1c552181f0479cc60a568cd"}, ] [package.dependencies] @@ -3809,26 +3897,26 @@ files = [ [[package]] name = "soupsieve" -version = "2.4.1" +version = "2.5" description = "A modern CSS selector implementation for Beautiful Soup." 
category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, - {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, ] [[package]] name = "sphinx" -version = "7.0.1" +version = "7.2.6" description = "Python documentation generator" category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "Sphinx-7.0.1.tar.gz", hash = "sha256:61e025f788c5977d9412587e733733a289e2b9fdc2fef8868ddfbfc4ccfe881d"}, - {file = "sphinx-7.0.1-py3-none-any.whl", hash = "sha256:60c5e04756c1709a98845ed27a2eed7a556af3993afb66e77fec48189f742616"}, + {file = "sphinx-7.2.6-py3-none-any.whl", hash = "sha256:1e09160a40b956dc623c910118fa636da93bd3ca0b9876a7b3df90f07d691560"}, + {file = "sphinx-7.2.6.tar.gz", hash = "sha256:9a5160e1ea90688d5963ba09a2dcd8bdd526620edbb65c328728f1b2228d5ab5"}, ] [package.dependencies] @@ -3840,7 +3928,7 @@ imagesize = ">=1.3" importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} Jinja2 = ">=3.0" packaging = ">=21.0" -Pygments = ">=2.13" +Pygments = ">=2.14" requests = ">=2.25.0" snowballstemmer = ">=2.0" sphinxcontrib-applehelp = "*" @@ -3848,12 +3936,12 @@ sphinxcontrib-devhelp = "*" sphinxcontrib-htmlhelp = ">=2.0.0" sphinxcontrib-jsmath = "*" sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = ">=1.1.5" +sphinxcontrib-serializinghtml = ">=1.1.9" [package.extras] docs = ["sphinxcontrib-websupport"] lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", 
"types-requests"] -test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] +test = ["cython (>=3.0)", "filelock", "html5lib", "pytest (>=4.6)", "setuptools (>=67.0)"] [[package]] name = "sphinx-click" @@ -3874,48 +3962,57 @@ sphinx = ">=2.0" [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.4" +version = "1.0.7" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, - {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, + {file = "sphinxcontrib_applehelp-1.0.7-py3-none-any.whl", hash = "sha256:094c4d56209d1734e7d252f6e0b3ccc090bd52ee56807a5d9315b19c122ab15d"}, + {file = "sphinxcontrib_applehelp-1.0.7.tar.gz", hash = "sha256:39fdc8d762d33b01a7d8f026a3b7d71563ea3b72787d5f00ad8465bd9d6dfbfa"}, ] +[package.dependencies] +Sphinx = ">=5" + [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.2" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
+version = "1.0.5" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, - {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, + {file = "sphinxcontrib_devhelp-1.0.5-py3-none-any.whl", hash = "sha256:fe8009aed765188f08fcaadbb3ea0d90ce8ae2d76710b7e29ea7d047177dae2f"}, + {file = "sphinxcontrib_devhelp-1.0.5.tar.gz", hash = "sha256:63b41e0d38207ca40ebbeabcf4d8e51f76c03e78cd61abe118cf4435c73d4212"}, ] +[package.dependencies] +Sphinx = ">=5" + [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.1" +version = "2.0.4" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, - {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, + {file = "sphinxcontrib_htmlhelp-2.0.4-py3-none-any.whl", hash = "sha256:8001661c077a73c29beaf4a79968d0726103c5605e27db92b9ebed8bab1359e9"}, + {file = "sphinxcontrib_htmlhelp-2.0.4.tar.gz", hash = "sha256:6c26a118a05b76000738429b724a0568dbde5b72391a688577da08f11891092a"}, ] +[package.dependencies] +Sphinx = ">=5" + [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["html5lib", "pytest"] @@ -3937,32 +4034,38 @@ test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" -version = "1.0.3" -description = "sphinxcontrib-qthelp is a 
sphinx extension which outputs QtHelp document." +version = "1.0.6" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, + {file = "sphinxcontrib_qthelp-1.0.6-py3-none-any.whl", hash = "sha256:bf76886ee7470b934e363da7a954ea2825650013d367728588732c7350f49ea4"}, + {file = "sphinxcontrib_qthelp-1.0.6.tar.gz", hash = "sha256:62b9d1a186ab7f5ee3356d906f648cacb7a6bdb94d201ee7adf26db55092982d"}, ] +[package.dependencies] +Sphinx = ">=5" + [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.5" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
+version = "1.1.9" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, - {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, + {file = "sphinxcontrib_serializinghtml-1.1.9-py3-none-any.whl", hash = "sha256:9b36e503703ff04f20e9675771df105e58aa029cfcbc23b8ed716019b7416ae1"}, + {file = "sphinxcontrib_serializinghtml-1.1.9.tar.gz", hash = "sha256:0c64ff898339e1fac29abd2bf5f11078f3ec413cfe9c046d3120d7ca65530b54"}, ] +[package.dependencies] +Sphinx = ">=5" + [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] @@ -4080,14 +4183,14 @@ url = ["furl (>=0.4.1)"] [[package]] name = "stack-data" -version = "0.6.2" +version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" category = "main" optional = false python-versions = "*" files = [ - {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, - {file = "stack_data-0.6.2.tar.gz", hash = "sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, ] [package.dependencies] @@ -4157,14 +4260,14 @@ widechars = ["wcwidth"] [[package]] name = "tenacity" -version = "8.2.2" +version = "8.2.3" description = "Retry code until it succeeds" category = "main" optional = false -python-versions = ">=3.6" +python-versions = 
">=3.7" files = [ - {file = "tenacity-8.2.2-py3-none-any.whl", hash = "sha256:2f277afb21b851637e8f52e6a613ff08734c347dc19ade928e519d7d2d8569b0"}, - {file = "tenacity-8.2.2.tar.gz", hash = "sha256:43af037822bd0029025877f3b2d97cc4d7bb0c2991000a3d59d71517c5c969e0"}, + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, ] [package.extras] @@ -4226,7 +4329,7 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4236,14 +4339,14 @@ files = [ [[package]] name = "tomlkit" -version = "0.11.8" +version = "0.12.1" description = "Style preserving TOML library" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "tomlkit-0.11.8-py3-none-any.whl", hash = "sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171"}, - {file = "tomlkit-0.11.8.tar.gz", hash = "sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3"}, + {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, + {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, ] [[package]] @@ -4260,61 +4363,73 @@ files = [ [[package]] name = "tornado" -version = "6.3.2" +version = "6.3.3" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
category = "main" optional = false python-versions = ">= 3.8" files = [ - {file = "tornado-6.3.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:c367ab6c0393d71171123ca5515c61ff62fe09024fa6bf299cd1339dc9456829"}, - {file = "tornado-6.3.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b46a6ab20f5c7c1cb949c72c1994a4585d2eaa0be4853f50a03b5031e964fc7c"}, - {file = "tornado-6.3.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2de14066c4a38b4ecbbcd55c5cc4b5340eb04f1c5e81da7451ef555859c833f"}, - {file = "tornado-6.3.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05615096845cf50a895026f749195bf0b10b8909f9be672f50b0fe69cba368e4"}, - {file = "tornado-6.3.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b17b1cf5f8354efa3d37c6e28fdfd9c1c1e5122f2cb56dac121ac61baa47cbe"}, - {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:29e71c847a35f6e10ca3b5c2990a52ce38b233019d8e858b755ea6ce4dcdd19d"}, - {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:834ae7540ad3a83199a8da8f9f2d383e3c3d5130a328889e4cc991acc81e87a0"}, - {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6a0848f1aea0d196a7c4f6772197cbe2abc4266f836b0aac76947872cd29b411"}, - {file = "tornado-6.3.2-cp38-abi3-win32.whl", hash = "sha256:7efcbcc30b7c654eb6a8c9c9da787a851c18f8ccd4a5a3a95b05c7accfa068d2"}, - {file = "tornado-6.3.2-cp38-abi3-win_amd64.whl", hash = "sha256:0c325e66c8123c606eea33084976c832aa4e766b7dff8aedd7587ea44a604cdf"}, - {file = "tornado-6.3.2.tar.gz", hash = "sha256:4b927c4f19b71e627b13f3db2324e4ae660527143f9e1f2e2fb404f3a187e2ba"}, + {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:502fba735c84450974fec147340016ad928d29f1e91f49be168c0a4c18181e1d"}, + {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_x86_64.whl", hash = 
"sha256:805d507b1f588320c26f7f097108eb4023bbaa984d63176d1652e184ba24270a"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd19ca6c16882e4d37368e0152f99c099bad93e0950ce55e71daed74045908f"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ac51f42808cca9b3613f51ffe2a965c8525cb1b00b7b2d56828b8045354f76a"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71a8db65160a3c55d61839b7302a9a400074c9c753040455494e2af74e2501f2"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:7d01abc57ea0dbb51ddfed477dfe22719d376119844e33c661d873bf9c0e4a16"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9dc4444c0defcd3929d5c1eb5706cbe1b116e762ff3e0deca8b715d14bf6ec17"}, + {file = "tornado-6.3.3-cp38-abi3-win32.whl", hash = "sha256:65ceca9500383fbdf33a98c0087cb975b2ef3bfb874cb35b8de8740cf7f41bd3"}, + {file = "tornado-6.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:22d3c2fa10b5793da13c807e6fc38ff49a4f6e1e3868b0a6f4164768bb8e20f5"}, + {file = "tornado-6.3.3.tar.gz", hash = "sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe"}, ] [[package]] name = "tqdm" -version = "4.65.0" +version = "4.66.1" description = "Fast, Extensible Progress Meter" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.65.0-py3-none-any.whl", hash = "sha256:c4f53a17fe37e132815abceec022631be8ffe1b9381c2e6e30aa70edc99e9671"}, - {file = "tqdm-4.65.0.tar.gz", hash = "sha256:1871fb68a86b8fb3b59ca4cdd3dcccbc7e6d613eeed31f4c332531977b89beb5"}, + {file = "tqdm-4.66.1-py3-none-any.whl", hash = 
"sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, + {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] -dev = ["py-make (>=0.1.0)", "twine", "wheel"] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] [[package]] name = "traitlets" -version = "5.9.0" +version = "5.11.2" description = "Traitlets Python configuration system" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "traitlets-5.9.0-py3-none-any.whl", hash = "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8"}, - {file = "traitlets-5.9.0.tar.gz", hash = "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9"}, + {file = "traitlets-5.11.2-py3-none-any.whl", hash = "sha256:98277f247f18b2c5cabaf4af369187754f4fb0e85911d473f72329db8a7f4fae"}, + {file = "traitlets-5.11.2.tar.gz", hash = "sha256:7564b5bf8d38c40fa45498072bf4dc5e8346eb087bbf1e2ae2d8774f6a0f078e"}, ] [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.5.1)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] + +[[package]] +name = "types-python-dateutil" +version = "2.8.19.14" +description = "Typing stubs for python-dateutil" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"}, + {file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"}, +] [[package]] name = 
"typing-extensions" @@ -4358,14 +4473,14 @@ files = [ [[package]] name = "tzlocal" -version = "5.0.1" +version = "5.1" description = "tzinfo object for the local timezone" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "tzlocal-5.0.1-py3-none-any.whl", hash = "sha256:f3596e180296aaf2dbd97d124fe76ae3a0e3d32b258447de7b939b3fd4be992f"}, - {file = "tzlocal-5.0.1.tar.gz", hash = "sha256:46eb99ad4bdb71f3f72b7d24f4267753e240944ecfc16f25d2719ba89827a803"}, + {file = "tzlocal-5.1-py3-none-any.whl", hash = "sha256:2938498395d5f6a898ab8009555cb37a4d360913ad375d4747ef16826b03ef23"}, + {file = "tzlocal-5.1.tar.gz", hash = "sha256:a5ccb2365b295ed964e0a98ad076fe10c495591e75505d34f154d60a7f1ed722"}, ] [package.dependencies] @@ -4403,30 +4518,30 @@ files = [ [[package]] name = "urllib3" -version = "1.26.16" +version = "1.26.17" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, - {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, + {file = "urllib3-1.26.17-py2.py3-none-any.whl", hash = "sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b"}, + {file = "urllib3-1.26.17.tar.gz", hash = "sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "uwsgi" -version = "2.0.21" +version = "2.0.22" 
description = "The uWSGI server" category = "dev" optional = false python-versions = "*" files = [ - {file = "uwsgi-2.0.21.tar.gz", hash = "sha256:35a30d83791329429bc04fe44183ce4ab512fcf6968070a7bfba42fc5a0552a9"}, + {file = "uwsgi-2.0.22.tar.gz", hash = "sha256:4cc4727258671ac5fa17ab422155e9aaef8a2008ebb86e4404b66deaae965db2"}, ] [[package]] @@ -4448,14 +4563,14 @@ test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] [[package]] name = "wcwidth" -version = "0.2.6" +version = "0.2.8" description = "Measures the displayed width of unicode strings in a terminal" category = "main" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, - {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, + {file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"}, + {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, ] [[package]] @@ -4488,18 +4603,18 @@ files = [ [[package]] name = "websocket-client" -version = "1.6.1" +version = "1.6.4" description = "WebSocket client for Python with low level API options" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "websocket-client-1.6.1.tar.gz", hash = "sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd"}, - {file = "websocket_client-1.6.1-py3-none-any.whl", hash = "sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d"}, + {file = "websocket-client-1.6.4.tar.gz", hash = "sha256:b3324019b3c28572086c4a319f91d1dcd44e6e11cd340232978c684a7650d0df"}, + {file = "websocket_client-1.6.4-py3-none-any.whl", hash = "sha256:084072e0a7f5f347ef2ac3d8698a5e0b4ffbfcab607628cadabc650fc9a83a24"}, ] [package.extras] -docs = ["Sphinx 
(>=3.4)", "sphinx-rtd-theme (>=0.5)"] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] @@ -4520,14 +4635,14 @@ watchdog = ["watchdog"] [[package]] name = "widgetsnbextension" -version = "4.0.8" +version = "4.0.9" description = "Jupyter interactive widgets for Jupyter Notebook" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "widgetsnbextension-4.0.8-py3-none-any.whl", hash = "sha256:2e37f0ce9da11651056280c7efe96f2db052fe8fc269508e3724f5cbd6c93018"}, - {file = "widgetsnbextension-4.0.8.tar.gz", hash = "sha256:9ec291ba87c2dfad42c3d5b6f68713fa18be1acd7476569516b2431682315c17"}, + {file = "widgetsnbextension-4.0.9-py3-none-any.whl", hash = "sha256:91452ca8445beb805792f206e560c1769284267a30ceb1cec9f5bcc887d15175"}, + {file = "widgetsnbextension-4.0.9.tar.gz", hash = "sha256:3c1f5e46dc1166dfd40a42d685e6a51396fd34ff878742a3e47c6f0cc4a2a385"}, ] [[package]] @@ -4617,19 +4732,19 @@ files = [ [[package]] name = "zipp" -version = "3.16.1" +version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.16.1-py3-none-any.whl", hash = "sha256:0b37c326d826d5ca35f2b9685cd750292740774ef16190008b00a0227c256fe0"}, - {file = "zipp-3.16.1.tar.gz", hash = "sha256:857b158da2cbf427b376da1c24fd11faecbac5a4ac7523c3607f8a01f94c2ec0"}, + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", 
"pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [metadata] lock-version = "2.0" From 523a1c2cb8a00655c51f321486bea00c10f03361 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 16 Oct 2023 09:53:40 -0700 Subject: [PATCH 163/239] remake poetry.lock file --- poetry.lock | 112 ++++++++++++++++++++++++++-------------------------- 1 file changed, 57 insertions(+), 55 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5c71fe849..b8e91169b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1770,14 +1770,14 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] [[package]] name = "jupyter-events" -version = "0.7.0" +version = "0.8.0" description = "Jupyter Event System library" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_events-0.7.0-py3-none-any.whl", hash = "sha256:4753da434c13a37c3f3c89b500afa0c0a6241633441421f6adafe2fb2e2b924e"}, - {file = "jupyter_events-0.7.0.tar.gz", hash = "sha256:7be27f54b8388c03eefea123a4f79247c5b9381c49fb1cd48615ee191eb12615"}, + {file = "jupyter_events-0.8.0-py3-none-any.whl", hash = "sha256:81f07375c7673ff298bfb9302b4a981864ec64edaed75ca0fe6f850b9b045525"}, + {file = "jupyter_events-0.8.0.tar.gz", hash = "sha256:fda08f0defce5e16930542ce60634ba48e010830d50073c3dfd235759cee77bf"}, ] [package.dependencies] @@ -1812,14 +1812,14 @@ jupyter-server = ">=1.1.2" [[package]] name = "jupyter-server" -version = "2.7.3" +version = "2.8.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server-2.7.3-py3-none-any.whl", hash = "sha256:8e4b90380b59d7a1e31086c4692231f2a2ea4cb269f5516e60aba72ce8317fc9"}, - {file = "jupyter_server-2.7.3.tar.gz", hash = "sha256:d4916c8581c4ebbc534cebdaa8eca2478d9f3bfdd88eae29fcab0120eac57649"}, + {file = "jupyter_server-2.8.0-py3-none-any.whl", hash = "sha256:c57270faa6530393ae69783a2d2f1874c718b9f109080581ea076b05713249fa"}, + {file = "jupyter_server-2.8.0.tar.gz", hash = "sha256:b11e2ba80667c75f55630faf8ac3d5809f8734f9006d65cce117c46a0a516ab8"}, ] [package.dependencies] @@ -2378,44 +2378,44 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" [[package]] name = "numpy" -version = "1.26.0" +version = "1.26.1" description = "Fundamental package for array computing in Python" category = "main" optional = false python-versions = "<3.13,>=3.9" files = [ - {file = "numpy-1.26.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8db2f125746e44dce707dd44d4f4efeea8d7e2b43aace3f8d1f235cfa2733dd"}, - {file = "numpy-1.26.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0621f7daf973d34d18b4e4bafb210bbaf1ef5e0100b5fa750bd9cde84c7ac292"}, - {file = "numpy-1.26.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51be5f8c349fdd1a5568e72713a21f518e7d6707bcf8503b528b88d33b57dc68"}, - {file = "numpy-1.26.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:767254ad364991ccfc4d81b8152912e53e103ec192d1bb4ea6b1f5a7117040be"}, - {file = "numpy-1.26.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:436c8e9a4bdeeee84e3e59614d38c3dbd3235838a877af8c211cfcac8a80b8d3"}, - {file = "numpy-1.26.0-cp310-cp310-win32.whl", hash = "sha256:c2e698cb0c6dda9372ea98a0344245ee65bdc1c9dd939cceed6bb91256837896"}, - {file = "numpy-1.26.0-cp310-cp310-win_amd64.whl", hash = "sha256:09aaee96c2cbdea95de76ecb8a586cb687d281c881f5f17bfc0fb7f5890f6b91"}, - {file = 
"numpy-1.26.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:637c58b468a69869258b8ae26f4a4c6ff8abffd4a8334c830ffb63e0feefe99a"}, - {file = "numpy-1.26.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:306545e234503a24fe9ae95ebf84d25cba1fdc27db971aa2d9f1ab6bba19a9dd"}, - {file = "numpy-1.26.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6adc33561bd1d46f81131d5352348350fc23df4d742bb246cdfca606ea1208"}, - {file = "numpy-1.26.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e062aa24638bb5018b7841977c360d2f5917268d125c833a686b7cbabbec496c"}, - {file = "numpy-1.26.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:546b7dd7e22f3c6861463bebb000646fa730e55df5ee4a0224408b5694cc6148"}, - {file = "numpy-1.26.0-cp311-cp311-win32.whl", hash = "sha256:c0b45c8b65b79337dee5134d038346d30e109e9e2e9d43464a2970e5c0e93229"}, - {file = "numpy-1.26.0-cp311-cp311-win_amd64.whl", hash = "sha256:eae430ecf5794cb7ae7fa3808740b015aa80747e5266153128ef055975a72b99"}, - {file = "numpy-1.26.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:166b36197e9debc4e384e9c652ba60c0bacc216d0fc89e78f973a9760b503388"}, - {file = "numpy-1.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f042f66d0b4ae6d48e70e28d487376204d3cbf43b84c03bac57e28dac6151581"}, - {file = "numpy-1.26.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5e18e5b14a7560d8acf1c596688f4dfd19b4f2945b245a71e5af4ddb7422feb"}, - {file = "numpy-1.26.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f6bad22a791226d0a5c7c27a80a20e11cfe09ad5ef9084d4d3fc4a299cca505"}, - {file = "numpy-1.26.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4acc65dd65da28060e206c8f27a573455ed724e6179941edb19f97e58161bb69"}, - {file = "numpy-1.26.0-cp312-cp312-win32.whl", hash = "sha256:bb0d9a1aaf5f1cb7967320e80690a1d7ff69f1d47ebc5a9bea013e3a21faec95"}, - {file = "numpy-1.26.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:ee84ca3c58fe48b8ddafdeb1db87388dce2c3c3f701bf447b05e4cfcc3679112"}, - {file = "numpy-1.26.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4a873a8180479bc829313e8d9798d5234dfacfc2e8a7ac188418189bb8eafbd2"}, - {file = "numpy-1.26.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:914b28d3215e0c721dc75db3ad6d62f51f630cb0c277e6b3bcb39519bed10bd8"}, - {file = "numpy-1.26.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c78a22e95182fb2e7874712433eaa610478a3caf86f28c621708d35fa4fd6e7f"}, - {file = "numpy-1.26.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f737708b366c36b76e953c46ba5827d8c27b7a8c9d0f471810728e5a2fe57c"}, - {file = "numpy-1.26.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b44e6a09afc12952a7d2a58ca0a2429ee0d49a4f89d83a0a11052da696440e49"}, - {file = "numpy-1.26.0-cp39-cp39-win32.whl", hash = "sha256:5671338034b820c8d58c81ad1dafc0ed5a00771a82fccc71d6438df00302094b"}, - {file = "numpy-1.26.0-cp39-cp39-win_amd64.whl", hash = "sha256:020cdbee66ed46b671429c7265cf00d8ac91c046901c55684954c3958525dab2"}, - {file = "numpy-1.26.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0792824ce2f7ea0c82ed2e4fecc29bb86bee0567a080dacaf2e0a01fe7654369"}, - {file = "numpy-1.26.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d484292eaeb3e84a51432a94f53578689ffdea3f90e10c8b203a99be5af57d8"}, - {file = "numpy-1.26.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:186ba67fad3c60dbe8a3abff3b67a91351100f2661c8e2a80364ae6279720299"}, - {file = "numpy-1.26.0.tar.gz", hash = "sha256:f93fc78fe8bf15afe2b8d6b6499f1c73953169fad1e9a8dd086cdff3190e7fdf"}, + {file = "numpy-1.26.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82e871307a6331b5f09efda3c22e03c095d957f04bf6bc1804f30048d0e5e7af"}, + {file = "numpy-1.26.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdd9ec98f0063d93baeb01aad472a1a0840dee302842a2746a7a8e92968f9575"}, + {file = 
"numpy-1.26.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d78f269e0c4fd365fc2992c00353e4530d274ba68f15e968d8bc3c69ce5f5244"}, + {file = "numpy-1.26.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ab9163ca8aeb7fd32fe93866490654d2f7dda4e61bc6297bf72ce07fdc02f67"}, + {file = "numpy-1.26.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:78ca54b2f9daffa5f323f34cdf21e1d9779a54073f0018a3094ab907938331a2"}, + {file = "numpy-1.26.1-cp310-cp310-win32.whl", hash = "sha256:d1cfc92db6af1fd37a7bb58e55c8383b4aa1ba23d012bdbba26b4bcca45ac297"}, + {file = "numpy-1.26.1-cp310-cp310-win_amd64.whl", hash = "sha256:d2984cb6caaf05294b8466966627e80bf6c7afd273279077679cb010acb0e5ab"}, + {file = "numpy-1.26.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cd7837b2b734ca72959a1caf3309457a318c934abef7a43a14bb984e574bbb9a"}, + {file = "numpy-1.26.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c59c046c31a43310ad0199d6299e59f57a289e22f0f36951ced1c9eac3665b9"}, + {file = "numpy-1.26.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d58e8c51a7cf43090d124d5073bc29ab2755822181fcad978b12e144e5e5a4b3"}, + {file = "numpy-1.26.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6081aed64714a18c72b168a9276095ef9155dd7888b9e74b5987808f0dd0a974"}, + {file = "numpy-1.26.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:97e5d6a9f0702c2863aaabf19f0d1b6c2628fbe476438ce0b5ce06e83085064c"}, + {file = "numpy-1.26.1-cp311-cp311-win32.whl", hash = "sha256:b9d45d1dbb9de84894cc50efece5b09939752a2d75aab3a8b0cef6f3a35ecd6b"}, + {file = "numpy-1.26.1-cp311-cp311-win_amd64.whl", hash = "sha256:3649d566e2fc067597125428db15d60eb42a4e0897fc48d28cb75dc2e0454e53"}, + {file = "numpy-1.26.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1d1bd82d539607951cac963388534da3b7ea0e18b149a53cf883d8f699178c0f"}, + {file = "numpy-1.26.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:afd5ced4e5a96dac6725daeb5242a35494243f2239244fad10a90ce58b071d24"}, + {file = "numpy-1.26.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a03fb25610ef560a6201ff06df4f8105292ba56e7cdd196ea350d123fc32e24e"}, + {file = "numpy-1.26.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcfaf015b79d1f9f9c9fd0731a907407dc3e45769262d657d754c3a028586124"}, + {file = "numpy-1.26.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e509cbc488c735b43b5ffea175235cec24bbc57b227ef1acc691725beb230d1c"}, + {file = "numpy-1.26.1-cp312-cp312-win32.whl", hash = "sha256:af22f3d8e228d84d1c0c44c1fbdeb80f97a15a0abe4f080960393a00db733b66"}, + {file = "numpy-1.26.1-cp312-cp312-win_amd64.whl", hash = "sha256:9f42284ebf91bdf32fafac29d29d4c07e5e9d1af862ea73686581773ef9e73a7"}, + {file = "numpy-1.26.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb894accfd16b867d8643fc2ba6c8617c78ba2828051e9a69511644ce86ce83e"}, + {file = "numpy-1.26.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e44ccb93f30c75dfc0c3aa3ce38f33486a75ec9abadabd4e59f114994a9c4617"}, + {file = "numpy-1.26.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9696aa2e35cc41e398a6d42d147cf326f8f9d81befcb399bc1ed7ffea339b64e"}, + {file = "numpy-1.26.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5b411040beead47a228bde3b2241100454a6abde9df139ed087bd73fc0a4908"}, + {file = "numpy-1.26.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1e11668d6f756ca5ef534b5be8653d16c5352cbb210a5c2a79ff288e937010d5"}, + {file = "numpy-1.26.1-cp39-cp39-win32.whl", hash = "sha256:d1d2c6b7dd618c41e202c59c1413ef9b2c8e8a15f5039e344af64195459e3104"}, + {file = "numpy-1.26.1-cp39-cp39-win_amd64.whl", hash = "sha256:59227c981d43425ca5e5c01094d59eb14e8772ce6975d4b2fc1e106a833d5ae2"}, + {file = "numpy-1.26.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:06934e1a22c54636a059215d6da99e23286424f316fddd979f5071093b648668"}, + 
{file = "numpy-1.26.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76ff661a867d9272cd2a99eed002470f46dbe0943a5ffd140f49be84f68ffc42"}, + {file = "numpy-1.26.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6965888d65d2848e8768824ca8288db0a81263c1efccec881cb35a0d805fcd2f"}, + {file = "numpy-1.26.1.tar.gz", hash = "sha256:c8c6c72d4a9f831f328efb1312642a1cafafaa88981d9ab76368d50d07d93cbe"}, ] [[package]] @@ -2735,26 +2735,28 @@ files = [ [[package]] name = "psutil" -version = "5.9.5" +version = "5.9.6" description = "Cross-platform lib for process and system monitoring in Python." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, - {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, - {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, - {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, - {file = 
"psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, - {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, - {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, - {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, - {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, + {file = "psutil-5.9.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:fb8a697f11b0f5994550555fcfe3e69799e5b060c8ecf9e2f75c69302cc35c0d"}, + {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:91ecd2d9c00db9817a4b4192107cf6954addb5d9d67a969a4f436dbc9200f88c"}, + {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:10e8c17b4f898d64b121149afb136c53ea8b68c7531155147867b7b1ac9e7e28"}, + {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:18cd22c5db486f33998f37e2bb054cc62fd06646995285e02a51b1e08da97017"}, + {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ca2780f5e038379e520281e4c032dddd086906ddff9ef0d1b9dcf00710e5071c"}, + {file = "psutil-5.9.6-cp27-none-win32.whl", hash = "sha256:70cb3beb98bc3fd5ac9ac617a327af7e7f826373ee64c80efd4eb2856e5051e9"}, + {file = "psutil-5.9.6-cp27-none-win_amd64.whl", hash = "sha256:51dc3d54607c73148f63732c727856f5febec1c7c336f8f41fcbd6315cce76ac"}, + {file = "psutil-5.9.6-cp36-abi3-macosx_10_9_x86_64.whl", hash = 
"sha256:c69596f9fc2f8acd574a12d5f8b7b1ba3765a641ea5d60fb4736bf3c08a8214a"}, + {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92e0cc43c524834af53e9d3369245e6cc3b130e78e26100d1f63cdb0abeb3d3c"}, + {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:748c9dd2583ed86347ed65d0035f45fa8c851e8d90354c122ab72319b5f366f4"}, + {file = "psutil-5.9.6-cp36-cp36m-win32.whl", hash = "sha256:3ebf2158c16cc69db777e3c7decb3c0f43a7af94a60d72e87b2823aebac3d602"}, + {file = "psutil-5.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:ff18b8d1a784b810df0b0fff3bcb50ab941c3b8e2c8de5726f9c71c601c611aa"}, + {file = "psutil-5.9.6-cp37-abi3-win32.whl", hash = "sha256:a6f01f03bf1843280f4ad16f4bde26b817847b4c1a0db59bf6419807bc5ce05c"}, + {file = "psutil-5.9.6-cp37-abi3-win_amd64.whl", hash = "sha256:6e5fb8dc711a514da83098bc5234264e551ad980cec5f85dabf4d38ed6f15e9a"}, + {file = "psutil-5.9.6-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:daecbcbd29b289aac14ece28eca6a3e60aa361754cf6da3dfb20d4d32b6c7f57"}, + {file = "psutil-5.9.6.tar.gz", hash = "sha256:e4b92ddcd7dd4cdd3f900180ea1e104932c7bce234fb88976e2a3b296441225a"}, ] [package.extras] From aee858993851359c8beef3e5a56338d6f32ec7c2 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 16 Oct 2023 10:13:47 -0700 Subject: [PATCH 164/239] change lock-version to 1.1 manually in poetry.lock --- poetry.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index b8e91169b..f873914c6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4749,6 +4749,6 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.link testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy 
(>=0.9.1)", "pytest-ruff"] [metadata] -lock-version = "2.0" +lock-version = "1.1" python-versions = ">=3.9.0,<3.11" content-hash = "c7fbaf60049ed6f4a60971720b8761423fa94647c1e428d8a04f994d0b1095d7" From ad2e8a2bee8f32a824726a3a37e7c0da2dce1302 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 16 Oct 2023 15:14:02 -0700 Subject: [PATCH 165/239] remake the poetry.lock file again --- poetry.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index f873914c6..b8e91169b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4749,6 +4749,6 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.link testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [metadata] -lock-version = "1.1" +lock-version = "2.0" python-versions = ">=3.9.0,<3.11" content-hash = "c7fbaf60049ed6f4a60971720b8761423fa94647c1e428d8a04f994d0b1095d7" From 7de8a597707273c91a1db15ea4cc11842ae43898 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 16 Oct 2023 15:50:31 -0700 Subject: [PATCH 166/239] update test_utils to properly take in datacalsses --- schematic/schemas/data_model_jsonld.py | 195 +++++++++++++++---------- tests/test_utils.py | 47 ++---- 2 files changed, 131 insertions(+), 111 deletions(-) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 66c82a194..c03406800 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -1,3 +1,4 @@ +import copy from dataclasses import dataclass, field, asdict from dataclasses_json import config, dataclass_json import json @@ -74,8 +75,89 @@ def __init__(self, Graph: nx.MultiDiGraph, output_path:str = ''): class_template = ClassTemplate() self.class_template = json.loads(class_template.to_json()) + def 
get_edges_associated_with_node(self, node:str)->List[tuple[str,str,dict[str,int]]]: + """Retrieve all edges traveling in and out of a node. + Args: + node, str: Label of node in the graph to look for assiciated edges + Returns: + node_edges, list: List of Tuples of edges associated with the given node, tuple contains the two nodes, plus the weight dict associated with the edge connection. + """ + node_edges = list(self.graph.in_edges(node, data=True)) + node_edges.extend(list(self.graph.out_edges(node,data=True))) + return node_edges + + def add_edge_rels_to_template(self, template: dict, rel_vals:dict, node:str): + """ + Args: + template, dict: single class or property JSONLD template that is in the process of being filled. + rel_vals, dict: sub relationship dict for a given relationship (contains informtion like, 'edge_rel', 'jsonld_key' etc..) + node, str: node whose edge information is presently being added to the JSONLD + Returns: + """ + # Get all edges associated with the current node + node_edges = self.get_edges_associated_with_node(node=node) + + # Get node pairs and weights for each edge + for node_1, node_2, weight in node_edges: + + # Retrieve the relationship(s) and related info between the two nodes + node_edge_relationships = self.graph[node_1][node_2] + + # Get the relationship edge key + edge_key = rel_vals['edge_key'] + + # Check if edge_key is even one of the relationships for this node pair. + if edge_key in node_edge_relationships: + # for each relationship between the given nodes + for relationship, weight_dict in node_edge_relationships.items(): + # If the relationship defined and edge_key + if relationship == edge_key: + # TODO: rewrite to use edge_dir + if edge_key in ['domainIncludes', 'parentOf']: + if node_2 == node: + # Make sure the key is in the template (differs between properties and classes) + if rel_vals['jsonld_key'] in template.keys(): + node_1_id = {'@id': 'bts:'+node_1} + # TODO Move this to a helper function to clear up. 
+ if (isinstance(template[rel_vals['jsonld_key']], list) and + node_1_id not in template[rel_vals['jsonld_key']]): + template[rel_vals['jsonld_key']].append(node_1_id) + else: + template[rel_vals['jsonld_key']] == node_1 + else: + if node_1 == node: + # Make sure the key is in the template (differs between properties and classes) + if rel_vals['jsonld_key'] in template.keys(): + node_2_id = {'@id': 'bts:'+node_2} + # TODO Move this to a helper function to clear up. + if (isinstance(template[rel_vals['jsonld_key']], list) and + node_2_id not in template[rel_vals['jsonld_key']]): + template[rel_vals['jsonld_key']].append(node_2_id) + else: + template[rel_vals['jsonld_key']] == node_2 + return template + + def add_node_info_to_template(self, template, rel_vals, node): + """ For a given node and relationship, add relevant value to template + Args: + template, dict: single class or property JSONLD template that is in the process of being filled. + rel_vals, dict: sub relationship dict for a given relationship (contains informtion like, 'edge_rel', 'jsonld_key' etc..) + node, str: node whose information is presently being added to the JSONLD + Returns: + template, dict: single class or property JSONLD template that is in the process of being filled, and now has had additional node information added. + """ + # Get label for relationship used in the graph + node_label = rel_vals['node_label'] + + # Get recorded info for current node, and the attribute type + node_info = nx.get_node_attributes(self.graph, node_label)[node] + + # Add this information to the template + template[rel_vals['jsonld_key']] = node_info + return template + def fill_entry_template(self, template:dict, node:str)->dict: - """ Fill in a blank JSONLD entry template with information for each node. All relationships are filled from the graph, based on the type of information (node or edge) + """ Fill in a blank JSONLD template with information for each node. 
All relationships are filled from the graph, based on the type of information (node or edge) Args: template, dict: empty class or property template to be filled with information for the given node. node, str: target node to fill the template out for. @@ -89,62 +171,16 @@ def fill_entry_template(self, template:dict, node:str)->dict: key_context, key_rel = strip_context(context_value=rel_vals['jsonld_key']) + # Fill in the JSONLD template for this node, with data from the graph by looking up the nodes edge relationships, and the value information attached to the node. + # Fill edge information (done per edge type) if rel_vals['edge_rel']: - # Get all edges associated with the current node - node_edges = list(self.graph.in_edges(node, data=True)) - node_edges.extend(list(self.graph.out_edges(node,data=True))) - - # Get node pairs and weights for each edge - for node_1, node_2, weight in node_edges: - - # Retrieve the relationship(s) and related info between the two nodes - node_edge_relationships = self.graph[node_1][node_2] - - # Get the relationship edge key - edge_key = rel_vals['edge_key'] - - # Check if edge_key is even one of the relationships for this node pair. - if edge_key in node_edge_relationships: - # for each relationship between the given nodes - for relationship, weight_dict in node_edge_relationships.items(): - # If the relationship defined and edge_key - if relationship == edge_key: - # TODO: rewrite to use edge_dir - if edge_key in ['domainIncludes', 'parentOf']: - if node_2 == node: - # Make sure the key is in the template (differs between properties and classes) - if rel_vals['jsonld_key'] in template.keys(): - node_1_id = {'@id': 'bts:'+node_1} - # TODO Move this to a helper function to clear up. 
- if (isinstance(template[rel_vals['jsonld_key']], list) and - node_1_id not in template[rel_vals['jsonld_key']]): - template[rel_vals['jsonld_key']].append(node_1_id) - else: - template[rel_vals['jsonld_key']] == node_1 - else: - if node_1 == node: - # Make sure the key is in the template (differs between properties and classes) - if rel_vals['jsonld_key'] in template.keys(): - node_2_id = {'@id': 'bts:'+node_2} - # TODO Move this to a helper function to clear up. - if (isinstance(template[rel_vals['jsonld_key']], list) and - node_2_id not in template[rel_vals['jsonld_key']]): - template[rel_vals['jsonld_key']].append(node_2_id) - else: - template[rel_vals['jsonld_key']] == node_2 + template = self.add_edge_rels_to_template(template=template, rel_vals=rel_vals, node=node) # Fill in node value information - else: - node_label = rel_vals['node_label'] + else: + template = self.add_node_info_to_template(template=template, rel_vals=rel_vals, node=node) - # Get recorded info for current node, and the attribute type - node_info = nx.get_node_attributes(self.graph, node_label)[node] - try: - # Add this information to the template - template[rel_vals['jsonld_key']] = node_info - except: - breakpoint() - + # Clean up template template = self.clean_template(template=template, data_model_relationships=data_model_relationships, @@ -166,25 +202,31 @@ def add_contexts_to_entries(self, template:dict) -> dict: Note: This will likely need to be modified when Contexts are truly added to the model """ for jsonld_key, entry in template.items(): - try: - # Retrieve the relationships key using the jsonld_key - key= [k for k, v in self.rel_dict.items() if jsonld_key == v['jsonld_key']][0] - except: - continue - # If the current relationship can be defined with a 'node_attr_dict' - if 'node_attr_dict' in self.rel_dict[key].keys(): - try: - # if possible pull standard function to get node information - rel_func = self.rel_dict[key]['node_attr_dict']['standard'] - except: - # if not pull 
default function to get node information - rel_func = self.rel_dict[key]['node_attr_dict']['default'] - - # Add appropritae contexts that have been removed in previous steps (for JSONLD) or did not exist to begin with (csv) - if key == 'id' and rel_func == get_label_from_display_name and 'bts' not in str(template[jsonld_key]).lower(): - template[jsonld_key] = 'bts:' + template[jsonld_key] - elif key == 'required' and rel_func == convert_bool_to_str and 'sms' not in str(template[jsonld_key]).lower(): - template[jsonld_key] = 'sms:' + str(template[jsonld_key]).lower() + + # Retrieve the relationships key using the jsonld_key + rel_key = [] + + for rel, rel_vals in self.rel_dict.items(): + if 'jsonld_key' in rel_vals and jsonld_key == rel_vals['jsonld_key']: + rel_key.append(rel) + + if rel_key: + rel_key=rel_key[0] + # If the current relationship can be defined with a 'node_attr_dict' + if 'node_attr_dict' in self.rel_dict[rel_key].keys(): + try: + # if possible pull standard function to get node information + rel_func = self.rel_dict[rel_key]['node_attr_dict']['standard'] + except: + # if not pull default function to get node information + rel_func = self.rel_dict[rel_key]['node_attr_dict']['default'] + + # Add appropritae contexts that have been removed in previous steps (for JSONLD) or did not exist to begin with (csv) + if rel_key == 'id' and rel_func == get_label_from_display_name and 'bts' not in str(template[jsonld_key]).lower(): + template[jsonld_key] = 'bts:' + template[jsonld_key] + elif rel_key == 'required' and rel_func == convert_bool_to_str and 'sms' not in str(template[jsonld_key]).lower(): + template[jsonld_key] = 'sms:' + str(template[jsonld_key]).lower() + return template def clean_template(self, template: dict, data_model_relationships: dict) -> dict: @@ -251,10 +293,13 @@ def generate_jsonld_object(self): # Iterativly add graph nodes to json_ld_template as properties or classes for node in self.graph.nodes: if node in properties: - obj = 
self.fill_entry_template(template = self.property_template, node = node) + # Get property template + property_template = copy.deepcopy(self.property_template) + obj = self.fill_entry_template(template = property_template, node = node) else: - obj = self.fill_entry_template(template = self.class_template, node = node) - + # Get class template + class_template = copy.deepcopy(self.class_template) + obj = self.fill_entry_template(template = class_template, node = node) json_ld_template['@graph'].append(obj) return json_ld_template diff --git a/tests/test_utils.py b/tests/test_utils.py index 0fe92acf4..5fe1df0c2 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,3 +1,4 @@ +import copy import json import logging import os @@ -17,7 +18,7 @@ from schematic.schemas.data_model_parser import DataModelParser from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer -from schematic.schemas.data_model_jsonld import DataModelJsonLD +from schematic.schemas.data_model_jsonld import DataModelJsonLD, BaseTemplate, PropertyTemplate, ClassTemplate from schematic.schemas.data_model_json_schema import DataModelJSONSchema from schematic.schemas.data_model_relationships import DataModelRelationships @@ -427,25 +428,10 @@ def test_validate_class_schema(self, helpers): Get a class template, fill it out with mock data, and validate against a JSON Schema """ + class_template = ClassTemplate() + self.class_template = json.loads(class_template.to_json()) - # Get data model path - data_model_path = helpers.get_data_path("example.model.jsonld") - - # Instantiate DataModelParser - data_model_parser = DataModelParser(path_to_data_model = data_model_path) - - #Parse Model - parsed_data_model = data_model_parser.parse_model() - - # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model) - - # Generate graph - graph_data_model = data_model_grapher.generate_data_model_graph() - - dm_jsonld = DataModelJsonLD(graph_data_model) - - 
mock_class = dm_jsonld.class_template() + mock_class = copy.deepcopy(self.class_template) mock_class["@id"] = "bts:MockClass" mock_class["@type"] = "rdfs:Class" mock_class["@rdfs:comment"] = "This is a mock class" @@ -458,25 +444,14 @@ def test_validate_class_schema(self, helpers): def test_validate_property_schema(self, helpers): + """ + Get a property template, fill it out with mock data, and validate against a JSON Schema - # Get data model path - data_model_path = helpers.get_data_path("example.model.jsonld") - - # Instantiate DataModelParser - data_model_parser = DataModelParser(path_to_data_model = data_model_path) - - #Parse Model - parsed_data_model = data_model_parser.parse_model() - - # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model) - - # Generate graph - graph_data_model = data_model_grapher.generate_data_model_graph() - - dm_jsonld = DataModelJsonLD(graph_data_model) + """ + property_template = PropertyTemplate() + self.property_template = json.loads(property_template.to_json()) - mock_class = dm_jsonld.property_template() + mock_class = copy.deepcopy(self.property_template) mock_class["@id"] = "bts:MockProperty" mock_class["@type"] = "rdf:Property" mock_class["@rdfs:comment"] = "This is a mock Patient class" From 6dc7f08105d558b135cd0d19e08e2a0c7cc6c74a Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 16 Oct 2023 16:08:33 -0700 Subject: [PATCH 167/239] update naming in clean template and add andditional docstrings for clarity --- schematic/schemas/data_model_jsonld.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index c03406800..26c1be092 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -238,11 +238,16 @@ def clean_template(self, template: dict, data_model_relationships: dict) -> dict template: JSONLD template where unfilled entries have been 
removed, or filled with default depending on specifications in the relationships dictionary. ''' for rels in data_model_relationships.values(): - if rels['jsonld_key'] in template.keys() and not template[rels['jsonld_key']]: + # Get the current relationships, jsonld key + relationship_jsonld_key = rels['jsonld_key'] + # Check if the relationship_relationship_key is part of the template, and if it is, look to see if it has an entry + if relationship_jsonld_key in template.keys() and not template[rels['jsonld_key']]: + # If there is no value recorded, fill out the template with the default relationship value (if recorded.) if 'jsonld_default' in rels.keys(): - template[rels['jsonld_key']] = rels['jsonld_default'] + template[relationship_jsonld_key] = rels['jsonld_default'] else: - del template[rels['jsonld_key']] + # If there is no default specified in the relationships dictionary, delete the empty value from the template. + del template[relationship_jsonld_key] return template def reorder_template_entries(self, template:dict) -> dict: From 2255ab14804da488529519c3bae93d7f3559eca5 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 17 Oct 2023 13:03:51 -0700 Subject: [PATCH 168/239] update some k v pair naming to be more descriptive --- schematic/schemas/data_model_jsonld.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 26c1be092..45344e4cd 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -264,19 +264,19 @@ def reorder_template_entries(self, template:dict) -> dict: for jsonld_key, entry in template.items(): # Make sure dealing with an edge relationship: - is_edge = ['True' for k, v in self.rel_dict.items() if v['jsonld_key']==jsonld_key if v['edge_rel'] == True] + is_edge = ['True' for rel_key, rel_vals in self.rel_dict.items() if rel_vals['jsonld_key']==jsonld_key if rel_vals['edge_rel'] == True] 
#if the entry is of type list and theres more than one value in the list attempt to reorder if is_edge and isinstance(entry, list) and len(entry)>1: # Get edge key from data_model_relationships using the jsonld_key: - key, edge_key = [(k, v['edge_key']) for k, v in self.rel_dict.items() if jsonld_key == v['jsonld_key']][0] - + key, edge_key = [(rel_key, rel_vals['edge_key']) for rel_key, rel_vals in self.rel_dict.items() if jsonld_key == rel_vals['jsonld_key']][0] + # Order edges sorted_edges = self.DME.get_ordered_entry(key=key, source_node_label=template_label) edge_weights_dict={edge:i for i, edge in enumerate(sorted_edges)} ordered_edges = [0]*len(edge_weights_dict.keys()) - for k,v in edge_weights_dict.items(): - ordered_edges[v] = {'@id': 'bts:' + k} + for edge,normalized_weight in edge_weights_dict.items(): + ordered_edges[normalized_weight] = {'@id': 'bts:' + edge} # Throw an error if ordered_edges does not get fully filled as expected. if 0 in ordered_edges: From 446f6d4baafa07b96a729d4aceb53c08c7fac322 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 17 Oct 2023 13:10:38 -0700 Subject: [PATCH 169/239] Update github actions poetry version --- .github/workflows/api_test.yml | 2 +- .github/workflows/pdoc.yml | 2 +- .github/workflows/publish.yml | 2 +- .github/workflows/test.yml | 2 +- README.md | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/api_test.yml b/.github/workflows/api_test.yml index fd54e8d9e..76a607b1e 100644 --- a/.github/workflows/api_test.yml +++ b/.github/workflows/api_test.yml @@ -15,7 +15,7 @@ jobs: test: runs-on: ubuntu-latest env: - POETRY_VERSION: 1.2.0 + POETRY_VERSION: 1.3.0 strategy: fail-fast: false matrix: diff --git a/.github/workflows/pdoc.yml b/.github/workflows/pdoc.yml index 8823d0c02..cf8e6def9 100644 --- a/.github/workflows/pdoc.yml +++ b/.github/workflows/pdoc.yml @@ -27,7 +27,7 @@ jobs: build: runs-on: ubuntu-latest env: - POETRY_VERSION: 1.2.0 + POETRY_VERSION: 1.3.0 strategy: 
matrix: python-version: ["3.9", "3.10"] diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 2255a52e2..804ef19e2 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -9,7 +9,7 @@ jobs: pypi_release: runs-on: ubuntu-latest env: - POETRY_VERSION: 1.2.0 + POETRY_VERSION: 1.3.0 if: github.event_name == 'push' && contains(github.ref, 'refs/tags') steps: #---------------------------------------------- diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index dbe438b37..7b4367c27 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -27,7 +27,7 @@ jobs: test: runs-on: ubuntu-latest env: - POETRY_VERSION: 1.2.0 + POETRY_VERSION: 1.3.0 strategy: fail-fast: false matrix: diff --git a/README.md b/README.md index 051e0c6e1..7d3890de8 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,7 @@ Please note we have a [code of conduct](CODE_OF_CONDUCT.md), please follow it in ``` git clone https://github.com/Sage-Bionetworks/schematic.git ``` -2. Install `poetry` (version 1.2 or later) using either the [official installer](https://python-poetry.org/docs/#installing-with-the-official-installer) or [pipx](https://python-poetry.org/docs/#installing-with-pipx). If you have an older installation of Poetry, we recommend uninstalling it first. +2. Install `poetry` (version 1.3.0 or later) using either the [official installer](https://python-poetry.org/docs/#installing-with-the-official-installer) or [pipx](https://python-poetry.org/docs/#installing-with-pipx). If you have an older installation of Poetry, we recommend uninstalling it first. 3. 
Start the virtual environment by doing: ``` From a5045cc6302c98d069e49bad702e815a3754defb Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 17 Oct 2023 14:46:33 -0700 Subject: [PATCH 170/239] update example jsonld --- tests/data/example.model.jsonld | 34 +++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/tests/data/example.model.jsonld b/tests/data/example.model.jsonld index 1ebcee1d9..f49346f0e 100644 --- a/tests/data/example.model.jsonld +++ b/tests/data/example.model.jsonld @@ -21,6 +21,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Patient", + "sms:required": "sms:false", "sms:requiresDependency": [ { "@id": "bts:PatientID" @@ -99,6 +100,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Year of Birth", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -140,6 +142,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Component", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -156,6 +159,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "DataType", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -172,6 +176,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "DataProperty", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -188,6 +193,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Female", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -204,6 +210,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Male", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -220,6 +227,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Other", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -239,6 +247,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Healthy", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -258,6 +267,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Cancer", + 
"sms:required": "sms:false", "sms:requiresDependency": [ { "@id": "bts:CancerType" @@ -352,6 +362,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "ValidValue", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -371,6 +382,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Breast", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -390,6 +402,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Colorectal", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -409,6 +422,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Lung", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -428,6 +442,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Prostate", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -447,6 +462,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Skin", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -463,6 +479,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Biospecimen", + "sms:required": "sms:false", "sms:requiresComponent": [ { "@id": "bts:Patient" @@ -540,6 +557,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Malignant", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -556,6 +574,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Bulk RNA-seq Assay", + "sms:required": "sms:false", "sms:requiresComponent": [ { "@id": "bts:Biospecimen" @@ -639,6 +658,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "FASTQ", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -658,6 +678,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "BAM", + "sms:required": "sms:false", "sms:requiresDependency": [ { "@id": "bts:GenomeBuild" @@ -682,6 +703,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "CRAM", + "sms:required": "sms:false", "sms:requiresDependency": [ { "@id": "bts:GenomeBuild" @@ 
-709,6 +731,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "CSV/TSV", + "sms:required": "sms:false", "sms:requiresDependency": [ { "@id": "bts:GenomeBuild" @@ -778,6 +801,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "GRCh37", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -794,6 +818,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "GRCh38", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -810,6 +835,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "GRCm38", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -826,6 +852,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "GRCm39", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -842,6 +869,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "MockComponent", + "sms:required": "sms:false", "sms:requiresDependency": [ { "@id": "bts:Component" @@ -1204,6 +1232,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "Check Recommended", + "sms:required": "sms:false", "sms:validationRules": [ "recommended" ] @@ -1318,6 +1347,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "ab", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -1334,6 +1364,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "cd", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -1350,6 +1381,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "ef", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -1366,6 +1398,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "gh", + "sms:required": "sms:false", "sms:validationRules": [] }, { @@ -1382,6 +1415,7 @@ "@id": "http://schema.biothings.io" }, "sms:displayName": "MockRDB", + "sms:required": "sms:false", "sms:requiresDependency": [ { "@id": "bts:Component" From ebcf3e2586137c17d96a2947c2816c98783507ec Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 17 
Oct 2023 14:55:08 -0700 Subject: [PATCH 171/239] import logging and remove unused import --- schematic/schemas/data_model_jsonld.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 45344e4cd..e341d85ed 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -2,7 +2,8 @@ from dataclasses import dataclass, field, asdict from dataclasses_json import config, dataclass_json import json -from functools import wraps +import logging + from typing import Any, Dict, Optional, Text, List import networkx as nx @@ -10,6 +11,9 @@ from schematic.schemas.data_model_relationships import DataModelRelationships from schematic.utils.schema_utils import get_label_from_display_name, convert_bool_to_str, strip_context +logging.basicConfig() +logger = logging.getLogger(__name__) + @dataclass_json @dataclass class BaseTemplate: From 1b1c9eb05b88db7dc887050b53c516a9ff015c06 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 17 Oct 2023 15:06:48 -0700 Subject: [PATCH 172/239] remove ununsed imports from data_model_graph and move to use list over Typing.List --- schematic/schemas/data_model_graph.py | 53 +++++++-------------------- 1 file changed, 13 insertions(+), 40 deletions(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index a77f0fbb7..e91b95e9a 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -1,17 +1,8 @@ import graphviz -import os -import string -import json import logging - -from typing import Any, Dict, Optional, Text, List - -import inflection +from typing import Any, Dict, Optional, Text import networkx as nx - -from rdflib import Graph, Namespace, plugin, query -from networkx.algorithms.cycles import find_cycle -from networkx.readwrite import json_graph +from rdflib import Namespace from schematic.schemas.data_model_edges import 
DataModelEdges from schematic.schemas.data_model_nodes import DataModelNodes @@ -19,27 +10,9 @@ DataModelRelationships ) -from schematic.utils.curie_utils import ( - expand_curies_in_schema, - uri2label, - extract_name_from_uri_or_curie, -) -from schematic.utils.general import find_duplicates - -from schematic.utils.io_utils import load_default, load_json, load_schemaorg from schematic.utils.schema_utils import get_property_label_from_display_name, get_class_label_from_display_name -from schematic.utils.general import dict2list, unlist +from schematic.utils.general import unlist from schematic.utils.viz_utils import visualize -from schematic.utils.validate_utils import ( - validate_class_schema, - validate_property_schema, - validate_schema, -) -from schematic.schemas.curie import uri2curie, curie2uri - - -namespaces = dict(rdf=Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#")) - logger = logging.getLogger(__name__) @@ -177,7 +150,7 @@ def find_node_range(self, node_label:Optional[str]=None, node_display_name:Optio def get_adjacent_nodes_by_relationship(self, node_label: str, - relationship: str) -> List[str]: + relationship: str) -> list[str]: """Get a list of nodes that is / are adjacent to a given node, based on a relationship type. Args: @@ -197,7 +170,7 @@ def get_adjacent_nodes_by_relationship(self, def get_component_requirements(self, source_component: str, - ) -> List[str]: + ) -> list[str]: """Get all components that are associated with a given source component and are required by it. Args: @@ -244,7 +217,7 @@ def get_descendants_by_edge_type(self, relationship: str, connected: bool = True, ordered: bool = False, - ) -> List[str]: + ) -> list[str]: """Get all nodes that are descendants of a given source node, based on a specific type of edge / relationship type. 
Args: @@ -320,7 +293,7 @@ def get_digraph_by_edge_type(self, edge_type:str) -> nx.DiGraph: def get_edges_by_relationship(self, class_label: str, relationship: str, - ) -> List[str]: + ) -> list[str]: """Get a list of out-edges of a node where the edges match a specifc type of relationship. i.e., the edges connecting a node to its neighbors are of relationship type -- "parentOf" (set of edges to children / sub-class nodes). @@ -411,7 +384,7 @@ def get_node_dependencies(self, source_node: str, display_names: bool = True, schema_ordered: bool = True, - ) -> List[str]: + ) -> list[str]: """Get the immediate dependencies that are related to a given source node. Args: @@ -455,8 +428,8 @@ def get_nodes_descendants(self, node_label:str) -> list: return all_descendants def get_nodes_display_names( - self, node_list: List[str], - ) -> List[str]: + self, node_list: list[str], + ) -> list[str]: """Get display names associated with the given list of nodes. Args: @@ -495,7 +468,7 @@ def get_node_label(self, node_display_name: str) -> str: return node_label - def get_node_range(self, node_label: Optional[str] = None, node_display_name: Optional[str] = None, display_names: bool=False) -> List[str]: + def get_node_range(self, node_label: Optional[str] = None, node_display_name: Optional[str] = None, display_names: bool=False) -> list[str]: """Get the range, i.e., all the valid values that are associated with a node label. Args: @@ -594,7 +567,7 @@ def get_subgraph_by_edge_type( return relationship_subgraph - def find_adjacent_child_classes(self, node_label: Optional[str]=None, node_display_name: Optional[str]=None)->List[str]: + def find_adjacent_child_classes(self, node_label: Optional[str]=None, node_display_name: Optional[str]=None)->list[str]: '''Find child classes of a given node. Args: node_display_name: Display name of the node to look up. 
@@ -632,7 +605,7 @@ def find_class_specific_properties(self, schema_class): properties.append(n1) return properties - def find_parent_classes(self, node_label:str) -> List[list]: + def find_parent_classes(self, node_label:str) -> list[list]: """Find all parents of the provided node Args: node_label: label of the node to find parents of From 49d28174592b363a88d45237d1db12a266b78935 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 17 Oct 2023 15:40:38 -0700 Subject: [PATCH 173/239] clean up unused imports and fix all references that were broken, update typing throughout --- schematic/schemas/data_model_graph.py | 52 ++++++++++++++------------- 1 file changed, 27 insertions(+), 25 deletions(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index e91b95e9a..bf88721a4 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -107,7 +107,7 @@ def __init__(self, self.dmr = DataModelRelationships() self.rel_dict = self.dmr.relationships_dictionary - def find_properties(self) -> set: + def find_properties(self) -> set[str]: """Identify all properties, as defined by the first node in a pair, connected with 'domainIncludes' edge type Returns: properties, set: All properties defined in the data model, each property name is defined by its label. @@ -119,7 +119,7 @@ def find_properties(self) -> set: properties = set(properties) return properties - def find_classes(self) -> set: + def find_classes(self) -> set[str]: """Identify all classes, as defined but all nodes, minus all properties (which are explicitly defined) Returns: classes, set: All classes defined in the data model, each class name is defined by its label. @@ -138,7 +138,7 @@ def find_node_range(self, node_label:Optional[str]=None, node_display_name:Optio valid_values, list: List of valid values associated with the provided node. 
""" if not node_label: - node_label = self.get_node_label(display_name) + node_label = self.get_node_label(node_display_name) valid_values=[] for node_1, node_2, rel in self.graph.edges: @@ -162,7 +162,7 @@ def get_adjacent_nodes_by_relationship(self, #checked """ nodes = set() - for (u, v, key, c) in self.graph.out_edges(node_label, data=True, keys=True): + for (_, _, key, _) in self.graph.out_edges(node_label, data=True, keys=True): if key == relationship: nodes.add(v) @@ -241,9 +241,9 @@ def get_descendants_by_edge_type(self, # prune the descendants subgraph so as to include only those edges that match the relationship type rel_edges = [] - for (u, v, key, c) in descendants_subgraph.edges(data=True, keys=True): + for (node_1, node_2, key, _) in descendants_subgraph.edges(data=True, keys=True): if key == relationship: - rel_edges.append((u, v)) + rel_edges.append((node_1, node_2)) relationship_subgraph = nx.DiGraph() relationship_subgraph.add_edges_from(rel_edges) @@ -285,13 +285,13 @@ def get_digraph_by_edge_type(self, edge_type:str) -> nx.DiGraph: Returns: ''' digraph = nx.DiGraph() - for (u, v, key, c) in self.graph.edges(data=True, keys=True): + for (node_1, node_2, key, _) in self.graph.edges(data=True, keys=True): if key == edge_type: - digraph.add_edge(u, v) + digraph.add_edge(node_1, node_2) return digraph def get_edges_by_relationship(self, - class_label: str, + node: str, relationship: str, ) -> list[str]: """Get a list of out-edges of a node where the edges match a specifc type of relationship. 
@@ -307,13 +307,13 @@ def get_edges_by_relationship(self, """ edges = [] - for (u, v, key, c) in self.graph.out_edges(node, data=True, keys=True): + for (node_1, node_2, key, _) in self.graph.out_edges(node, data=True, keys=True): if key == relationship: - edges.append((u, v)) + edges.append((node_1, node_2)) return edges - def get_ordered_entry(self, key: str, source_node_label:str) -> list: + def get_ordered_entry(self, key: str, source_node_label:str) -> list[str]: """Order the values associated with a particular node and edge_key to match original ordering in schema. Args: key: a key representing and edge relationship in DataModelRelationships.relationships_dictionary @@ -349,7 +349,7 @@ def get_ordered_entry(self, key: str, source_node_label:str) -> list: return sorted_nodes # Get values associated with a node - def get_nodes_ancestors(self, subgraph, node_label:str) -> list: + def get_nodes_ancestors(self, subgraph:nx.DiGraph, node_label:str) -> list[str]: """Get a list of nodes reachable from source component in graph Args: subgraph: networkx graph object @@ -416,14 +416,14 @@ def get_node_dependencies(self, return required_dependencies - def get_nodes_descendants(self, node_label:str) -> list: + def get_nodes_descendants(self, node_label:str) -> list[str]: """Return a list of nodes reachable from source in graph Args: node_label, str: any given node Return: all_descendants, list: nodes reachable from source in graph """ - all_descendants = list(nx.descendants(self.graph, component)) + all_descendants = list(nx.descendants(self.graph, node_label)) return all_descendants @@ -510,9 +510,8 @@ def get_node_required(self, node_label:Optional[str]=None, node_display_name: Op Note: The possible options that a node can be associated with -- "required" / "optional". Args: - node_display_name: Display name of the node for which you want look up. node_label: Label of the node for which you need to look up. 
- + node_display_name: Display name of the node for which you want look up. Returns: True: If the given node is a "required" node. False: If the given node is not a "required" (i.e., an "optional") node. @@ -528,8 +527,8 @@ def get_node_validation_rules(self, node_label: Optional[str]=None, node_display """Get validation rules associated with a node, Args: + node_label: Label of the node for which you need to look up. node_display_name: Display name of the node which you want to get the label for. - Returns: A set of validation rules associated with node, as a list. """ @@ -557,9 +556,9 @@ def get_subgraph_by_edge_type( # prune the metadata model graph so as to include only those edges that match the relationship type rel_edges = [] - for (u, v, key, c) in self.graph.out_edges(data=True, keys=True): + for (node_1, node_2, key, _) in self.graph.out_edges(data=True, keys=True): if key == relationship: - rel_edges.append((u, v)) + rel_edges.append((node_1, node_2)) relationship_subgraph = nx.DiGraph() relationship_subgraph.add_edges_from(rel_edges) @@ -575,7 +574,10 @@ def find_adjacent_child_classes(self, node_label: Optional[str]=None, node_displ Returns: List of nodes that are adjacent to the given node, by SubclassOf relationship. 
''' - return self.get_adjacent_nodes_by_relationship(node_label = schema_class, relationship = self.rel_dict['subClassOf']['edge_key']) + if not node_label: + node_label = self.get_node_label(node_display_name) + + return self.get_adjacent_nodes_by_relationship(node_label = node_label, relationship = self.rel_dict['subClassOf']['edge_key']) def find_child_classes(self, schema_class: str) -> list: """Find schema classes that inherit from the given class @@ -586,7 +588,7 @@ def find_child_classes(self, schema_class: str) -> list: """ return unlist(list(self.graph.successors(schema_class))) - def find_class_specific_properties(self, schema_class): + def find_class_specific_properties(self, schema_class:str) -> list[str]: """Find properties specifically associated with a given class Args: schema_class, str: node/class label, to identify properties for. @@ -605,7 +607,7 @@ def find_class_specific_properties(self, schema_class): properties.append(n1) return properties - def find_parent_classes(self, node_label:str) -> list[list]: + def find_parent_classes(self, node_label:str) -> list[list[str]]: """Find all parents of the provided node Args: node_label: label of the node to find parents of @@ -638,7 +640,7 @@ def full_schema_graph(self, size:Optional[int]=None)-> graphviz.Digraph: def is_class_in_schema(self, node_label: str) -> bool: """Determine if provided node_label is in the schema graph/data model. 
Args: - class_label: label of node to search for in the + node_label: label of node to search for in the Returns: True, if node is in the graph schema False, if node is not in graph schema @@ -648,7 +650,7 @@ def is_class_in_schema(self, node_label: str) -> bool: else: return False - def sub_schema_graph(self, source, direction, size=None) -> graphviz.Digraph: + def sub_schema_graph(self, source:str, direction:str, size=None) -> Optional[graphviz.Digraph]: """Create a sub-schema graph Args: source, str: source node label to start graph From 4e16111ff53294a2367f296391d9924b48316d70 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 17 Oct 2023 18:20:42 -0700 Subject: [PATCH 174/239] fix issue with ref to node --- schematic/schemas/data_model_graph.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index bf88721a4..d0aff3c94 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -162,9 +162,9 @@ def get_adjacent_nodes_by_relationship(self, #checked """ nodes = set() - for (_, _, key, _) in self.graph.out_edges(node_label, data=True, keys=True): + for (node_1, node_2, key, _) in self.graph.out_edges(node_label, data=True, keys=True): if key == relationship: - nodes.add(v) + nodes.add(node_2) return list(nodes) From 1609a04ca28ead2d4f2f47d94694c06a8da19114 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 17 Oct 2023 20:50:24 -0700 Subject: [PATCH 175/239] remove true == assert from test_schemas --- tests/test_schemas.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index d32008907..6962a2a72 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -95,10 +95,10 @@ def test_gather_jsonld_attributes_relationships(self, helpers, data_model, dmjso attribute_key = list(attr_rel_dict.keys())[0] # Check that the 
structure of the model dictionary conforms to expectations. - assert True == (type(attr_rel_dict) == dict) - assert True == (attribute_key in attr_rel_dict.keys()) - assert True == ('Relationships' in attr_rel_dict[attribute_key]) - assert True == ('Attribute' in attr_rel_dict[attribute_key]['Relationships']) + assert type(attr_rel_dict) == dict + assert attribute_key in attr_rel_dict.keys() + assert 'Relationships' in attr_rel_dict[attribute_key] + assert 'Attribute' in attr_rel_dict[attribute_key]['Relationships'] @pytest.mark.parametrize("data_model", ['example.model.jsonld'], ids=["jsonld"]) def test_parse_jsonld_model(self, helpers, data_model, dmjsonldp): @@ -115,10 +115,10 @@ def test_parse_jsonld_model(self, helpers, data_model, dmjsonldp): attribute_key = list(model_dict.keys())[0] # Check that the structure of the model dictionary conforms to expectations. - assert True == (type(model_dict) == dict) - assert True == (attribute_key in model_dict.keys()) - assert True == ('Relationships' in model_dict[attribute_key]) - assert True == ('Attribute' in model_dict[attribute_key]['Relationships']) + assert type(model_dict) == dict + assert attribute_key in model_dict.keys() + assert 'Relationships' in model_dict[attribute_key] + assert 'Attribute' in model_dict[attribute_key]['Relationships'] class TestDataModelRelationships: """Tests for DataModelRelationships class""" @@ -192,22 +192,22 @@ def test_generate_data_model_graph(self, helpers, data_model): graph = generate_graph_data_model(helpers=helpers, data_model_name=data_model) #Check that some edges are present as expected: - assert True == (('FamilyHistory', 'Breast') in graph.edges('FamilyHistory')) - assert True == (('BulkRNA-seqAssay', 'Biospecimen') in graph.edges('BulkRNA-seqAssay')) + assert ('FamilyHistory', 'Breast') in graph.edges('FamilyHistory') + assert ('BulkRNA-seqAssay', 'Biospecimen') in graph.edges('BulkRNA-seqAssay') assert ['Ab', 'Cd', 'Ef', 'Gh'] == [k for k,v in 
graph['CheckList'].items() for vk, vv in v.items() if vk == 'rangeValue'] # Check that all relationships recorded between 'CheckList' and 'Ab' are present - assert True == ('rangeValue' and 'parentOf' in graph['CheckList']['Ab']) - assert False == ('requiresDependency' in graph['CheckList']['Ab']) + assert 'rangeValue' and 'parentOf' in graph['CheckList']['Ab'] + assert 'requiresDependency' not in graph['CheckList']['Ab'] # Check nodes: - assert True == ('Patient' in graph.nodes) - assert True == ('GRCh38' in graph.nodes) + assert 'Patient' in graph.nodes + assert 'GRCh38' in graph.nodes # Check weights - assert True == (graph['Sex']['Female']['rangeValue']['weight'] == 0) - assert True == (graph['MockComponent']['CheckRegexFormat']['requiresDependency']['weight'] == 4) + assert graph['Sex']['Female']['rangeValue']['weight'] == 0 + assert graph['MockComponent']['CheckRegexFormat']['requiresDependency']['weight'] == 4 # Check Edge directions assert 4 == (len(graph.out_edges('TissueStatus'))) From 07d536559411e950edb229374b5c2448d0dc7d4a Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Oct 2023 11:47:50 -0700 Subject: [PATCH 176/239] add required to class template --- schematic/schemas/data_model_jsonld.py | 1 + 1 file changed, 1 insertion(+) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index e341d85ed..2353d8f8c 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -52,6 +52,7 @@ class ClassTemplate: magic_range_includes: list = field(default_factory=list, metadata=config(field_name="schema:rangeIncludes")) magic_isPartOf: dict = field(default_factory=dict, metadata=config(field_name="schema:isPartOf")) magic_displayName:str = field(default="", metadata=config(field_name="sms:displayName")) + magic_required: str = field(default="sms:false", metadata=config(field_name="sms:required")) magic_requiresDependency: list = field(default_factory=list, 
metadata=config(field_name="sms:requiresDependency")) magic_requiresComponent: list = field(default_factory=list, metadata=config(field_name="sms:requiresComponent")) magic_validationRules: list = field(default_factory=list, metadata=config(field_name="sms:validationRules")) From 05623478c0db7c065660bdef23123bbb9c7213c1 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Oct 2023 14:21:58 -0700 Subject: [PATCH 177/239] remove test_schemas portion --- tests/test_schemas.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 1a6d505ba..33e4e910a 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -995,9 +995,3 @@ def test_convert_graph_to_jsonld(self, helpers, data_model): assert list(jsonld_dm.keys()) == ['@context', '@graph', '@id'] assert len(jsonld_dm['@graph']) > 1 -class TestSchemas: - def test_convert_csv_to_graph(self, helpers): - return - def test_convert_jsonld_to_graph(self, helpers): - return - From 27cb86b557e22398a9edce4fccf2c95b806d0ee2 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 18 Oct 2023 15:31:09 -0700 Subject: [PATCH 178/239] change fixture name --- tests/test_schemas.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 4cab48f8b..7954697d6 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -60,7 +60,7 @@ def fixture_DMR(): yield DataModelRelationships() @pytest.fixture -def DMEdges(): +def data_model_edges(): """ Yields a Data Model Edges object for testing TODO: Update naming for DataModelGraphExplorer and fixture to avoid overlapping namespace @@ -303,7 +303,7 @@ class TestDataModelEdges: rangeIncludes relationship edge """ - def test_skip_edge(self, helpers, DMR, DMEdges): + def test_skip_edge(self, helpers, DMR, data_model_edges): # Instantiate graph object and set node G = nx.MultiDiGraph() node = "Diagnosis" @@ 
-336,7 +336,7 @@ def test_skip_edge(self, helpers, DMR, DMEdges): # Generate an edge in the graph with one node and a subset of the parsed data model # We're attempting to add an edge for a node that is the only one in the graph, # so `generate_edge` should skip adding edges and return the same graph - G = DMEdges.generate_edge(G, node, node_dict, {node:parsed_data_model[node]}, edge_relationships) + G = data_model_edges.generate_edge(G, node, node_dict, {node:parsed_data_model[node]}, edge_relationships) # Assert that no edges were added and that the current graph edges are the same as before the call to `generate_edge` assert before_edges == G.edges @@ -351,7 +351,7 @@ def test_skip_edge(self, helpers, DMR, DMEdges): "Valid Value", "all others" ]) - def test_generate_edge(self, helpers, DMR, DMEdges, node_to_add, edge_relationship): + def test_generate_edge(self, helpers, DMR, data_model_edges, node_to_add, edge_relationship): # Instantiate graph object G = nx.MultiDiGraph() @@ -382,7 +382,7 @@ def test_generate_edge(self, helpers, DMR, DMEdges, node_to_add, edge_relationsh before_edges = deepcopy(G.edges) # Generate edges for whichever node we are testing - G = DMEdges.generate_edge(G, node_to_add, all_node_dict, parsed_data_model, edge_relationships) + G = data_model_edges.generate_edge(G, node_to_add, all_node_dict, parsed_data_model, edge_relationships) # Assert that the current edges are different from the edges of the graph before assert G.edges > before_edges @@ -397,7 +397,7 @@ def test_generate_edge(self, helpers, DMR, DMEdges, node_to_add, edge_relationsh [("Patient ID", "Biospecimen", 1, "validator_dag_test.model.csv"), ("dataset_id", "cohorts", -1, "properties.test.model.csv")], ids=["list", "domainIncludes"]) - def test_generate_weights(self, helpers, DMR, DMEdges, node_to_add, other_node, expected_weight, data_model_path): + def test_generate_weights(self, helpers, DMR, data_model_edges, node_to_add, other_node, expected_weight, data_model_path): # 
Instantiate graph object G = nx.MultiDiGraph() @@ -428,7 +428,7 @@ def test_generate_weights(self, helpers, DMR, DMEdges, node_to_add, other_node, before_edges = deepcopy(G.edges) # Generate edges for whichever node we are testing - G = DMEdges.generate_edge(G, node_to_add, all_node_dict, parsed_data_model, edge_relationships) + G = data_model_edges.generate_edge(G, node_to_add, all_node_dict, parsed_data_model, edge_relationships) # Assert that the current edges are different from the edges of the graph before assert G.edges > before_edges From b58a185737f3a16d8a62b1f07b056c26db1346ed Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 18 Oct 2023 15:32:05 -0700 Subject: [PATCH 179/239] remove returns --- tests/test_schemas.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 7954697d6..5a363690f 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -340,8 +340,6 @@ def test_skip_edge(self, helpers, DMR, data_model_edges): # Assert that no edges were added and that the current graph edges are the same as before the call to `generate_edge` assert before_edges == G.edges - - return @pytest.mark.parametrize("node_to_add, edge_relationship", [("DataType", "parentOf"), @@ -390,8 +388,6 @@ def test_generate_edge(self, helpers, DMR, data_model_edges, node_to_add, edge_r # Assert that somewhere in the current edges for the node we added, that the correct relationship exists relationship_df = pd.DataFrame(G.edges, columns= ['node1', 'node2', 'edge']) assert (relationship_df['edge'] == edge_relationship).any() - - return @pytest.mark.parametrize("node_to_add, other_node, expected_weight, data_model_path", [("Patient ID", "Biospecimen", 1, "validator_dag_test.model.csv"), @@ -449,8 +445,6 @@ def test_generate_weights(self, helpers, DMR, data_model_edges, node_to_add, oth assert edges_and_weights.loc[other_node, 'weights']['weight'] == expected_weight elif 
node_to_add in ['cohorts']: assert edges_and_weights.loc[node_to_add, 'weights']['weight'] == expected_weight - - return From 5321ff3005629eacb991379a686ac7bf5741f60f Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 18 Oct 2023 15:38:16 -0700 Subject: [PATCH 180/239] add comment on negative weight --- tests/test_schemas.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 5a363690f..440ebb904 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -432,6 +432,7 @@ def test_generate_weights(self, helpers, DMR, data_model_edges, node_to_add, oth # Cast the edges and weights to a DataFrame for easier indexing edges_and_weights = pd.DataFrame(G.edges.data(), columns= ['node1', 'node2', 'weights']).set_index('node1') + # Weights are set to a negative nubmer to indicate that the weight cannot be known reliably beforehand and must be determined by reading the schema # Get the index of the property in the schema # Weights for properties are determined by their order in the schema. 
# This would allow the tests to continue to function correctly in the case were other attributes were added to the schema From d0287efbe332368923734c49b588105c0fa5f783 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 18 Oct 2023 16:28:32 -0700 Subject: [PATCH 181/239] change var cap --- tests/test_schemas.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 440ebb904..0c3179d86 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -315,20 +315,20 @@ def test_skip_edge(self, helpers, DMR, data_model_edges): parsed_data_model = data_model_parser.parse_model() # Instantiate data model Nodes object - dmn = DataModelNodes(parsed_data_model) + DMN = DataModelNodes(parsed_data_model) # Get edge relationships and all nodes from the parsed model edge_relationships = DMR.define_edge_relationships() - all_nodes = dmn.gather_all_nodes(attr_rel_dict=parsed_data_model) + all_nodes = DMN.gather_all_nodes(attr_rel_dict=parsed_data_model) # Sanity check to ensure that the node we intend to test exists in the data model assert node in all_nodes # Add a single node to the graph node_dict = {} - node_dict = dmn.generate_node_dict(node, parsed_data_model) + node_dict = DMN.generate_node_dict(node, parsed_data_model) node_dict[node] = node_dict - G = dmn.generate_node(G, node_dict) + G = DMN.generate_node(G, node_dict) # Check the edges in the graph, there should be none before_edges = deepcopy(G.edges) @@ -360,11 +360,11 @@ def test_generate_edge(self, helpers, DMR, data_model_edges, node_to_add, edge_r parsed_data_model = data_model_parser.parse_model() # Instantiate data model Nodes object - dmn = DataModelNodes(parsed_data_model) + DMN = DataModelNodes(parsed_data_model) # Get edge relationships and all nodes from the parsed model edge_relationships = DMR.define_edge_relationships() - all_nodes = 
dmn.gather_all_nodes(attr_rel_dict=parsed_data_model) + all_nodes = DMN.gather_all_nodes(attr_rel_dict=parsed_data_model) # Sanity check to ensure that the node we intend to test exists in the data model assert node_to_add in all_nodes @@ -372,9 +372,9 @@ def test_generate_edge(self, helpers, DMR, data_model_edges, node_to_add, edge_r # Add all nodes to the graph all_node_dict = {} for node in all_nodes: - node_dict = dmn.generate_node_dict(node, parsed_data_model) + node_dict = DMN.generate_node_dict(node, parsed_data_model) all_node_dict[node] = node_dict - G = dmn.generate_node(G, node_dict) + G = DMN.generate_node(G, node_dict) # Check the edges in the graph, there should be none before_edges = deepcopy(G.edges) @@ -404,11 +404,11 @@ def test_generate_weights(self, helpers, DMR, data_model_edges, node_to_add, oth parsed_data_model = data_model_parser.parse_model() # Instantiate data model Nodes object - dmn = DataModelNodes(parsed_data_model) + DMN = DataModelNodes(parsed_data_model) # Get edge relationships and all nodes from the parsed model edge_relationships = DMR.define_edge_relationships() - all_nodes = dmn.gather_all_nodes(attr_rel_dict=parsed_data_model) + all_nodes = DMN.gather_all_nodes(attr_rel_dict=parsed_data_model) # Sanity check to ensure that the node we intend to test exists in the data model assert node_to_add in all_nodes @@ -416,9 +416,9 @@ def test_generate_weights(self, helpers, DMR, data_model_edges, node_to_add, oth # Add all nodes to the graph all_node_dict = {} for node in all_nodes: - node_dict = dmn.generate_node_dict(node, parsed_data_model) + node_dict = DMN.generate_node_dict(node, parsed_data_model) all_node_dict[node] = node_dict - G = dmn.generate_node(G, node_dict) + G = DMN.generate_node(G, node_dict) # Check the edges in the graph, there should be none before_edges = deepcopy(G.edges) From 731293bb97d8efaed75b90f161093b1b2c2557d2 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Oct 2023 20:39:25 -0700 Subject: 
[PATCH 182/239] move functions out of conftest, and clean up conftest functions that were added in refactor --- tests/conftest.py | 40 +++------------------------------------- 1 file changed, 3 insertions(+), 37 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index e3293d508..e965bdc5c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -18,7 +18,6 @@ load_dotenv() - logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) @@ -55,9 +54,8 @@ def get_data_frame(path, *paths, **kwargs): fullpath = os.path.join(DATA_DIR, path, *paths) return load_df(fullpath, **kwargs) - @staticmethod - def get_data_model_explorer(path=None, *paths): + def get_data_model_graph_explorer(path=None, *paths): #commenting this now bc we dont want to have multiple instances if path is None: return @@ -77,41 +75,9 @@ def get_data_model_explorer(path=None, *paths): graph_data_model = data_model_grapher.generate_data_model_graph() #Instantiate DataModelGraphExplorer - DME = DataModelGraphExplorer(graph_data_model) - - return DME - - @staticmethod - def get_data_model_parser(data_model_name:str=None, *paths): - # Get path to data model - fullpath = Helpers.get_data_path(path=data_model_name, *paths) - # Instantiate DataModelParser - data_model_parser = DataModelParser(path_to_data_model=fullpath) - return data_model_parser - - @staticmethod - def get_data_model_json_schema(data_model_name:str=None, *paths): - # Get path to data model - fullpath = Helpers.get_data_path(path=data_model_name, *paths) - - # Instantiate DataModelParser - data_model_parser = DataModelParser(path_to_data_model = fullpath) - - #Parse Model - parsed_data_model = data_model_parser.parse_model() - - # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model) - - # Generate graph - graph_data_model = data_model_grapher.generate_data_model_graph() - - #Instantiate DataModelGraphExplorer - DME = DataModelGraphExplorer(graph_data_model) + DMGE = 
DataModelGraphExplorer(graph_data_model) - # Instantiate DataModelJsonSchema - dmjs = DataModelJSONSchema(fullpath, graph=graph_data_model) - return dmjs + return DMGE @staticmethod def get_python_version(self): From 516413c81f6f8aab02897ec72b8ca2591c113f64 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Oct 2023 20:42:50 -0700 Subject: [PATCH 183/239] clean up tests after merging all togther make cohesive, address comments --- tests/test_schemas.py | 294 +++++++++++++++++++++--------------------- 1 file changed, 146 insertions(+), 148 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 33e4e910a..6c5ec5b02 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -47,14 +47,21 @@ def test_fake_func(): NODE_DISPLAY_NAME_DICT = {'Patient':False, 'Sex': True} +def get_data_model_parser(helpers, data_model_name:str=None): + # Get path to data model + fullpath = helpers.get_data_path(path=data_model_name) -def generate_graph_data_model(helpers, data_model_name): + # Instantiate DataModelParser + data_model_parser = DataModelParser(path_to_data_model=fullpath) + return data_model_parser + +def generate_graph_data_model(helpers, data_model_name:str) -> nx.MultiDiGraph: """ Simple helper function to generate a networkx graph data model from a CSV or JSONLD data model """ - + # Instantiate Parser - data_model_parser = helpers.get_data_model_parser(data_model_name=data_model_name) + data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model_name) #Parse Model parsed_data_model = data_model_parser.parse_model() @@ -68,9 +75,9 @@ def generate_graph_data_model(helpers, data_model_name): return graph_data_model -def generate_data_model_nodes(helpers, data_model_name): +def generate_data_model_nodes(helpers, data_model_name:str) -> DataModelNodes: # Instantiate Parser - data_model_parser = helpers.get_data_model_parser(data_model_name=data_model_name) + data_model_parser = 
get_data_model_parser(helpers=helpers, data_model_name=data_model_name) # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelNodes @@ -78,22 +85,16 @@ def generate_data_model_nodes(helpers, data_model_name): return data_model_nodes -@pytest.fixture(name='dmjsonldp') -def fixture_dm_jsonld_parser(): - yield DataModelJSONLDParser() +def get_data_model_json_schema(helpers, data_model_name:str=None): + # Get path to data model + fullpath = helpers.get_data_path(path=data_model_name) -@pytest.fixture -def DME(helpers, data_model_name='example.model.csv'): - ''' - In future could pull using helpers. - ''' + # Get Data Model Graph graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model_name) - DME = DataModelGraphExplorer(graph_data_model) - yield DME -@pytest.fixture(name='dmcsvp') -def fixture_dm_csv_parser(): - yield DataModelCSVParser() + # Instantiate DataModelJsonSchema + dmjs = DataModelJSONSchema(fullpath, graph=graph_data_model) + return dmjs @pytest.fixture(name='relationships') def get_relationships(helpers): @@ -102,11 +103,19 @@ def get_relationships(helpers): relationships = list(relationships_dict.keys()) yield relationships -@pytest.fixture(name="dmr") +@pytest.fixture(name="DMR") def fixture_dmr(): """Yields a data model relationships object for testing""" yield DataModelRelationships() +@pytest.fixture(name='csv_parser') +def fixture_dm_csv_parser(): + yield DataModelCSVParser() + +@pytest.fixture(name='jsonld_parser') +def fixture_dm_jsonld_parser(): + yield DataModelJSONLDParser() + class TestDataModelParser: def test_get_base_schema_path(self, helpers): '''Test that base schema path is returned properly. @@ -115,7 +124,7 @@ def test_get_base_schema_path(self, helpers): so just test that default BioThings data model path is returned. ''' # Instantiate Data model parser. 
- data_model_parser = helpers.get_data_model_parser(data_model_name='example.model.csv') + data_model_parser = get_data_model_parser(helpers=helpers, data_model_name='example.model.csv') # Get path to default biothings model. biothings_path = data_model_parser._get_base_schema_path(base_schema=None) @@ -123,92 +132,92 @@ def test_get_base_schema_path(self, helpers): assert os.path.basename(biothings_path) == "biothings.model.jsonld" @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) - def test_get_model_type(self, helpers, data_model): + def test_get_model_type(self, helpers, data_model:str): # Instantiate Data model parser. - data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) + data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) # Check the data model type assert (data_model == 'example.model.csv') == (data_model_parser.model_type == 'CSV') assert (data_model == 'example.model.jsonld') == (data_model_parser.model_type == 'JSONLD') @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) - def test_parse_model(self, helpers, data_model): + def test_parse_model(self, helpers, data_model:str): '''Test that the correct parser is called and that a dictionary is returned in the expected structure. ''' # Instantiate Data model parser. - data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) + data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) # Parse Model - model_dict = data_model_parser.parse_model() + attr_rel_dictionary = data_model_parser.parse_model() # Get a key in the model - attribute_key = list(model_dict.keys())[0] + attribute_key = list(attr_rel_dictionary.keys())[0] # Check that the structure of the model dictionary conforms to expectations. 
- assert type(model_dict) == dict - assert attribute_key in model_dict.keys() - assert 'Relationships' in model_dict[attribute_key] - assert 'Attribute' in model_dict[attribute_key]['Relationships'] + assert type(attr_rel_dictionary) == dict + assert attribute_key in attr_rel_dictionary.keys() + assert 'Relationships' in attr_rel_dictionary[attribute_key] + assert 'Attribute' in attr_rel_dictionary[attribute_key]['Relationships'] + +@pytest.mark.parametrize("data_model", ['example.model.csv'], ids=["csv"]) class TestDataModelCsvParser: - @pytest.mark.parametrize("data_model", ['example.model.csv'], ids=["csv"]) - def test_check_schema_definition(self, helpers, data_model, dmcsvp:DataModelCSVParser): + def test_check_schema_definition(self, helpers, data_model:str, csv_parser:DataModelCSVParser): """If the csv schema contains the required headers, then this function should not return anything. Check that this is so. """ - path_to_data_model = helpers.get_data_path(path=data_model) - model_df = load_df(path_to_data_model, data_model=True) - assert None == (dmcsvp.check_schema_definition(model_df = model_df)) + #path_to_data_model = helpers.get_data_path(path=data_model) + model_df = helpers.get_data_frame(path=data_model, data_model=True) + assert None == (csv_parser.check_schema_definition(model_df = model_df)) - @pytest.mark.parametrize("data_model", ['example.model.csv'], ids=["csv"]) - def test_gather_csv_attributes_relationships(self, helpers, data_model, dmcsvp:DataModelCSVParser): + def test_gather_csv_attributes_relationships(self, helpers, data_model:str, csv_parser:DataModelCSVParser): """The output of the function is a attributes relationship dictionary, check that it is formatted properly. 
""" path_to_data_model = helpers.get_data_path(path=data_model) model_df = load_df(path_to_data_model, data_model=True) # Get output of the function: - attr_rel_dict = dmcsvp.gather_csv_attributes_relationships(model_df=model_df) + attr_rel_dict = csv_parser.gather_csv_attributes_relationships(model_df=model_df) # Test the attr_rel_dict is formatted as expected: # Get a key in the model attribute_key = list(attr_rel_dict.keys())[0] # Check that the structure of the model dictionary conforms to expectations. - assert True == (type(attr_rel_dict) == dict) - assert True == (attribute_key in attr_rel_dict.keys()) - assert True == ('Relationships' in attr_rel_dict[attribute_key]) - assert True == ('Attribute' in attr_rel_dict[attribute_key]['Relationships']) + assert type(attr_rel_dict) == dict + assert attribute_key in attr_rel_dict.keys() + assert 'Relationships' in attr_rel_dict[attribute_key] + assert 'Attribute' in attr_rel_dict[attribute_key]['Relationships'] - @pytest.mark.parametrize("data_model", ['example.model.csv'], ids=["csv"]) - def test_parse_csv_model(self, helpers, data_model, dmcsvp:DataModelCSVParser): + def test_parse_csv_model(self, helpers, data_model:str, csv_parser:DataModelCSVParser): """The output of the function is a attributes relationship dictionary, check that it is formatted properly. """ path_to_data_model = helpers.get_data_path(path=data_model) model_df = load_df(path_to_data_model, data_model=True) # Get output of the function: - model_dict = dmcsvp.parse_csv_model(path_to_data_model=path_to_data_model) + attr_rel_dictionary = csv_parser.parse_csv_model(path_to_data_model=path_to_data_model) - # Test the model_dict is formatted as expected: + # Test the attr_rel_dictionary is formatted as expected: # Get a key in the model - attribute_key = list(model_dict.keys())[0] + attribute_key = list(attr_rel_dictionary.keys())[0] # Check that the structure of the model dictionary conforms to expectations. 
- assert True == (type(model_dict) == dict) - assert True == (attribute_key in model_dict.keys()) - assert True == ('Relationships' in model_dict[attribute_key]) - assert True == ('Attribute' in model_dict[attribute_key]['Relationships']) + assert type(attr_rel_dictionary) == dict + assert attribute_key in attr_rel_dictionary.keys() + assert 'Relationships' in attr_rel_dictionary[attribute_key] + assert 'Attribute' in attr_rel_dictionary[attribute_key]['Relationships'] + +@pytest.mark.parametrize("data_model", ['example.model.jsonld'], ids=["jsonld"]) class TestDataModelJsonLdParser: - @pytest.mark.parametrize("data_model", ['example.model.jsonld'], ids=["jsonld"]) - def test_gather_jsonld_attributes_relationships(self, helpers, data_model, dmjsonldp): + def test_gather_jsonld_attributes_relationships(self, helpers, data_model:str, jsonld_parser:DataModelJSONLDParser): """The output of the function is a attributes relationship dictionary, check that it is formatted properly. """ path_to_data_model = helpers.get_data_path(path=data_model) model_jsonld = load_json(path_to_data_model) # Get output of the function: - attr_rel_dict = dmjsonldp.gather_jsonld_attributes_relationships(model_jsonld=model_jsonld['@graph']) + attr_rel_dict = jsonld_parser.gather_jsonld_attributes_relationships(model_jsonld=model_jsonld['@graph']) # Test the attr_rel_dict is formatted as expected: # Get a key in the model @@ -220,29 +229,29 @@ def test_gather_jsonld_attributes_relationships(self, helpers, data_model, dmjso assert 'Relationships' in attr_rel_dict[attribute_key] assert 'Attribute' in attr_rel_dict[attribute_key]['Relationships'] - @pytest.mark.parametrize("data_model", ['example.model.jsonld'], ids=["jsonld"]) - def test_parse_jsonld_model(self, helpers, data_model, dmjsonldp): + def test_parse_jsonld_model(self, helpers, data_model:str, jsonld_parser:DataModelJSONLDParser): """The output of the function is a attributes relationship dictionary, check that it is formatted 
properly. """ path_to_data_model = helpers.get_data_path(path=data_model) model_jsonld = load_json(path_to_data_model) # Get output of the function: - model_dict = dmjsonldp.parse_jsonld_model(path_to_data_model=path_to_data_model) + attr_rel_dictionary = jsonld_parser.parse_jsonld_model(path_to_data_model=path_to_data_model) - # Test the model_dict is formatted as expected: + # Test the attr_rel_dictionary is formatted as expected: # Get a key in the model - attribute_key = list(model_dict.keys())[0] + attribute_key = list(attr_rel_dictionary.keys())[0] # Check that the structure of the model dictionary conforms to expectations. - assert type(model_dict) == dict - assert attribute_key in model_dict.keys() - assert 'Relationships' in model_dict[attribute_key] - assert 'Attribute' in model_dict[attribute_key]['Relationships'] + assert type(attr_rel_dictionary) == dict + assert attribute_key in attr_rel_dictionary.keys() + assert 'Relationships' in attr_rel_dictionary[attribute_key] + assert 'Attribute' in attr_rel_dictionary[attribute_key]['Relationships'] + class TestDataModelRelationships: """Tests for DataModelRelationships class""" - def test_define_data_model_relationships(self, dmr: DataModelRelationships): + def test_define_data_model_relationships(self, DMR: DataModelRelationships): """Tests relationships_dictionary created has correct keys""" required_keys = [ 'jsonld_key', @@ -254,7 +263,7 @@ def test_define_data_model_relationships(self, dmr: DataModelRelationships): required_edge_keys = ['edge_key', 'edge_dir'] required_node_keys = ['node_label', 'node_attr_dict'] - relationships = dmr.relationships_dictionary + relationships = DMR.relationships_dictionary for relationship in relationships.values(): for key in required_keys: @@ -266,9 +275,9 @@ def test_define_data_model_relationships(self, dmr: DataModelRelationships): for key in required_node_keys: assert key in relationship.keys() - def test_define_required_csv_headers(self, dmr: 
DataModelRelationships): + def test_define_required_csv_headers(self, DMR: DataModelRelationships): """Tests method returns correct values""" - assert dmr.define_required_csv_headers() == [ + assert DMR.define_required_csv_headers() == [ 'Attribute', 'Description', 'Valid Values', @@ -281,10 +290,10 @@ def test_define_required_csv_headers(self, dmr: DataModelRelationships): ] @pytest.mark.parametrize("edge", [True, False], ids=["True", "False"]) - def test_retreive_rel_headers_dict(self, dmr: DataModelRelationships, edge:bool): + def test_retreive_rel_headers_dict(self, DMR: DataModelRelationships, edge:bool): """Tests method returns correct values""" if edge: - assert dmr.retreive_rel_headers_dict(edge=edge) == { + assert DMR.retreive_rel_headers_dict(edge=edge) == { 'rangeIncludes': 'Valid Values', 'requiresDependency': 'DependsOn', 'requiresComponent': 'DependsOn Component', @@ -292,7 +301,7 @@ def test_retreive_rel_headers_dict(self, dmr: DataModelRelationships, edge:bool) 'domainIncludes': 'Properties' } else: - assert dmr.retreive_rel_headers_dict(edge=edge) == { + assert DMR.retreive_rel_headers_dict(edge=edge) == { 'displayName': 'Attribute', 'label': None, 'comment': 'Description', @@ -404,15 +413,15 @@ def test_full_schema_graph(self): return @pytest.mark.parametrize("class_name, expected_in_schema", [("Patient",True), ("ptaient",False), ("Biospecimen",True), ("InvalidComponent",False)]) - def test_is_class_in_schema(self, DME, class_name, expected_in_schema): + def test_is_class_in_schema(self, helpers, class_name, expected_in_schema): """ Test to cover checking if a given class is in a schema. `is_class_in_schema` should return `True` if the class is in the schema and `False` if it is not. 
""" - + DMGE = helpers.get_data_model_graph_explorer(path='example.model.csv') # Check if class is in schema - class_in_schema = DME.is_class_in_schema(class_name) + class_in_schema = DMGE.is_class_in_schema(class_name) # Assert value is as expected assert class_in_schema == expected_in_schema @@ -420,11 +429,11 @@ def test_is_class_in_schema(self, DME, class_name, expected_in_schema): def test_sub_schema_graph(self): return +@pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) class TestDataModelNodes: - @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) def test_gather_nodes(self, helpers, data_model): # Instantiate Parser - data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) + data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) # Parse Model attr_rel_dictionary = data_model_parser.parse_model() @@ -453,10 +462,10 @@ def test_gather_nodes(self, helpers, data_model): reordered_nodes.append('Patient') assert reordered_nodes != expected_nodes - @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) def test_gather_all_nodes(self, helpers, data_model): + ## TODO # Instantiate Parser - data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) + data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) # Parse Model attr_rel_dictionary = data_model_parser.parse_model() @@ -464,7 +473,7 @@ def test_gather_all_nodes(self, helpers, data_model): # Instantiate DataModelNodes data_model_nodes = generate_data_model_nodes(helpers, data_model_name=data_model) - all_nodes = data_model_nodes.gather_all_nodes(attr_rel_dict=attr_rel_dictionary) + all_nodes = data_model_nodes.gather_all_nodes_in_model(attr_rel_dict=attr_rel_dictionary) # Make sure there are no repeat nodes assert len(all_nodes) == len(set(all_nodes)) @@ 
-478,12 +487,12 @@ def test_gather_all_nodes(self, helpers, data_model): assert actual_starter_nodes == expected_starter_nodes - def test_get_rel_node_dict_info(self, helpers, relationships): + def test_get_rel_node_dict_info(self, helpers, data_model, relationships): # Instantiate Parser - data_model_parser = helpers.get_data_model_parser(data_model_name='example.model.csv') + data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) # Instantiate DataModelNodes - data_model_nodes = generate_data_model_nodes(helpers, data_model_name='example.model.csv') + data_model_nodes = generate_data_model_nodes(helpers, data_model_name=data_model) for relationship in relationships: rel_dict_info = data_model_nodes.get_rel_node_dict_info(relationship) @@ -492,10 +501,9 @@ def test_get_rel_node_dict_info(self, helpers, relationships): assert type(rel_dict_info[1]) == dict assert 'default' in rel_dict_info[1].keys() - @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) def test_get_data_model_properties(self, helpers, data_model): # Instantiate Parser - data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) + data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) # Parse Model attr_rel_dictionary = data_model_parser.parse_model() @@ -523,11 +531,10 @@ def test_get_data_model_properties(self, helpers, data_model): assert data_model_properties == ['TestProperty'] - @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) def test_get_entry_type(self, helpers, data_model): # Instantiate Parser - data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) + data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) # Parse Model attr_rel_dictionary = data_model_parser.parse_model() @@ -553,7 +560,6 @@ def test_get_entry_type(self, helpers, data_model): # 
Check that the added property is properly loaded as a property assert data_model_nodes.get_entry_type('TestProperty') == 'property' - @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) @pytest.mark.parametrize("rel_func", list(REL_FUNC_DICT.values()), ids=list(REL_FUNC_DICT.keys())) @pytest.mark.parametrize("test_dn", list(TEST_DN_DICT.keys()), ids=list(TEST_DN_DICT.keys())) @pytest.mark.parametrize("test_bool", ['True', 'False', True, False, 'kldjk'], ids=['True_str', 'False_str', 'True_bool', 'False_bool', 'Random_str']) @@ -561,7 +567,7 @@ def test_run_rel_functions(self, helpers, data_model, rel_func, test_dn, test_bo # Call each relationship function to ensure that it is returning the desired result. # Note all the called functions will also be tested in other unit tests. # Instantiate Parser - data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) + data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) # Parse Model attr_rel_dictionary = data_model_parser.parse_model() @@ -643,11 +649,10 @@ def test_run_rel_functions(self, helpers, data_model, rel_func, test_dn, test_bo assert convert_worked==True return - @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) @pytest.mark.parametrize("node_display_name", list(NODE_DISPLAY_NAME_DICT.keys()), ids=[str(v) for v in NODE_DISPLAY_NAME_DICT.values()]) def test_generate_node_dict(self, helpers, data_model, node_display_name): # Instantiate Parser - data_model_parser = helpers.get_data_model_parser(data_model_name=data_model) + data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) # Parse Model attr_rel_dictionary = data_model_parser.parse_model() @@ -668,40 +673,36 @@ def test_generate_node_dict(self, helpers, data_model, node_display_name): if not node_dict['required'] == False: assert DATA_MODEL_DICT[data_model] == 'JSONLD' 
- @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) def test_generate_node(self, helpers, data_model): # Test adding a dummy node node_dict = {'label': 'test_label'} - path_to_data_model = helpers.get_data_path(data_model) - # Get Graph - graph_data_model = generate_graph_data_model(helpers, data_model_name=path_to_data_model) + graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) # Instantiate DataModelNodes data_model_nodes = generate_data_model_nodes(helpers, data_model_name=data_model) # Assert the test node is not already in the graph - assert False == (node_dict['label'] in graph_data_model.nodes) + assert node_dict['label'] not in graph_data_model.nodes # Add test node data_model_nodes.generate_node(graph_data_model, node_dict) # Check that the test node has been added - assert True == (node_dict['label'] in graph_data_model.nodes) + assert node_dict['label'] in graph_data_model.nodes class TestDataModelEdges: def test_generate_edge(self,helpers): return - +@pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) class TestDataModelJsonSchema: - @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) @pytest.mark.parametrize("node_range", [[], ['healthy'], ['healthy', 'cancer']], ids=['empty_range', "single_range", "multi_range"]) @pytest.mark.parametrize("node_name", ['', 'Diagnosis'], ids=['empty_node_name', "Diagnosis_node_name"]) @pytest.mark.parametrize("blank", [True, False], ids=["True_blank", "False_blank"]) def test_get_array_schema(self, helpers, data_model, node_range, node_name, blank): - dmjs = helpers.get_data_model_json_schema(data_model_name=data_model) + dmjs = get_data_model_json_schema(helpers=helpers, data_model_name=data_model) array_schema = dmjs.get_array_schema(node_range=node_range, node_name=node_name, blank=blank) # check node_name is recoreded as 
the key to the array schema @@ -718,21 +719,19 @@ def test_get_array_schema(self, helpers, data_model, node_range, node_name, blan assert array_schema[node_name]['items']['enum']== node_range assert len(array_schema[node_name]['items']['enum'])==len(node_range) - @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) @pytest.mark.parametrize("node_name", ['', 'Diagnosis'], ids=['empty_node_name', "Diagnosis_node_name"]) def test_get_non_blank_schema(self, helpers, data_model, node_name): - dmjs = helpers.get_data_model_json_schema(data_model_name=data_model) + dmjs = get_data_model_json_schema(helpers=helpers, data_model_name=data_model) non_blank_schema = dmjs.get_non_blank_schema(node_name=node_name) # check node_name is recoreded as the key to the array schema assert node_name in non_blank_schema assert non_blank_schema[node_name] == {"not": {"type": "null"}, "minLength": 1} - @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) @pytest.mark.parametrize("node_range", [[], ['healthy'], ['healthy', 'cancer']], ids=['empty_range', "single_range", "multi_range"]) @pytest.mark.parametrize("node_name", ['', 'Diagnosis'], ids=['empty_node_name', "Diagnosis_node_name"]) @pytest.mark.parametrize("blank", [True, False], ids=["True_blank", "False_blank"]) def test_get_range_schema(self, helpers, data_model, node_range, node_name, blank): - dmjs = helpers.get_data_model_json_schema(data_model_name=data_model) + dmjs = get_data_model_json_schema(helpers=helpers, data_model_name=data_model) range_schema = dmjs.get_range_schema(node_range=node_range, node_name=node_name, blank=blank) # check node_name is recoreded as the key to the array schema @@ -746,11 +745,10 @@ def test_get_range_schema(self, helpers, data_model, node_range, node_name, blan assert range_schema[node_name]['enum']== node_range assert len(range_schema[node_name]['enum'])==len(node_range) - 
@pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) @pytest.mark.parametrize("source_node", ['', 'Patient'], ids=['empty_node_name', "patient_source"]) @pytest.mark.parametrize("schema_name", ['', 'Test_Schema_Name'], ids=['empty_schema_name', "schema_name"]) def test_get_json_validation_schema(self, helpers, data_model, source_node, schema_name): - dmjs = helpers.get_data_model_json_schema(data_model_name=data_model) + dmjs = get_data_model_json_schema(helpers=helpers, data_model_name=data_model) try: # Get validation schema @@ -771,6 +769,7 @@ def test_get_json_validation_schema(self, helpers, data_model, source_node, sche # Should only fail if no source node is provided. assert source_node == '' + class TestDataModelJsonLd: @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) def test_init(self, helpers, data_model): @@ -788,8 +787,7 @@ def test_init(self, helpers, data_model): assert type(data_model_jsonld.DME) == DataModelGraphExplorer assert data_model_jsonld.output_path == '' - @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) - def test_base_jsonld_template(self, helpers, data_model): + def test_base_jsonld_template(self, helpers): # Gather the templates base_template = BaseTemplate() base_jsonld_template = json.loads(base_template.to_json()) @@ -799,6 +797,46 @@ def test_base_jsonld_template(self, helpers, data_model): assert '@graph' in base_jsonld_template assert '@id' in base_jsonld_template + def test_property_template(self, helpers): + # Get Property Template + empty_template = PropertyTemplate() + property_template = json.loads(empty_template.to_json()) + + expected_property_template = { + "@id": "", + "@type": "rdf:Property", + "rdfs:comment": "", + "rdfs:label": "", + "schema:domainIncludes": [], + "schema:rangeIncludes": [], + "schema:isPartOf": {}, + "sms:displayName": "", + 
"sms:required": "sms:false", + "sms:validationRules": [], + } + assert property_template == expected_property_template + + def test_class_template(self, helpers): + # Get Class Template + empty_template = ClassTemplate() + class_template = json.loads(empty_template.to_json()) + + expected_class_template = { + "@id": "", + "@type": "rdfs:Class", + "rdfs:comment": "", + "rdfs:label": "", + "rdfs:subClassOf": [], + "schema:isPartOf": {}, + "schema:rangeIncludes": [], + "sms:displayName": "", + "sms:required": "sms:false", + "sms:requiresDependency": [], + "sms:requiresComponent": [], + "sms:validationRules": [], + } + assert class_template == expected_class_template + @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) @pytest.mark.parametrize("template_type", ['property', 'class'], ids=['property', 'class']) @pytest.mark.parametrize("node", ['', 'Patient'], ids=['no node', 'Patient']) @@ -867,7 +905,7 @@ def test_add_contexts_to_entries(self, helpers, data_model, template_type): assert 'bts' in object_template['@id'] @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) - def test_clean_template(self, helpers, data_model): + def test_clean_template(self, helpers, data_model:str, DMR:DataModelRelationships): # TODO: This will need to change with contexts bc they are hard coded here. 
# Get Graph graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) @@ -887,7 +925,7 @@ def test_clean_template(self, helpers, data_model): # Fill out some mock entries in the template: template_copy['@id'] == 'bts:CheckURL' template_copy['rdfs:label'] == 'CheckURL' - data_model_relationships=data_model_jsonld.dmr.relationships_dictionary + data_model_relationships=DMR.relationships_dictionary # Clean template data_model_jsonld.clean_template(template=template_copy, data_model_relationships=data_model_relationships) @@ -931,46 +969,6 @@ def test_reorder_template_entries(self, helpers, data_model, valid_values): else: assert template_copy['schema:rangeIncludes'] == [] - def test_property_template(self, helpers): - # Get Property Template - empty_template = PropertyTemplate() - property_template = json.loads(empty_template.to_json()) - - expected_property_template = { - "@id": "", - "@type": "rdf:Property", - "rdfs:comment": "", - "rdfs:label": "", - "schema:domainIncludes": [], - "schema:rangeIncludes": [], - "schema:isPartOf": {}, - "sms:displayName": "", - "sms:required": "sms:false", - "sms:validationRules": [], - } - assert property_template == expected_property_template - - def test_class_template(self, helpers): - # Get Class Template - empty_template = ClassTemplate() - class_template = json.loads(empty_template.to_json()) - - expected_class_template = { - "@id": "", - "@type": "rdfs:Class", - "rdfs:comment": "", - "rdfs:label": "", - "rdfs:subClassOf": [], - "schema:isPartOf": {}, - "schema:rangeIncludes": [], - "sms:displayName": "", - "sms:required": "sms:false", - "sms:requiresDependency": [], - "sms:requiresComponent": [], - "sms:validationRules": [], - } - assert class_template == expected_class_template - @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) def test_generate_jsonld_object(self, helpers, data_model): # Check that JSONLD object is being made, and has some 
populated entries. From 163f096d3d73cebc01ba7391ef470e2c50831f8f Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Oct 2023 20:49:27 -0700 Subject: [PATCH 184/239] run black on tests/test_schemas.py --- tests/test_schemas.py | 934 +++++++++++++++++++++++++++--------------- 1 file changed, 595 insertions(+), 339 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 6c5ec5b02..14de82301 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -9,7 +9,12 @@ import random from schematic.utils.df_utils import load_df -from schematic.utils.schema_utils import get_label_from_display_name, get_attribute_display_name_from_label, convert_bool_to_str, parse_validation_rules +from schematic.utils.schema_utils import ( + get_label_from_display_name, + get_attribute_display_name_from_label, + convert_bool_to_str, + parse_validation_rules, +) from schematic.utils.io_utils import load_json from schematic.schemas.data_model_graph import DataModelGraph @@ -17,37 +22,45 @@ from schematic.schemas.data_model_edges import DataModelEdges from schematic.schemas.data_model_graph import DataModelGraphExplorer from schematic.schemas.data_model_relationships import DataModelRelationships -from schematic.schemas.data_model_jsonld import DataModelJsonLD, convert_graph_to_jsonld, BaseTemplate, PropertyTemplate, ClassTemplate +from schematic.schemas.data_model_jsonld import ( + DataModelJsonLD, + convert_graph_to_jsonld, + BaseTemplate, + PropertyTemplate, + ClassTemplate, +) from schematic.schemas.data_model_json_schema import DataModelJSONSchema -from schematic.schemas.data_model_parser import DataModelParser, DataModelCSVParser, DataModelJSONLDParser +from schematic.schemas.data_model_parser import ( + DataModelParser, + DataModelCSVParser, + DataModelJSONLDParser, +) logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) -DATA_MODEL_DICT = { - 'example.model.csv': "CSV", - 'example.model.jsonld': "JSONLD" - } +DATA_MODEL_DICT = 
{"example.model.csv": "CSV", "example.model.jsonld": "JSONLD"} + def test_fake_func(): return + REL_FUNC_DICT = { - 'get_attribute_display_name_from_label':get_attribute_display_name_from_label, - 'parse_validation_rules': parse_validation_rules, - 'get_label_from_display_name': get_label_from_display_name, - 'convert_bool_to_str': convert_bool_to_str, - 'test_fake_func': test_fake_func, + "get_attribute_display_name_from_label": get_attribute_display_name_from_label, + "parse_validation_rules": parse_validation_rules, + "get_label_from_display_name": get_label_from_display_name, + "convert_bool_to_str": convert_bool_to_str, + "test_fake_func": test_fake_func, +} +TEST_DN_DICT = { + "Bio Things": {"class": "BioThings", "property": "bioThings"}, + "bio things": {"class": "Biothings", "property": "biothings"}, } -TEST_DN_DICT = {'Bio Things': {'class': 'BioThings', - 'property': 'bioThings'}, - 'bio things': {'class': 'Biothings', - 'property': 'biothings'}, - } -NODE_DISPLAY_NAME_DICT = {'Patient':False, - 'Sex': True} - -def get_data_model_parser(helpers, data_model_name:str=None): +NODE_DISPLAY_NAME_DICT = {"Patient": False, "Sex": True} + + +def get_data_model_parser(helpers, data_model_name: str = None): # Get path to data model fullpath = helpers.get_data_path(path=data_model_name) @@ -55,15 +68,18 @@ def get_data_model_parser(helpers, data_model_name:str=None): data_model_parser = DataModelParser(path_to_data_model=fullpath) return data_model_parser -def generate_graph_data_model(helpers, data_model_name:str) -> nx.MultiDiGraph: + +def generate_graph_data_model(helpers, data_model_name: str) -> nx.MultiDiGraph: """ Simple helper function to generate a networkx graph data model from a CSV or JSONLD data model """ # Instantiate Parser - data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model_name) + data_model_parser = get_data_model_parser( + helpers=helpers, data_model_name=data_model_name + ) - #Parse Model + # Parse Model 
parsed_data_model = data_model_parser.parse_model() # Convert parsed model to graph @@ -75,9 +91,12 @@ def generate_graph_data_model(helpers, data_model_name:str) -> nx.MultiDiGraph: return graph_data_model -def generate_data_model_nodes(helpers, data_model_name:str) -> DataModelNodes: + +def generate_data_model_nodes(helpers, data_model_name: str) -> DataModelNodes: # Instantiate Parser - data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model_name) + data_model_parser = get_data_model_parser( + helpers=helpers, data_model_name=data_model_name + ) # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelNodes @@ -85,68 +104,88 @@ def generate_data_model_nodes(helpers, data_model_name:str) -> DataModelNodes: return data_model_nodes -def get_data_model_json_schema(helpers, data_model_name:str=None): +def get_data_model_json_schema(helpers, data_model_name: str = None): # Get path to data model fullpath = helpers.get_data_path(path=data_model_name) # Get Data Model Graph - graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model_name) + graph_data_model = generate_graph_data_model( + helpers, data_model_name=data_model_name + ) # Instantiate DataModelJsonSchema dmjs = DataModelJSONSchema(fullpath, graph=graph_data_model) return dmjs -@pytest.fixture(name='relationships') + +@pytest.fixture(name="relationships") def get_relationships(helpers): DMR = DataModelRelationships() relationships_dict = DMR.relationships_dictionary relationships = list(relationships_dict.keys()) yield relationships + @pytest.fixture(name="DMR") def fixture_dmr(): """Yields a data model relationships object for testing""" yield DataModelRelationships() -@pytest.fixture(name='csv_parser') + +@pytest.fixture(name="csv_parser") def fixture_dm_csv_parser(): yield DataModelCSVParser() -@pytest.fixture(name='jsonld_parser') + +@pytest.fixture(name="jsonld_parser") def fixture_dm_jsonld_parser(): yield 
DataModelJSONLDParser() + class TestDataModelParser: def test_get_base_schema_path(self, helpers): - '''Test that base schema path is returned properly. + """Test that base schema path is returned properly. Note: data model parser class does not currently accept an new path to a base schema, so just test that default BioThings data model path is returned. - ''' + """ # Instantiate Data model parser. - data_model_parser = get_data_model_parser(helpers=helpers, data_model_name='example.model.csv') + data_model_parser = get_data_model_parser( + helpers=helpers, data_model_name="example.model.csv" + ) # Get path to default biothings model. biothings_path = data_model_parser._get_base_schema_path(base_schema=None) assert os.path.basename(biothings_path) == "biothings.model.jsonld" - @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) - def test_get_model_type(self, helpers, data_model:str): + @pytest.mark.parametrize( + "data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values()) + ) + def test_get_model_type(self, helpers, data_model: str): # Instantiate Data model parser. - data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) + data_model_parser = get_data_model_parser( + helpers=helpers, data_model_name=data_model + ) # Check the data model type - assert (data_model == 'example.model.csv') == (data_model_parser.model_type == 'CSV') - assert (data_model == 'example.model.jsonld') == (data_model_parser.model_type == 'JSONLD') - - @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) - def test_parse_model(self, helpers, data_model:str): - '''Test that the correct parser is called and that a dictionary is returned in the expected structure. 
- ''' + assert (data_model == "example.model.csv") == ( + data_model_parser.model_type == "CSV" + ) + assert (data_model == "example.model.jsonld") == ( + data_model_parser.model_type == "JSONLD" + ) + + @pytest.mark.parametrize( + "data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values()) + ) + def test_parse_model(self, helpers, data_model: str): + """Test that the correct parser is called and that a dictionary is returned in the expected structure.""" # Instantiate Data model parser. - data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) - + data_model_parser = get_data_model_parser( + helpers=helpers, data_model_name=data_model + ) + # Parse Model attr_rel_dictionary = data_model_parser.parse_model() @@ -156,27 +195,31 @@ def test_parse_model(self, helpers, data_model:str): # Check that the structure of the model dictionary conforms to expectations. assert type(attr_rel_dictionary) == dict assert attribute_key in attr_rel_dictionary.keys() - assert 'Relationships' in attr_rel_dictionary[attribute_key] - assert 'Attribute' in attr_rel_dictionary[attribute_key]['Relationships'] + assert "Relationships" in attr_rel_dictionary[attribute_key] + assert "Attribute" in attr_rel_dictionary[attribute_key]["Relationships"] -@pytest.mark.parametrize("data_model", ['example.model.csv'], ids=["csv"]) +@pytest.mark.parametrize("data_model", ["example.model.csv"], ids=["csv"]) class TestDataModelCsvParser: - def test_check_schema_definition(self, helpers, data_model:str, csv_parser:DataModelCSVParser): - """If the csv schema contains the required headers, then this function should not return anything. Check that this is so. - """ - #path_to_data_model = helpers.get_data_path(path=data_model) + def test_check_schema_definition( + self, helpers, data_model: str, csv_parser: DataModelCSVParser + ): + """If the csv schema contains the required headers, then this function should not return anything. 
Check that this is so.""" + # path_to_data_model = helpers.get_data_path(path=data_model) model_df = helpers.get_data_frame(path=data_model, data_model=True) - assert None == (csv_parser.check_schema_definition(model_df = model_df)) + assert None == (csv_parser.check_schema_definition(model_df=model_df)) - def test_gather_csv_attributes_relationships(self, helpers, data_model:str, csv_parser:DataModelCSVParser): - """The output of the function is a attributes relationship dictionary, check that it is formatted properly. - """ + def test_gather_csv_attributes_relationships( + self, helpers, data_model: str, csv_parser: DataModelCSVParser + ): + """The output of the function is a attributes relationship dictionary, check that it is formatted properly.""" path_to_data_model = helpers.get_data_path(path=data_model) model_df = load_df(path_to_data_model, data_model=True) # Get output of the function: - attr_rel_dict = csv_parser.gather_csv_attributes_relationships(model_df=model_df) + attr_rel_dict = csv_parser.gather_csv_attributes_relationships( + model_df=model_df + ) # Test the attr_rel_dict is formatted as expected: # Get a key in the model @@ -185,17 +228,20 @@ def test_gather_csv_attributes_relationships(self, helpers, data_model:str, csv_ # Check that the structure of the model dictionary conforms to expectations. assert type(attr_rel_dict) == dict assert attribute_key in attr_rel_dict.keys() - assert 'Relationships' in attr_rel_dict[attribute_key] - assert 'Attribute' in attr_rel_dict[attribute_key]['Relationships'] + assert "Relationships" in attr_rel_dict[attribute_key] + assert "Attribute" in attr_rel_dict[attribute_key]["Relationships"] - def test_parse_csv_model(self, helpers, data_model:str, csv_parser:DataModelCSVParser): - """The output of the function is a attributes relationship dictionary, check that it is formatted properly. 
- """ + def test_parse_csv_model( + self, helpers, data_model: str, csv_parser: DataModelCSVParser + ): + """The output of the function is a attributes relationship dictionary, check that it is formatted properly.""" path_to_data_model = helpers.get_data_path(path=data_model) model_df = load_df(path_to_data_model, data_model=True) # Get output of the function: - attr_rel_dictionary = csv_parser.parse_csv_model(path_to_data_model=path_to_data_model) + attr_rel_dictionary = csv_parser.parse_csv_model( + path_to_data_model=path_to_data_model + ) # Test the attr_rel_dictionary is formatted as expected: # Get a key in the model @@ -204,20 +250,23 @@ def test_parse_csv_model(self, helpers, data_model:str, csv_parser:DataModelCSVP # Check that the structure of the model dictionary conforms to expectations. assert type(attr_rel_dictionary) == dict assert attribute_key in attr_rel_dictionary.keys() - assert 'Relationships' in attr_rel_dictionary[attribute_key] - assert 'Attribute' in attr_rel_dictionary[attribute_key]['Relationships'] + assert "Relationships" in attr_rel_dictionary[attribute_key] + assert "Attribute" in attr_rel_dictionary[attribute_key]["Relationships"] -@pytest.mark.parametrize("data_model", ['example.model.jsonld'], ids=["jsonld"]) +@pytest.mark.parametrize("data_model", ["example.model.jsonld"], ids=["jsonld"]) class TestDataModelJsonLdParser: - def test_gather_jsonld_attributes_relationships(self, helpers, data_model:str, jsonld_parser:DataModelJSONLDParser): - """The output of the function is a attributes relationship dictionary, check that it is formatted properly. 
- """ + def test_gather_jsonld_attributes_relationships( + self, helpers, data_model: str, jsonld_parser: DataModelJSONLDParser + ): + """The output of the function is a attributes relationship dictionary, check that it is formatted properly.""" path_to_data_model = helpers.get_data_path(path=data_model) model_jsonld = load_json(path_to_data_model) # Get output of the function: - attr_rel_dict = jsonld_parser.gather_jsonld_attributes_relationships(model_jsonld=model_jsonld['@graph']) + attr_rel_dict = jsonld_parser.gather_jsonld_attributes_relationships( + model_jsonld=model_jsonld["@graph"] + ) # Test the attr_rel_dict is formatted as expected: # Get a key in the model @@ -226,17 +275,20 @@ def test_gather_jsonld_attributes_relationships(self, helpers, data_model:str, j # Check that the structure of the model dictionary conforms to expectations. assert type(attr_rel_dict) == dict assert attribute_key in attr_rel_dict.keys() - assert 'Relationships' in attr_rel_dict[attribute_key] - assert 'Attribute' in attr_rel_dict[attribute_key]['Relationships'] + assert "Relationships" in attr_rel_dict[attribute_key] + assert "Attribute" in attr_rel_dict[attribute_key]["Relationships"] - def test_parse_jsonld_model(self, helpers, data_model:str, jsonld_parser:DataModelJSONLDParser): - """The output of the function is a attributes relationship dictionary, check that it is formatted properly. 
- """ + def test_parse_jsonld_model( + self, helpers, data_model: str, jsonld_parser: DataModelJSONLDParser + ): + """The output of the function is a attributes relationship dictionary, check that it is formatted properly.""" path_to_data_model = helpers.get_data_path(path=data_model) model_jsonld = load_json(path_to_data_model) # Get output of the function: - attr_rel_dictionary = jsonld_parser.parse_jsonld_model(path_to_data_model=path_to_data_model) + attr_rel_dictionary = jsonld_parser.parse_jsonld_model( + path_to_data_model=path_to_data_model + ) # Test the attr_rel_dictionary is formatted as expected: # Get a key in the model @@ -245,30 +297,31 @@ def test_parse_jsonld_model(self, helpers, data_model:str, jsonld_parser:DataMod # Check that the structure of the model dictionary conforms to expectations. assert type(attr_rel_dictionary) == dict assert attribute_key in attr_rel_dictionary.keys() - assert 'Relationships' in attr_rel_dictionary[attribute_key] - assert 'Attribute' in attr_rel_dictionary[attribute_key]['Relationships'] + assert "Relationships" in attr_rel_dictionary[attribute_key] + assert "Attribute" in attr_rel_dictionary[attribute_key]["Relationships"] class TestDataModelRelationships: """Tests for DataModelRelationships class""" + def test_define_data_model_relationships(self, DMR: DataModelRelationships): """Tests relationships_dictionary created has correct keys""" required_keys = [ - 'jsonld_key', - 'csv_header', - 'type', - 'edge_rel', - 'required_header' + "jsonld_key", + "csv_header", + "type", + "edge_rel", + "required_header", ] - required_edge_keys = ['edge_key', 'edge_dir'] - required_node_keys = ['node_label', 'node_attr_dict'] + required_edge_keys = ["edge_key", "edge_dir"] + required_node_keys = ["node_label", "node_attr_dict"] relationships = DMR.relationships_dictionary for relationship in relationships.values(): for key in required_keys: assert key in relationship.keys() - if relationship['edge_rel']: + if 
relationship["edge_rel"]: for key in required_edge_keys: assert key in relationship.keys() else: @@ -278,69 +331,82 @@ def test_define_data_model_relationships(self, DMR: DataModelRelationships): def test_define_required_csv_headers(self, DMR: DataModelRelationships): """Tests method returns correct values""" assert DMR.define_required_csv_headers() == [ - 'Attribute', - 'Description', - 'Valid Values', - 'DependsOn', - 'DependsOn Component', - 'Required', 'Parent', - 'Validation Rules', - 'Properties', - 'Source' + "Attribute", + "Description", + "Valid Values", + "DependsOn", + "DependsOn Component", + "Required", + "Parent", + "Validation Rules", + "Properties", + "Source", ] @pytest.mark.parametrize("edge", [True, False], ids=["True", "False"]) - def test_retreive_rel_headers_dict(self, DMR: DataModelRelationships, edge:bool): + def test_retreive_rel_headers_dict(self, DMR: DataModelRelationships, edge: bool): """Tests method returns correct values""" if edge: assert DMR.retreive_rel_headers_dict(edge=edge) == { - 'rangeIncludes': 'Valid Values', - 'requiresDependency': 'DependsOn', - 'requiresComponent': 'DependsOn Component', - 'subClassOf': 'Parent', - 'domainIncludes': 'Properties' + "rangeIncludes": "Valid Values", + "requiresDependency": "DependsOn", + "requiresComponent": "DependsOn Component", + "subClassOf": "Parent", + "domainIncludes": "Properties", } else: assert DMR.retreive_rel_headers_dict(edge=edge) == { - 'displayName': 'Attribute', - 'label': None, - 'comment': 'Description', - 'required': 'Required', - 'validationRules': 'Validation Rules', - 'isPartOf': None, - 'id': 'Source' + "displayName": "Attribute", + "label": None, + "comment": "Description", + "required": "Required", + "validationRules": "Validation Rules", + "isPartOf": None, + "id": "Source", } class TestDataModelGraph: - @pytest.mark.parametrize("data_model", ['example.model.csv', 'example.model.jsonld'], ids=["csv", "jsonld"]) + @pytest.mark.parametrize( + "data_model", + 
["example.model.csv", "example.model.jsonld"], + ids=["csv", "jsonld"], + ) def test_generate_data_model_graph(self, helpers, data_model): - '''Check that data model graph is constructed properly, requires calling various classes. + """Check that data model graph is constructed properly, requires calling various classes. TODO: In another test, check conditional dependencies. - ''' + """ graph = generate_graph_data_model(helpers=helpers, data_model_name=data_model) - - #Check that some edges are present as expected: - assert ('FamilyHistory', 'Breast') in graph.edges('FamilyHistory') - assert ('BulkRNA-seqAssay', 'Biospecimen') in graph.edges('BulkRNA-seqAssay') - assert ['Ab', 'Cd', 'Ef', 'Gh'] == [k for k,v in graph['CheckList'].items() for vk, vv in v.items() if vk == 'rangeValue'] + + # Check that some edges are present as expected: + assert ("FamilyHistory", "Breast") in graph.edges("FamilyHistory") + assert ("BulkRNA-seqAssay", "Biospecimen") in graph.edges("BulkRNA-seqAssay") + assert ["Ab", "Cd", "Ef", "Gh"] == [ + k + for k, v in graph["CheckList"].items() + for vk, vv in v.items() + if vk == "rangeValue" + ] # Check that all relationships recorded between 'CheckList' and 'Ab' are present - assert 'rangeValue' and 'parentOf' in graph['CheckList']['Ab'] - assert 'requiresDependency' not in graph['CheckList']['Ab'] - - # Check nodes: - assert 'Patient' in graph.nodes - assert 'GRCh38' in graph.nodes + assert "rangeValue" and "parentOf" in graph["CheckList"]["Ab"] + assert "requiresDependency" not in graph["CheckList"]["Ab"] + # Check nodes: + assert "Patient" in graph.nodes + assert "GRCh38" in graph.nodes # Check weights - assert graph['Sex']['Female']['rangeValue']['weight'] == 0 - assert graph['MockComponent']['CheckRegexFormat']['requiresDependency']['weight'] == 4 + assert graph["Sex"]["Female"]["rangeValue"]["weight"] == 0 + assert ( + graph["MockComponent"]["CheckRegexFormat"]["requiresDependency"]["weight"] + == 4 + ) # Check Edge directions - assert 
4 == (len(graph.out_edges('TissueStatus'))) - assert 2 == (len(graph.in_edges('TissueStatus'))) + assert 4 == (len(graph.out_edges("TissueStatus"))) + assert 2 == (len(graph.in_edges("TissueStatus"))) + class TestDataModelGraphExplorer: def test_find_properties(self): @@ -412,14 +478,22 @@ def test_find_parent_classes(self): def test_full_schema_graph(self): return - @pytest.mark.parametrize("class_name, expected_in_schema", [("Patient",True), ("ptaient",False), ("Biospecimen",True), ("InvalidComponent",False)]) + @pytest.mark.parametrize( + "class_name, expected_in_schema", + [ + ("Patient", True), + ("ptaient", False), + ("Biospecimen", True), + ("InvalidComponent", False), + ], + ) def test_is_class_in_schema(self, helpers, class_name, expected_in_schema): """ Test to cover checking if a given class is in a schema. `is_class_in_schema` should return `True` if the class is in the schema and `False` if it is not. """ - DMGE = helpers.get_data_model_graph_explorer(path='example.model.csv') + DMGE = helpers.get_data_model_graph_explorer(path="example.model.csv") # Check if class is in schema class_in_schema = DMGE.is_class_in_schema(class_name) @@ -429,19 +503,26 @@ def test_is_class_in_schema(self, helpers, class_name, expected_in_schema): def test_sub_schema_graph(self): return -@pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + +@pytest.mark.parametrize( + "data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values()) +) class TestDataModelNodes: def test_gather_nodes(self, helpers, data_model): # Instantiate Parser - data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) + data_model_parser = get_data_model_parser( + helpers=helpers, data_model_name=data_model + ) # Parse Model attr_rel_dictionary = data_model_parser.parse_model() # Instantiate DataModelNodes - data_model_nodes = generate_data_model_nodes(helpers, data_model_name=data_model) + data_model_nodes 
= generate_data_model_nodes( + helpers, data_model_name=data_model + ) - attr_info = ('Patient', attr_rel_dictionary['Patient']) + attr_info = ("Patient", attr_rel_dictionary["Patient"]) nodes = data_model_nodes.gather_nodes(attr_info=attr_info) # Make sure there are no repeat nodes @@ -449,31 +530,52 @@ def test_gather_nodes(self, helpers, data_model): # Make sure the nodes returned conform to expectations (values and order) ## The parsing records display names for relationships for CSV and labels for JSONLD, so the expectations are different between the two. - if DATA_MODEL_DICT[data_model]=='CSV': - expected_nodes = ['Patient', 'Patient ID', 'Sex', 'Year of Birth', 'Diagnosis', 'Component', 'DataType'] - elif DATA_MODEL_DICT[data_model] == 'JSONLD': - expected_nodes = ['Patient', 'PatientID', 'Sex', 'YearofBirth', 'Diagnosis', 'Component', 'DataType'] + if DATA_MODEL_DICT[data_model] == "CSV": + expected_nodes = [ + "Patient", + "Patient ID", + "Sex", + "Year of Birth", + "Diagnosis", + "Component", + "DataType", + ] + elif DATA_MODEL_DICT[data_model] == "JSONLD": + expected_nodes = [ + "Patient", + "PatientID", + "Sex", + "YearofBirth", + "Diagnosis", + "Component", + "DataType", + ] assert nodes == expected_nodes # Ensure order is tested. 
reordered_nodes = nodes.copy() - reordered_nodes.remove('Patient') - reordered_nodes.append('Patient') + reordered_nodes.remove("Patient") + reordered_nodes.append("Patient") assert reordered_nodes != expected_nodes def test_gather_all_nodes(self, helpers, data_model): - ## TODO # Instantiate Parser - data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) + data_model_parser = get_data_model_parser( + helpers=helpers, data_model_name=data_model + ) # Parse Model attr_rel_dictionary = data_model_parser.parse_model() # Instantiate DataModelNodes - data_model_nodes = generate_data_model_nodes(helpers, data_model_name=data_model) + data_model_nodes = generate_data_model_nodes( + helpers, data_model_name=data_model + ) - all_nodes = data_model_nodes.gather_all_nodes_in_model(attr_rel_dict=attr_rel_dictionary) + all_nodes = data_model_nodes.gather_all_nodes_in_model( + attr_rel_dict=attr_rel_dictionary + ) # Make sure there are no repeat nodes assert len(all_nodes) == len(set(all_nodes)) @@ -483,58 +585,71 @@ def test_gather_all_nodes(self, helpers, data_model): first_attribute = list(attr_rel_dictionary.keys())[0] attr_info = (first_attribute, attr_rel_dictionary[first_attribute]) expected_starter_nodes = data_model_nodes.gather_nodes(attr_info=attr_info) - actual_starter_nodes = all_nodes[0:len(expected_starter_nodes)] + actual_starter_nodes = all_nodes[0 : len(expected_starter_nodes)] assert actual_starter_nodes == expected_starter_nodes def test_get_rel_node_dict_info(self, helpers, data_model, relationships): # Instantiate Parser - data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) + data_model_parser = get_data_model_parser( + helpers=helpers, data_model_name=data_model + ) # Instantiate DataModelNodes - data_model_nodes = generate_data_model_nodes(helpers, data_model_name=data_model) + data_model_nodes = generate_data_model_nodes( + helpers, data_model_name=data_model + ) for relationship in 
relationships: rel_dict_info = data_model_nodes.get_rel_node_dict_info(relationship) if rel_dict_info: assert type(rel_dict_info[0]) == str assert type(rel_dict_info[1]) == dict - assert 'default' in rel_dict_info[1].keys() + assert "default" in rel_dict_info[1].keys() def test_get_data_model_properties(self, helpers, data_model): # Instantiate Parser - data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) + data_model_parser = get_data_model_parser( + helpers=helpers, data_model_name=data_model + ) # Parse Model attr_rel_dictionary = data_model_parser.parse_model() # Instantiate DataModelNodes - data_model_nodes = generate_data_model_nodes(helpers, data_model_name=data_model) + data_model_nodes = generate_data_model_nodes( + helpers, data_model_name=data_model + ) # Get properties in the data model - data_model_properties = data_model_nodes.get_data_model_properties(attr_rel_dictionary) - + data_model_properties = data_model_nodes.get_data_model_properties( + attr_rel_dictionary + ) + # In the current example model, there are no properties, would need to update this section if properties are added. assert data_model_properties == [] # Update the attr_rel_dictionary to add a property, then see if its found. 
# Get a random relationship key from the attr_rel_dictionary: all_keys = list(attr_rel_dictionary.keys()) - random_index = len(all_keys)-1 + random_index = len(all_keys) - 1 rel_key = all_keys[random.randint(0, random_index)] # Modify the contents of that relationship - attr_rel_dictionary[rel_key]['Relationships']['Properties'] = ['TestProperty'] - + attr_rel_dictionary[rel_key]["Relationships"]["Properties"] = ["TestProperty"] + # Get properties in the modified data model - data_model_properties = data_model_nodes.get_data_model_properties(attr_rel_dictionary) + data_model_properties = data_model_nodes.get_data_model_properties( + attr_rel_dictionary + ) - assert data_model_properties == ['TestProperty'] + assert data_model_properties == ["TestProperty"] def test_get_entry_type(self, helpers, data_model): - # Instantiate Parser - data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) + data_model_parser = get_data_model_parser( + helpers=helpers, data_model_name=data_model + ) # Parse Model attr_rel_dictionary = data_model_parser.parse_model() @@ -542,47 +657,64 @@ def test_get_entry_type(self, helpers, data_model): # Update the attr_rel_dictionary to add a property, then see if it is assigned the correct entry type. 
# Get a random relationship key from the attr_rel_dictionary: all_keys = list(attr_rel_dictionary.keys()) - random_index = len(all_keys)-1 + random_index = len(all_keys) - 1 rel_key = all_keys[random.randint(0, random_index)] # Modify the contents of that relationship - attr_rel_dictionary[rel_key]['Relationships']['Properties'] = ['TestProperty'] + attr_rel_dictionary[rel_key]["Relationships"]["Properties"] = ["TestProperty"] # Instantiate DataModelNodes # Note: Get entry type uses self, so I will have to instantiate DataModelNodes outside of the generate_data_model_nodes function - data_model_nodes = DataModelNodes(attribute_relationships_dict=attr_rel_dictionary) + data_model_nodes = DataModelNodes( + attribute_relationships_dict=attr_rel_dictionary + ) # In the example data model all attributes should be classes. for attr in attr_rel_dictionary.keys(): entry_type = data_model_nodes.get_entry_type(attr) - assert entry_type == 'class' + assert entry_type == "class" # Check that the added property is properly loaded as a property - assert data_model_nodes.get_entry_type('TestProperty') == 'property' - - @pytest.mark.parametrize("rel_func", list(REL_FUNC_DICT.values()), ids=list(REL_FUNC_DICT.keys())) - @pytest.mark.parametrize("test_dn", list(TEST_DN_DICT.keys()), ids=list(TEST_DN_DICT.keys())) - @pytest.mark.parametrize("test_bool", ['True', 'False', True, False, 'kldjk'], ids=['True_str', 'False_str', 'True_bool', 'False_bool', 'Random_str']) + assert data_model_nodes.get_entry_type("TestProperty") == "property" + + @pytest.mark.parametrize( + "rel_func", list(REL_FUNC_DICT.values()), ids=list(REL_FUNC_DICT.keys()) + ) + @pytest.mark.parametrize( + "test_dn", list(TEST_DN_DICT.keys()), ids=list(TEST_DN_DICT.keys()) + ) + @pytest.mark.parametrize( + "test_bool", + ["True", "False", True, False, "kldjk"], + ids=["True_str", "False_str", "True_bool", "False_bool", "Random_str"], + ) def test_run_rel_functions(self, helpers, data_model, rel_func, test_dn, 
test_bool): # Call each relationship function to ensure that it is returning the desired result. # Note all the called functions will also be tested in other unit tests. # Instantiate Parser - data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) + data_model_parser = get_data_model_parser( + helpers=helpers, data_model_name=data_model + ) # Parse Model attr_rel_dictionary = data_model_parser.parse_model() # Instantiate DataModelNodes - data_model_nodes = generate_data_model_nodes(helpers, data_model_name=data_model) + data_model_nodes = generate_data_model_nodes( + helpers, data_model_name=data_model + ) # Run functions the same way they are called in run_rel_functions: if rel_func == get_attribute_display_name_from_label: expected_display_names = list(attr_rel_dictionary.keys()) - returned_display_names = [data_model_nodes.run_rel_functions( - rel_func=get_attribute_display_name_from_label, - node_display_name=ndn, - attr_relationships=attr_rel_dictionary) - for ndn in expected_display_names] + returned_display_names = [ + data_model_nodes.run_rel_functions( + rel_func=get_attribute_display_name_from_label, + node_display_name=ndn, + attr_relationships=attr_rel_dictionary, + ) + for ndn in expected_display_names + ] assert expected_display_names == returned_display_names @@ -591,29 +723,32 @@ def test_run_rel_functions(self, helpers, data_model, rel_func, test_dn, test_bo # Gather Validation Rules vrs = [] for k, v in attr_rel_dictionary.items(): - if 'Validation Rules' in v['Relationships'].keys(): - vrs.append(v['Relationships']['Validation Rules']) - parsed_vrs= [] + if "Validation Rules" in v["Relationships"].keys(): + vrs.append(v["Relationships"]["Validation Rules"]) + parsed_vrs = [] for attr in attr_rel_dictionary.keys(): - attr_relationships = attr_rel_dictionary[attr]['Relationships'] - if 'Validation Rules' in attr_relationships: - parsed_vrs.append(data_model_nodes.run_rel_functions( - rel_func=parse_validation_rules, 
- attr_relationships=attr_relationships, - csv_header='Validation Rules')) + attr_relationships = attr_rel_dictionary[attr]["Relationships"] + if "Validation Rules" in attr_relationships: + parsed_vrs.append( + data_model_nodes.run_rel_functions( + rel_func=parse_validation_rules, + attr_relationships=attr_relationships, + csv_header="Validation Rules", + ) + ) assert len(vrs) == len(parsed_vrs) - if DATA_MODEL_DICT[data_model]=='CSV': + if DATA_MODEL_DICT[data_model] == "CSV": assert vrs != parsed_vrs - elif DATA_MODEL_DICT[data_model]=='JSONLD': + elif DATA_MODEL_DICT[data_model] == "JSONLD": # JSONLDs already contain parsed validaiton rules so the raw vrs will match the parsed_vrs assert vrs == parsed_vrs # For all validation rules where there are multiple rules, make sure they have been split as expected. for i, pvr in enumerate(parsed_vrs): - delim_count = vrs[i][0].count('::') + delim_count = vrs[i][0].count("::") if delim_count: - assert len(pvr) == delim_count+1 + assert len(pvr) == delim_count + 1 elif rel_func == get_label_from_display_name: # For a limited set check label is returned as expected. @@ -622,23 +757,23 @@ def test_run_rel_functions(self, helpers, data_model, rel_func, test_dn, test_bo rel_func=get_label_from_display_name, node_display_name=test_dn, entry_type=entry_type, - ) + ) assert actual_value == expected_value elif rel_func == convert_bool_to_str: # return nothing if random string provided. 
- csv_header='Required' - attr_relationships = {csv_header:test_bool} + csv_header = "Required" + attr_relationships = {csv_header: test_bool} actual_conversion = data_model_nodes.run_rel_functions( - rel_func=convert_bool_to_str, - csv_header=csv_header, - attr_relationships=attr_relationships, - ) - if 'true' in str(test_bool).lower(): - assert actual_conversion==True - elif 'false' in str(test_bool).lower(): - assert actual_conversion==False + rel_func=convert_bool_to_str, + csv_header=csv_header, + attr_relationships=attr_relationships, + ) + if "true" in str(test_bool).lower(): + assert actual_conversion == True + elif "false" in str(test_bool).lower(): + assert actual_conversion == False else: - assert actual_conversion==None + assert actual_conversion == None else: # If the function passed is not currently supported, should hit an error. try: @@ -646,146 +781,202 @@ def test_run_rel_functions(self, helpers, data_model, rel_func, test_dn, test_bo convert_worked = False except: convert_worked = True - assert convert_worked==True + assert convert_worked == True return - @pytest.mark.parametrize("node_display_name", list(NODE_DISPLAY_NAME_DICT.keys()), ids=[str(v) for v in NODE_DISPLAY_NAME_DICT.values()]) + @pytest.mark.parametrize( + "node_display_name", + list(NODE_DISPLAY_NAME_DICT.keys()), + ids=[str(v) for v in NODE_DISPLAY_NAME_DICT.values()], + ) def test_generate_node_dict(self, helpers, data_model, node_display_name): # Instantiate Parser - data_model_parser = get_data_model_parser(helpers=helpers, data_model_name=data_model) + data_model_parser = get_data_model_parser( + helpers=helpers, data_model_name=data_model + ) # Parse Model attr_rel_dictionary = data_model_parser.parse_model() # Instantiate DataModelNodes - data_model_nodes = generate_data_model_nodes(helpers, data_model_name=data_model) + data_model_nodes = generate_data_model_nodes( + helpers, data_model_name=data_model + ) node_dict = data_model_nodes.generate_node_dict( - 
node_display_name=node_display_name, - attr_rel_dict=attr_rel_dictionary, - ) + node_display_name=node_display_name, + attr_rel_dict=attr_rel_dictionary, + ) # Check that the output is as expected for the required key. if NODE_DISPLAY_NAME_DICT[node_display_name]: - assert node_dict['required'] == True + assert node_dict["required"] == True else: - #Looking up this way, in case we add empty defaults back to JSONLD it wont fail, but will only be absent in JSONLD not CSV. - if not node_dict['required'] == False: - assert DATA_MODEL_DICT[data_model] == 'JSONLD' + # Looking up this way, in case we add empty defaults back to JSONLD it wont fail, but will only be absent in JSONLD not CSV. + if not node_dict["required"] == False: + assert DATA_MODEL_DICT[data_model] == "JSONLD" def test_generate_node(self, helpers, data_model): # Test adding a dummy node - node_dict = {'label': 'test_label'} + node_dict = {"label": "test_label"} # Get Graph - graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + graph_data_model = generate_graph_data_model( + helpers, data_model_name=data_model + ) # Instantiate DataModelNodes - data_model_nodes = generate_data_model_nodes(helpers, data_model_name=data_model) + data_model_nodes = generate_data_model_nodes( + helpers, data_model_name=data_model + ) # Assert the test node is not already in the graph - assert node_dict['label'] not in graph_data_model.nodes + assert node_dict["label"] not in graph_data_model.nodes # Add test node data_model_nodes.generate_node(graph_data_model, node_dict) # Check that the test node has been added - assert node_dict['label'] in graph_data_model.nodes + assert node_dict["label"] in graph_data_model.nodes + class TestDataModelEdges: - def test_generate_edge(self,helpers): + def test_generate_edge(self, helpers): return -@pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + +@pytest.mark.parametrize( + "data_model", 
list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values()) +) class TestDataModelJsonSchema: - @pytest.mark.parametrize("node_range", [[], ['healthy'], ['healthy', 'cancer']], ids=['empty_range', "single_range", "multi_range"]) - @pytest.mark.parametrize("node_name", ['', 'Diagnosis'], ids=['empty_node_name', "Diagnosis_node_name"]) + @pytest.mark.parametrize( + "node_range", + [[], ["healthy"], ["healthy", "cancer"]], + ids=["empty_range", "single_range", "multi_range"], + ) + @pytest.mark.parametrize( + "node_name", ["", "Diagnosis"], ids=["empty_node_name", "Diagnosis_node_name"] + ) @pytest.mark.parametrize("blank", [True, False], ids=["True_blank", "False_blank"]) def test_get_array_schema(self, helpers, data_model, node_range, node_name, blank): dmjs = get_data_model_json_schema(helpers=helpers, data_model_name=data_model) - array_schema = dmjs.get_array_schema(node_range=node_range, node_name=node_name, blank=blank) + array_schema = dmjs.get_array_schema( + node_range=node_range, node_name=node_name, blank=blank + ) # check node_name is recoreded as the key to the array schema assert node_name in array_schema # Check maxItems is the lenghth of node_range - assert len(node_range) == array_schema[node_name]['maxItems'] + assert len(node_range) == array_schema[node_name]["maxItems"] # Check that blank value is added at the end of node_range, if true if blank: - assert array_schema[node_name]['items']['enum'][-1]== '' - assert len(array_schema[node_name]['items']['enum'])==len(node_range)+1 + assert array_schema[node_name]["items"]["enum"][-1] == "" + assert len(array_schema[node_name]["items"]["enum"]) == len(node_range) + 1 else: - assert array_schema[node_name]['items']['enum']== node_range - assert len(array_schema[node_name]['items']['enum'])==len(node_range) + assert array_schema[node_name]["items"]["enum"] == node_range + assert len(array_schema[node_name]["items"]["enum"]) == len(node_range) - @pytest.mark.parametrize("node_name", ['', 
'Diagnosis'], ids=['empty_node_name', "Diagnosis_node_name"]) + @pytest.mark.parametrize( + "node_name", ["", "Diagnosis"], ids=["empty_node_name", "Diagnosis_node_name"] + ) def test_get_non_blank_schema(self, helpers, data_model, node_name): dmjs = get_data_model_json_schema(helpers=helpers, data_model_name=data_model) non_blank_schema = dmjs.get_non_blank_schema(node_name=node_name) # check node_name is recoreded as the key to the array schema assert node_name in non_blank_schema assert non_blank_schema[node_name] == {"not": {"type": "null"}, "minLength": 1} - - @pytest.mark.parametrize("node_range", [[], ['healthy'], ['healthy', 'cancer']], ids=['empty_range', "single_range", "multi_range"]) - @pytest.mark.parametrize("node_name", ['', 'Diagnosis'], ids=['empty_node_name', "Diagnosis_node_name"]) + + @pytest.mark.parametrize( + "node_range", + [[], ["healthy"], ["healthy", "cancer"]], + ids=["empty_range", "single_range", "multi_range"], + ) + @pytest.mark.parametrize( + "node_name", ["", "Diagnosis"], ids=["empty_node_name", "Diagnosis_node_name"] + ) @pytest.mark.parametrize("blank", [True, False], ids=["True_blank", "False_blank"]) def test_get_range_schema(self, helpers, data_model, node_range, node_name, blank): dmjs = get_data_model_json_schema(helpers=helpers, data_model_name=data_model) - range_schema = dmjs.get_range_schema(node_range=node_range, node_name=node_name, blank=blank) + range_schema = dmjs.get_range_schema( + node_range=node_range, node_name=node_name, blank=blank + ) # check node_name is recoreded as the key to the array schema assert node_name in range_schema # Check that blank value is added at the end of node_range, if true if blank: - assert range_schema[node_name]['enum'][-1]== '' - assert len(range_schema[node_name]['enum'])==len(node_range)+1 + assert range_schema[node_name]["enum"][-1] == "" + assert len(range_schema[node_name]["enum"]) == len(node_range) + 1 else: - assert range_schema[node_name]['enum']== node_range - assert 
len(range_schema[node_name]['enum'])==len(node_range) - - @pytest.mark.parametrize("source_node", ['', 'Patient'], ids=['empty_node_name', "patient_source"]) - @pytest.mark.parametrize("schema_name", ['', 'Test_Schema_Name'], ids=['empty_schema_name', "schema_name"]) - def test_get_json_validation_schema(self, helpers, data_model, source_node, schema_name): + assert range_schema[node_name]["enum"] == node_range + assert len(range_schema[node_name]["enum"]) == len(node_range) + + @pytest.mark.parametrize( + "source_node", ["", "Patient"], ids=["empty_node_name", "patient_source"] + ) + @pytest.mark.parametrize( + "schema_name", + ["", "Test_Schema_Name"], + ids=["empty_schema_name", "schema_name"], + ) + def test_get_json_validation_schema( + self, helpers, data_model, source_node, schema_name + ): dmjs = get_data_model_json_schema(helpers=helpers, data_model_name=data_model) try: # Get validation schema - json_validation_schema = dmjs.get_json_validation_schema(source_node=source_node, schema_name=schema_name) + json_validation_schema = dmjs.get_json_validation_schema( + source_node=source_node, schema_name=schema_name + ) # Check Keys in Schema - expected_jvs_keys = ['$schema', '$id', 'title', 'type', 'properties', 'required', 'allOf'] - actual_jvs_keys = list( json_validation_schema.keys()) + expected_jvs_keys = [ + "$schema", + "$id", + "title", + "type", + "properties", + "required", + "allOf", + ] + actual_jvs_keys = list(json_validation_schema.keys()) assert expected_jvs_keys == actual_jvs_keys # Check title - assert schema_name == json_validation_schema['title'] + assert schema_name == json_validation_schema["title"] # Check contents of validation schema - assert 'Diagnosis' in json_validation_schema['properties'] - assert 'Cancer' in json_validation_schema['properties']['Diagnosis']['enum'] + assert "Diagnosis" in json_validation_schema["properties"] + assert "Cancer" in json_validation_schema["properties"]["Diagnosis"]["enum"] except: # Should only fail if 
no source node is provided. - assert source_node == '' + assert source_node == "" class TestDataModelJsonLd: - @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + @pytest.mark.parametrize( + "data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values()) + ) def test_init(self, helpers, data_model): - # Test that __init__ is being set up properly + # Test that __init__ is being set up properly # Get Graph - graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) - + graph_data_model = generate_graph_data_model( + helpers, data_model_name=data_model + ) + # Instantiate DataModelJsonLD data_model_jsonld = DataModelJsonLD(Graph=graph_data_model) - # Test that __init__ is being set up properly + # Test that __init__ is being set up properly assert type(data_model_jsonld.graph) == nx.MultiDiGraph assert type(data_model_jsonld.rel_dict) == dict - assert 'required' in data_model_jsonld.rel_dict + assert "required" in data_model_jsonld.rel_dict assert type(data_model_jsonld.DME) == DataModelGraphExplorer - assert data_model_jsonld.output_path == '' + assert data_model_jsonld.output_path == "" def test_base_jsonld_template(self, helpers): # Gather the templates @@ -793,27 +984,27 @@ def test_base_jsonld_template(self, helpers): base_jsonld_template = json.loads(base_template.to_json()) # Test base template is constructed as expected - assert '@context' in base_jsonld_template - assert '@graph' in base_jsonld_template - assert '@id' in base_jsonld_template + assert "@context" in base_jsonld_template + assert "@graph" in base_jsonld_template + assert "@id" in base_jsonld_template def test_property_template(self, helpers): # Get Property Template empty_template = PropertyTemplate() property_template = json.loads(empty_template.to_json()) - expected_property_template = { - "@id": "", - "@type": "rdf:Property", - "rdfs:comment": "", - "rdfs:label": "", - "schema:domainIncludes": [], - 
"schema:rangeIncludes": [], - "schema:isPartOf": {}, - "sms:displayName": "", - "sms:required": "sms:false", - "sms:validationRules": [], - } + expected_property_template = { + "@id": "", + "@type": "rdf:Property", + "rdfs:comment": "", + "rdfs:label": "", + "schema:domainIncludes": [], + "schema:rangeIncludes": [], + "schema:isPartOf": {}, + "sms:displayName": "", + "sms:required": "sms:false", + "sms:validationRules": [], + } assert property_template == expected_property_template def test_class_template(self, helpers): @@ -822,36 +1013,42 @@ def test_class_template(self, helpers): class_template = json.loads(empty_template.to_json()) expected_class_template = { - "@id": "", - "@type": "rdfs:Class", - "rdfs:comment": "", - "rdfs:label": "", - "rdfs:subClassOf": [], - "schema:isPartOf": {}, - "schema:rangeIncludes": [], - "sms:displayName": "", - "sms:required": "sms:false", - "sms:requiresDependency": [], - "sms:requiresComponent": [], - "sms:validationRules": [], - } + "@id": "", + "@type": "rdfs:Class", + "rdfs:comment": "", + "rdfs:label": "", + "rdfs:subClassOf": [], + "schema:isPartOf": {}, + "schema:rangeIncludes": [], + "sms:displayName": "", + "sms:required": "sms:false", + "sms:requiresDependency": [], + "sms:requiresComponent": [], + "sms:validationRules": [], + } assert class_template == expected_class_template - @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) - @pytest.mark.parametrize("template_type", ['property', 'class'], ids=['property', 'class']) - @pytest.mark.parametrize("node", ['', 'Patient'], ids=['no node', 'Patient']) + @pytest.mark.parametrize( + "data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values()) + ) + @pytest.mark.parametrize( + "template_type", ["property", "class"], ids=["property", "class"] + ) + @pytest.mark.parametrize("node", ["", "Patient"], ids=["no node", "Patient"]) def test_fill_entry_template(self, helpers, data_model, template_type, node): # 
Get Graph - graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + graph_data_model = generate_graph_data_model( + helpers, data_model_name=data_model + ) # Instantiate DataModelJsonLD data_model_jsonld = DataModelJsonLD(Graph=graph_data_model) # Get empty template - if template_type == 'property': + if template_type == "property": property_template = PropertyTemplate() template = json.loads(property_template.to_json()) - elif template_type == 'class': + elif template_type == "class": class_template = ClassTemplate() template = json.loads(class_template.to_json()) @@ -860,36 +1057,66 @@ def test_fill_entry_template(self, helpers, data_model, template_type, node): try: # Fill out template for given node. - object_template = data_model_jsonld.fill_entry_template(template=template_copy, node=node) + object_template = data_model_jsonld.fill_entry_template( + template=template_copy, node=node + ) # Ensure template keys are present (not all original keys will be present due to cleaning empty values): except: # Should only fail if no node is given - assert node == '' + assert node == "" - if 'object_template' in locals(): + if "object_template" in locals(): # Check that object template keys match the expected keys actual_keys = list(object_template.keys()) - if template_type == 'property': - expected_keys = ['@id', '@type', 'rdfs:comment', 'rdfs:label', 'schema:isPartOf', 'sms:displayName', 'sms:required', 'sms:validationRules'] - elif template_type == 'class': - expected_keys = ['@id', '@type', 'rdfs:comment', 'rdfs:label', 'rdfs:subClassOf', 'schema:isPartOf', 'sms:displayName', 'sms:required', 'sms:requiresDependency', 'sms:validationRules'] - assert (set(actual_keys) - set(expected_keys)) == (set(expected_keys) - set(actual_keys)) - - @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) - @pytest.mark.parametrize("template_type", ['property', 'class'], ids=['property', 'class']) + if 
template_type == "property": + expected_keys = [ + "@id", + "@type", + "rdfs:comment", + "rdfs:label", + "schema:isPartOf", + "sms:displayName", + "sms:required", + "sms:validationRules", + ] + elif template_type == "class": + expected_keys = [ + "@id", + "@type", + "rdfs:comment", + "rdfs:label", + "rdfs:subClassOf", + "schema:isPartOf", + "sms:displayName", + "sms:required", + "sms:requiresDependency", + "sms:validationRules", + ] + assert (set(actual_keys) - set(expected_keys)) == ( + set(expected_keys) - set(actual_keys) + ) + + @pytest.mark.parametrize( + "data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values()) + ) + @pytest.mark.parametrize( + "template_type", ["property", "class"], ids=["property", "class"] + ) def test_add_contexts_to_entries(self, helpers, data_model, template_type): # Will likely need to change when contexts added to model. # Get Graph - graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + graph_data_model = generate_graph_data_model( + helpers, data_model_name=data_model + ) # Instantiate DataModelJsonLD data_model_jsonld = DataModelJsonLD(Graph=graph_data_model) # Get empty template - if template_type == 'property': + if template_type == "property": property_template = PropertyTemplate() template = json.loads(property_template.to_json()) - elif template_type == 'class': + elif template_type == "class": class_template = ClassTemplate() template = json.loads(class_template.to_json()) @@ -897,18 +1124,26 @@ def test_add_contexts_to_entries(self, helpers, data_model, template_type): template_copy = copy.deepcopy(template) # Fill out template for given node. 
- object_template = data_model_jsonld.fill_entry_template(template=template_copy, node='Patient') - - if 'sms:required' in object_template: - assert 'sms' in object_template['sms:required'] - if '@id' in object_template: - assert 'bts' in object_template['@id'] - - @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) - def test_clean_template(self, helpers, data_model:str, DMR:DataModelRelationships): + object_template = data_model_jsonld.fill_entry_template( + template=template_copy, node="Patient" + ) + + if "sms:required" in object_template: + assert "sms" in object_template["sms:required"] + if "@id" in object_template: + assert "bts" in object_template["@id"] + + @pytest.mark.parametrize( + "data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values()) + ) + def test_clean_template( + self, helpers, data_model: str, DMR: DataModelRelationships + ): # TODO: This will need to change with contexts bc they are hard coded here. 
# Get Graph - graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + graph_data_model = generate_graph_data_model( + helpers, data_model_name=data_model + ) # Instantiate DataModelJsonLD data_model_jsonld = DataModelJsonLD(Graph=graph_data_model) @@ -920,31 +1155,41 @@ def test_clean_template(self, helpers, data_model:str, DMR:DataModelRelationship # Make a copy of the template, since template is mutable template_copy = copy.deepcopy(template) - assert 'sms:requiresDependency' in template_copy + assert "sms:requiresDependency" in template_copy # Fill out some mock entries in the template: - template_copy['@id'] == 'bts:CheckURL' - template_copy['rdfs:label'] == 'CheckURL' - data_model_relationships=DMR.relationships_dictionary + template_copy["@id"] == "bts:CheckURL" + template_copy["rdfs:label"] == "CheckURL" + data_model_relationships = DMR.relationships_dictionary # Clean template - data_model_jsonld.clean_template(template=template_copy, data_model_relationships=data_model_relationships) - + data_model_jsonld.clean_template( + template=template_copy, data_model_relationships=data_model_relationships + ) + # Look for expected changes after cleaning # Check that expected JSONLD default is added - assert template_copy['sms:required'] == 'sms:false' - assert template_copy['sms:validationRules'] == [] - - # Check that non-required JSONLD keys are removed. - assert 'sms:requiresDependency' not in template_copy + assert template_copy["sms:required"] == "sms:false" + assert template_copy["sms:validationRules"] == [] - @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) - @pytest.mark.parametrize("valid_values", [[], ['Other', 'Female', 'Male'], ['A', 'Bad', 'Entry']], ids=['Empty List', 'Disordered List', 'Incorrect List']) + # Check that non-required JSONLD keys are removed. 
+ assert "sms:requiresDependency" not in template_copy + + @pytest.mark.parametrize( + "data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values()) + ) + @pytest.mark.parametrize( + "valid_values", + [[], ["Other", "Female", "Male"], ["A", "Bad", "Entry"]], + ids=["Empty List", "Disordered List", "Incorrect List"], + ) def test_reorder_template_entries(self, helpers, data_model, valid_values): # Note the way test_reorder_template_entries works, is that as long as an entry has recordings in the template # even if they are incorrect, they will be corrected within this function. # Get Graph - graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + graph_data_model = generate_graph_data_model( + helpers, data_model_name=data_model + ) # Instantiate DataModelJsonLD data_model_jsonld = DataModelJsonLD(Graph=graph_data_model) @@ -957,39 +1202,50 @@ def test_reorder_template_entries(self, helpers, data_model, valid_values): template_copy = copy.deepcopy(template) # Fill out template with 'Sex' attribute from example model - template_copy['@id'] = 'Sex' - template_copy['rdfs:label'] = 'Sex' - template_copy['sms:required'] = 'sms:false' - template_copy['schema:rangeIncludes'] = valid_values + template_copy["@id"] = "Sex" + template_copy["rdfs:label"] = "Sex" + template_copy["sms:required"] = "sms:false" + template_copy["schema:rangeIncludes"] = valid_values # Now reorder: data_model_jsonld.reorder_template_entries(template=template_copy) if valid_values: - assert template_copy['schema:rangeIncludes'] == [{'@id': 'bts:Female'}, {'@id': 'bts:Male'}, {'@id': 'bts:Other'}] + assert template_copy["schema:rangeIncludes"] == [ + {"@id": "bts:Female"}, + {"@id": "bts:Male"}, + {"@id": "bts:Other"}, + ] else: - assert template_copy['schema:rangeIncludes'] == [] + assert template_copy["schema:rangeIncludes"] == [] - @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + 
@pytest.mark.parametrize( + "data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values()) + ) def test_generate_jsonld_object(self, helpers, data_model): # Check that JSONLD object is being made, and has some populated entries. # Get Graph - graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + graph_data_model = generate_graph_data_model( + helpers, data_model_name=data_model + ) # Instantiate DataModelJsonLD data_model_jsonld = DataModelJsonLD(Graph=graph_data_model) jsonld_dm = data_model_jsonld.generate_jsonld_object() - assert list(jsonld_dm.keys()) == ['@context', '@graph', '@id'] - assert len(jsonld_dm['@graph']) > 1 + assert list(jsonld_dm.keys()) == ["@context", "@graph", "@id"] + assert len(jsonld_dm["@graph"]) > 1 - @pytest.mark.parametrize("data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values())) + @pytest.mark.parametrize( + "data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values()) + ) def test_convert_graph_to_jsonld(self, helpers, data_model): # Get Graph - graph_data_model = generate_graph_data_model(helpers, data_model_name=data_model) + graph_data_model = generate_graph_data_model( + helpers, data_model_name=data_model + ) # Generate JSONLD jsonld_dm = convert_graph_to_jsonld(Graph=graph_data_model) - assert list(jsonld_dm.keys()) == ['@context', '@graph', '@id'] - assert len(jsonld_dm['@graph']) > 1 - + assert list(jsonld_dm.keys()) == ["@context", "@graph", "@id"] + assert len(jsonld_dm["@graph"]) > 1 From 278cf31acb44c237efea68dc59f01ad87036d345 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Oct 2023 20:53:48 -0700 Subject: [PATCH 185/239] remove base_schema from help --- schematic/help.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/schematic/help.py b/schematic/help.py index c738df1bc..c243a10ab 100644 --- a/schematic/help.py +++ b/schematic/help.py @@ -166,9 +166,6 @@ "short_help": ( "Convert specification from CSV data model to 
JSON-LD data model." ), - "base_schema": ( - "Path to base data model. BioThings data model is loaded by default." - ), "output_jsonld": ( "Path to where the generated JSON-LD file needs to be outputted." ), From 63a887c597ba0f93b3de1ca6cc8fda39fa754d76 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Oct 2023 20:55:22 -0700 Subject: [PATCH 186/239] run black on schematic/schemas/commands.py --- schematic/schemas/commands.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/schematic/schemas/commands.py b/schematic/schemas/commands.py index bd400ed4b..3707d7409 100644 --- a/schematic/schemas/commands.py +++ b/schematic/schemas/commands.py @@ -16,11 +16,12 @@ from schematic.utils.schema_utils import export_schema from schematic.help import schema_commands -logger = logging.getLogger('schematic') +logger = logging.getLogger("schematic") click_log.basic_config(logger) CONTEXT_SETTINGS = dict(help_option_names=["--help", "-h"]) # help options + # invoke_without_command=True -> forces the application not to show aids before losing them with a --h @click.group(context_settings=CONTEXT_SETTINGS, invoke_without_command=True) def schema(): # use as `schematic model ...` @@ -40,7 +41,6 @@ def schema(): # use as `schematic model ...` @click.argument( "schema", type=click.Path(exists=True), metavar="", nargs=1 ) - @click.option( "--output_jsonld", "-o", @@ -54,14 +54,14 @@ def convert(schema, output_jsonld): Note: Currently, not configured to build off of base model, so removing --base_schema argument for now """ - + # get the start time st = time.time() # Instantiate Parser data_model_parser = DataModelParser(schema) - #Parse Model + # Parse Model logger.info("Parsing data model.") parsed_data_model = data_model_parser.parse_model() @@ -77,7 +77,7 @@ def convert(schema, output_jsonld): logger.info("Validating the data model internally.") data_model_validator = DataModelValidator(graph=graph_data_model) data_model_errors, 
data_model_warnings = data_model_validator.run_checks() - + # If there are errors log them. if data_model_errors: for err in data_model_errors: @@ -96,7 +96,6 @@ def convert(schema, output_jsonld): for w in war: logger.warning(w) - logger.info("Converting data model to JSON-LD") jsonld_data_model = convert_graph_to_jsonld(Graph=graph_data_model) @@ -114,9 +113,13 @@ def convert(schema, output_jsonld): # saving updated schema.org schema try: export_schema(jsonld_data_model, output_jsonld) - click.echo(f"The Data Model was created and saved to '{output_jsonld}' location.") + click.echo( + f"The Data Model was created and saved to '{output_jsonld}' location." + ) except: - click.echo(f"The Data Model could not be created by using '{output_jsonld}' location. Please check your file path again") + click.echo( + f"The Data Model could not be created by using '{output_jsonld}' location. Please check your file path again" + ) # get the end time et = time.time() From 6819c2f8e8d0db5bfc9acffd4437a5b09d6ae736 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Oct 2023 20:57:26 -0700 Subject: [PATCH 187/239] run black on schematic/schemas/data_model_edges.py --- schematic/schemas/data_model_edges.py | 58 +++++++++++++++++++-------- 1 file changed, 42 insertions(+), 16 deletions(-) diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py index 5940c029d..5acbb5e7b 100644 --- a/schematic/schemas/data_model_edges.py +++ b/schematic/schemas/data_model_edges.py @@ -1,15 +1,21 @@ import networkx as nx -from schematic.schemas.data_model_relationships import ( - DataModelRelationships - ) +from schematic.schemas.data_model_relationships import DataModelRelationships -class DataModelEdges(): + +class DataModelEdges: def __init__(self): self.dmr = DataModelRelationships() self.data_model_relationships = self.dmr.relationships_dictionary - def generate_edge(self, G: nx.MultiDiGraph, node: str, all_node_dict: dict, attr_rel_dict: dict, 
edge_relationships: dict) -> nx.MultiDiGraph: + def generate_edge( + self, + G: nx.MultiDiGraph, + node: str, + all_node_dict: dict, + attr_rel_dict: dict, + edge_relationships: dict, + ) -> nx.MultiDiGraph: """Generate an edge between a target node and relevant other nodes the data model. In short, does this current node belong to a recorded relationship in the attribute, relationshps dictionary. Go through each attribute and relationship to find where the node may be. Args: G, nx.MultiDiGraph: networkx graph representation of the data model, that is in the process of being fully built. At this point, all the nodes would have been added, and edges are being added per target node. @@ -29,20 +35,25 @@ def generate_edge(self, G: nx.MultiDiGraph, node: str, all_node_dict: dict, attr # For each attribute in the model. for attribute_display_name, relationship in attr_rel_dict.items(): # Get the relationships associated with the current attribute - relationships = relationship['Relationships'] + relationships = relationship["Relationships"] # Add edge relationships one at a time for rel_key, csv_header in edge_relationships.items(): # If the attribute has a relationship that matches the current edge being added if csv_header in relationships.keys(): # If the current node is part of that relationship and is not the current node # Connect node to attribute as an edge. - if node in relationships[csv_header] and node != attribute_display_name: - # Generate weights based on relationship type. + if ( + node in relationships[csv_header] + and node != attribute_display_name + ): + # Generate weights based on relationship type. # Weights will allow us to preserve the order of entries order in the data model in later steps. 
- if rel_key == 'domainIncludes': + if rel_key == "domainIncludes": # For 'domainIncludes'/properties relationship, users do not explicitly provide a list order (like for valid values, or dependsOn) # so we pull the order/weight from the order of the attributes. - weight = list(attr_rel_dict.keys()).index(attribute_display_name) + weight = list(attr_rel_dict.keys()).index( + attribute_display_name + ) elif type(relationships[csv_header]) == list: # For other relationships that pull in lists of values, we can explicilty pull the weight by their order in the provided list weight = relationships[csv_header].index(node) @@ -50,15 +61,30 @@ def generate_edge(self, G: nx.MultiDiGraph, node: str, all_node_dict: dict, attr # For single (non list) entries, add weight of 0 weight = 0 # Get the edge_key for the edge relationship we are adding at this step - edge_key = self.data_model_relationships[rel_key]['edge_key'] + edge_key = self.data_model_relationships[rel_key]["edge_key"] # Add edges, in a manner that preserves directionality # TODO: rewrite to use edge_dir - if rel_key in ['subClassOf', 'domainIncludes']: - G.add_edge(all_node_dict[node]['label'], all_node_dict[attribute_display_name]['label'], key=edge_key, weight=weight) + if rel_key in ["subClassOf", "domainIncludes"]: + G.add_edge( + all_node_dict[node]["label"], + all_node_dict[attribute_display_name]["label"], + key=edge_key, + weight=weight, + ) else: - G.add_edge(all_node_dict[attribute_display_name]['label'], all_node_dict[node]['label'], key=edge_key, weight=weight) + G.add_edge( + all_node_dict[attribute_display_name]["label"], + all_node_dict[node]["label"], + key=edge_key, + weight=weight, + ) # Add add rangeIncludes/valid value relationships in reverse as well, making the attribute the parent of the valid value. 
- if rel_key == 'rangeIncludes': - G.add_edge(all_node_dict[attribute_display_name]['label'], all_node_dict[node]['label'], key='parentOf', weight=weight) + if rel_key == "rangeIncludes": + G.add_edge( + all_node_dict[attribute_display_name]["label"], + all_node_dict[node]["label"], + key="parentOf", + weight=weight, + ) return G From 480f4bf645a925b6fb9e6db4989ca9f2566cd9ba Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Oct 2023 20:58:19 -0700 Subject: [PATCH 188/239] run black on schematic/schemas/data_model_graph.py --- schematic/schemas/data_model_graph.py | 318 ++++++++++++++++---------- 1 file changed, 192 insertions(+), 126 deletions(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index d0aff3c94..e63cd4137 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -5,19 +5,19 @@ from rdflib import Namespace from schematic.schemas.data_model_edges import DataModelEdges -from schematic.schemas.data_model_nodes import DataModelNodes -from schematic.schemas.data_model_relationships import ( - DataModelRelationships - ) +from schematic.schemas.data_model_nodes import DataModelNodes +from schematic.schemas.data_model_relationships import DataModelRelationships -from schematic.utils.schema_utils import get_property_label_from_display_name, get_class_label_from_display_name +from schematic.utils.schema_utils import ( + get_property_label_from_display_name, + get_class_label_from_display_name, +) from schematic.utils.general import unlist from schematic.utils.viz_utils import visualize logger = logging.getLogger(__name__) - class DataModelGraphMeta(object): _instances = {} @@ -32,17 +32,18 @@ def __call__(cls, *args, **kwargs): return cls._instances[cls] -class DataModelGraph(): - ''' +class DataModelGraph: + """ Generate graph network (networkx) from the attributes and relationships returned from the data model parser. Create a singleton. 
- ''' + """ + __metaclass__ = DataModelGraphMeta def __init__(self, attribute_relationships_dict: dict) -> None: - '''Load parsed data model. + """Load parsed data model. Args: attributes_relationship_dict, dict: generated in data_model_parser {Attribute Display Name: { @@ -50,7 +51,7 @@ def __init__(self, attribute_relationships_dict: dict) -> None: CSV Header: Value}}} Raises: ValueError, attribute_relationship_dict not loaded. - ''' + """ self.attribute_relationships_dict = attribute_relationships_dict self.dmn = DataModelNodes(self.attribute_relationships_dict) self.dme = DataModelEdges() @@ -58,31 +59,34 @@ def __init__(self, attribute_relationships_dict: dict) -> None: if not self.attribute_relationships_dict: raise ValueError( - "Something has gone wrong, a data model was not loaded into the DataModelGraph Class. Please check that your paths are correct" - ) + "Something has gone wrong, a data model was not loaded into the DataModelGraph Class. Please check that your paths are correct" + ) self.graph = self.generate_data_model_graph() - def generate_data_model_graph(self) -> nx.MultiDiGraph: - '''Generate NetworkX Graph from the Relationships/attributes dictionary, the graph is built by first adding all nodes to the graph, then connecting nodes by the relationships defined in the attributes_relationship dictionary. + """Generate NetworkX Graph from the Relationships/attributes dictionary, the graph is built by first adding all nodes to the graph, then connecting nodes by the relationships defined in the attributes_relationship dictionary. 
Returns: G: nx.MultiDiGraph, networkx graph representation of the data model - ''' + """ # Get all relationships with edges edge_relationships = self.dmr.retreive_rel_headers_dict(edge=True) # Find all nodes - all_nodes = self.dmn.gather_all_nodes_in_model(attr_rel_dict=self.attribute_relationships_dict) + all_nodes = self.dmn.gather_all_nodes_in_model( + attr_rel_dict=self.attribute_relationships_dict + ) # Instantiate NetworkX MultiDigraph G = nx.MultiDiGraph() - + all_node_dict = {} - + ## Fill in MultiDigraph with nodes for node in all_nodes: # Gather information for each node - node_dict = self.dmn.generate_node_dict(node, self.attribute_relationships_dict) + node_dict = self.dmn.generate_node_dict( + node, self.attribute_relationships_dict + ) # Add each node to the all_node_dict to be used for generating edges all_node_dict[node] = node_dict @@ -93,17 +97,26 @@ def generate_data_model_graph(self) -> nx.MultiDiGraph: ## Connect nodes via edges for node in all_nodes: # Generate edges - G = self.dme.generate_edge(G, node, all_node_dict, self.attribute_relationships_dict, edge_relationships) + G = self.dme.generate_edge( + G, + node, + all_node_dict, + self.attribute_relationships_dict, + edge_relationships, + ) return G -class DataModelGraphExplorer(): - def __init__(self, - G,): - ''' Load data model graph as a singleton. + +class DataModelGraphExplorer: + def __init__( + self, + G, + ): + """Load data model graph as a singleton. Args: G: nx.MultiDiGraph, networkx graph representation of the data model - ''' - self.graph = G #At this point the graph is expected to be fully formed. + """ + self.graph = G # At this point the graph is expected to be fully formed. self.dmr = DataModelRelationships() self.rel_dict = self.dmr.relationships_dictionary @@ -112,9 +125,9 @@ def find_properties(self) -> set[str]: Returns: properties, set: All properties defined in the data model, each property name is defined by its label. 
""" - properties=[] + properties = [] for node_1, node_2, rel in self.graph.edges: - if rel == self.rel_dict['domainIncludes']['edge_key']: + if rel == self.rel_dict["domainIncludes"]["edge_key"]: properties.append(node_1) properties = set(properties) return properties @@ -129,10 +142,12 @@ def find_classes(self) -> set[str]: classes = nodes - properties return classes - def find_node_range(self, node_label:Optional[str]=None, node_display_name:Optional[str]=None) -> list: + def find_node_range( + self, node_label: Optional[str] = None, node_display_name: Optional[str] = None + ) -> list: """Get valid values for the given node (attribute) Args: - node_label, str, Optional[str]: label of the node for which to retrieve valid values + node_label, str, Optional[str]: label of the node for which to retrieve valid values node_display_name, str, Optional[str]: Display Name of the node for which to retrieve valid values Returns: valid_values, list: List of valid values associated with the provided node. @@ -140,17 +155,19 @@ def find_node_range(self, node_label:Optional[str]=None, node_display_name:Optio if not node_label: node_label = self.get_node_label(node_display_name) - valid_values=[] + valid_values = [] for node_1, node_2, rel in self.graph.edges: - if node_1 == node_label and rel == self.rel_dict['rangeIncludes']['edge_key']: + if ( + node_1 == node_label + and rel == self.rel_dict["rangeIncludes"]["edge_key"] + ): valid_values.append(node_2) valid_values = list(set(valid_values)) return valid_values - - def get_adjacent_nodes_by_relationship(self, - node_label: str, - relationship: str) -> list[str]: + def get_adjacent_nodes_by_relationship( + self, node_label: str, relationship: str + ) -> list[str]: """Get a list of nodes that is / are adjacent to a given node, based on a relationship type. 
Args: @@ -162,15 +179,18 @@ def get_adjacent_nodes_by_relationship(self, #checked """ nodes = set() - for (node_1, node_2, key, _) in self.graph.out_edges(node_label, data=True, keys=True): + for node_1, node_2, key, _ in self.graph.out_edges( + node_label, data=True, keys=True + ): if key == relationship: nodes.add(node_2) return list(nodes) - def get_component_requirements(self, - source_component: str, - ) -> list[str]: + def get_component_requirements( + self, + source_component: str, + ) -> list[str]: """Get all components that are associated with a given source component and are required by it. Args: @@ -183,16 +203,19 @@ def get_component_requirements(self, req_components = list( reversed( self.get_descendants_by_edge_type( - source_component, self.rel_dict['requiresComponent']['edge_key'], ordered=True + source_component, + self.rel_dict["requiresComponent"]["edge_key"], + ordered=True, ) ) ) return req_components - def get_component_requirements_graph(self, - source_component: str, - ) -> nx.DiGraph: + def get_component_requirements_graph( + self, + source_component: str, + ) -> nx.DiGraph: """Get all components that are associated with a given source component and are required by it; return the components as a dependency graph (i.e. a DAG). 
Args: @@ -207,17 +230,18 @@ def get_component_requirements_graph(self, # get the subgraph induced on required component nodes req_components_graph = self.get_subgraph_by_edge_type( - self.rel_dict['requiresComponent']['edge_key'], + self.rel_dict["requiresComponent"]["edge_key"], ).subgraph(req_components) return req_components_graph - def get_descendants_by_edge_type(self, - source_node: str, - relationship: str, - connected: bool = True, - ordered: bool = False, - ) -> list[str]: + def get_descendants_by_edge_type( + self, + source_node: str, + relationship: str, + connected: bool = True, + ordered: bool = False, + ) -> list[str]: """Get all nodes that are descendants of a given source node, based on a specific type of edge / relationship type. Args: @@ -241,7 +265,7 @@ def get_descendants_by_edge_type(self, # prune the descendants subgraph so as to include only those edges that match the relationship type rel_edges = [] - for (node_1, node_2, key, _) in descendants_subgraph.edges(data=True, keys=True): + for node_1, node_2, key, _ in descendants_subgraph.edges(data=True, keys=True): if key == relationship: rel_edges.append((node_1, node_2)) @@ -277,23 +301,24 @@ def get_descendants_by_edge_type(self, return list(descendants) - def get_digraph_by_edge_type(self, edge_type:str) -> nx.DiGraph: - '''Get a networkx digraph of the nodes connected via a given edge_type. + def get_digraph_by_edge_type(self, edge_type: str) -> nx.DiGraph: + """Get a networkx digraph of the nodes connected via a given edge_type. 
Args: edge_type: Edge type to search for, possible types are defined by 'edge_key' in relationship class Returns: - ''' + """ digraph = nx.DiGraph() - for (node_1, node_2, key, _) in self.graph.edges(data=True, keys=True): + for node_1, node_2, key, _ in self.graph.edges(data=True, keys=True): if key == edge_type: digraph.add_edge(node_1, node_2) return digraph - def get_edges_by_relationship(self, - node: str, - relationship: str, - ) -> list[str]: + def get_edges_by_relationship( + self, + node: str, + relationship: str, + ) -> list[str]: """Get a list of out-edges of a node where the edges match a specifc type of relationship. i.e., the edges connecting a node to its neighbors are of relationship type -- "parentOf" (set of edges to children / sub-class nodes). @@ -307,13 +332,13 @@ def get_edges_by_relationship(self, """ edges = [] - for (node_1, node_2, key, _) in self.graph.out_edges(node, data=True, keys=True): + for node_1, node_2, key, _ in self.graph.out_edges(node, data=True, keys=True): if key == relationship: edges.append((node_1, node_2)) return edges - def get_ordered_entry(self, key: str, source_node_label:str) -> list[str]: + def get_ordered_entry(self, key: str, source_node_label: str) -> list[str]: """Order the values associated with a particular node and edge_key to match original ordering in schema. Args: key: a key representing and edge relationship in DataModelRelationships.relationships_dictionary @@ -327,41 +352,57 @@ def get_ordered_entry(self, key: str, source_node_label:str) -> list[str]: """ # Check if node is in the graph, if not throw an error. 
if not self.is_class_in_schema(node_label=source_node_label): - raise KeyError(f"Cannot find node: {source_node_label} in the graph, please check entry.") - - edge_key = self.rel_dict[key]['edge_key'] - if self.rel_dict[key]['jsonld_direction'] == 'out': - #use outedges - - original_edge_weights_dict = {attached_node:self.graph[source_node][attached_node][edge_key]['weight'] - for source_node, attached_node in self.graph.out_edges(source_node_label) - if edge_key in self.graph[source_node][attached_node] - } + raise KeyError( + f"Cannot find node: {source_node_label} in the graph, please check entry." + ) + + edge_key = self.rel_dict[key]["edge_key"] + if self.rel_dict[key]["jsonld_direction"] == "out": + # use outedges + + original_edge_weights_dict = { + attached_node: self.graph[source_node][attached_node][edge_key][ + "weight" + ] + for source_node, attached_node in self.graph.out_edges( + source_node_label + ) + if edge_key in self.graph[source_node][attached_node] + } else: - #use inedges - original_edge_weights_dict = {attached_node:self.graph[attached_node][source_node][edge_key]['weight'] - for attached_node, source_node in self.graph.in_edges(source_node_label) - if edge_key in self.graph[attached_node][source_node] - } - - sorted_nodes = list(dict(sorted(original_edge_weights_dict.items(), key=lambda item: item[1])).keys()) - + # use inedges + original_edge_weights_dict = { + attached_node: self.graph[attached_node][source_node][edge_key][ + "weight" + ] + for attached_node, source_node in self.graph.in_edges(source_node_label) + if edge_key in self.graph[attached_node][source_node] + } + + sorted_nodes = list( + dict( + sorted(original_edge_weights_dict.items(), key=lambda item: item[1]) + ).keys() + ) + return sorted_nodes # Get values associated with a node - def get_nodes_ancestors(self, subgraph:nx.DiGraph, node_label:str) -> list[str]: - """Get a list of nodes reachable from source component in graph + def get_nodes_ancestors(self, subgraph: 
nx.DiGraph, node_label: str) -> list[str]: + """Get a list of nodes reachable from source component in graph Args: subgraph: networkx graph object node_label, str: label of node to find ancestors for Returns: - all_ancestors, list: nodes reachable from source in graph + all_ancestors, list: nodes reachable from source in graph """ all_ancestors = list(nx.ancestors(subgraph, node_label)) return all_ancestors - def get_node_comment(self, node_display_name: str = None, node_label: str= None) -> str: + def get_node_comment( + self, node_display_name: str = None, node_label: str = None + ) -> str: """Get the node definition, i.e., the "comment" associated with a given node display name. Args: @@ -376,14 +417,16 @@ def get_node_comment(self, node_display_name: str = None, node_label: str= None) if not node_label: return "" - node_definition = self.graph.nodes[node_label][self.rel_dict['comment']['node_label']] + node_definition = self.graph.nodes[node_label][ + self.rel_dict["comment"]["node_label"] + ] return node_definition - - def get_node_dependencies(self, - source_node: str, - display_names: bool = True, - schema_ordered: bool = True, + def get_node_dependencies( + self, + source_node: str, + display_names: bool = True, + schema_ordered: bool = True, ) -> list[str]: """Get the immediate dependencies that are related to a given source node. 
@@ -400,23 +443,30 @@ def get_node_dependencies(self, if schema_ordered: # get dependencies in the same order in which they are defined in the schema - required_dependencies = self.get_ordered_entry(key=self.rel_dict['requiresDependency']['edge_key'], source_node_label=source_node) + required_dependencies = self.get_ordered_entry( + key=self.rel_dict["requiresDependency"]["edge_key"], + source_node_label=source_node, + ) else: required_dependencies = self.get_adjacent_nodes_by_relationship( - node_label = source_node, relationship = self.rel_dict['requiresDependency']['edge_key']) + node_label=source_node, + relationship=self.rel_dict["requiresDependency"]["edge_key"], + ) if display_names: # get display names of dependencies dependencies_display_names = [] for req in required_dependencies: - dependencies_display_names.append(self.graph.nodes[req][self.rel_dict['displayName']['node_label']]) + dependencies_display_names.append( + self.graph.nodes[req][self.rel_dict["displayName"]["node_label"]] + ) return dependencies_display_names return required_dependencies - def get_nodes_descendants(self, node_label:str) -> list[str]: + def get_nodes_descendants(self, node_label: str) -> list[str]: """Return a list of nodes reachable from source in graph Args: node_label, str: any given node @@ -428,7 +478,8 @@ def get_nodes_descendants(self, node_label:str) -> list[str]: return all_descendants def get_nodes_display_names( - self, node_list: list[str], + self, + node_list: list[str], ) -> list[str]: """Get display names associated with the given list of nodes. @@ -439,7 +490,8 @@ def get_nodes_display_names( List of display names. 
""" node_list_display_names = [ - self.graph.nodes[node][self.rel_dict['displayName']['node_label']] for node in node_list + self.graph.nodes[node][self.rel_dict["displayName"]["node_label"]] + for node in node_list ] return node_list_display_names @@ -454,9 +506,11 @@ def get_node_label(self, node_display_name: str) -> str: If display name not part of schema, return an empty string. """ - node_class_label = get_class_label_from_display_name(display_name = node_display_name) + node_class_label = get_class_label_from_display_name( + display_name=node_display_name + ) node_property_label = get_property_label_from_display_name( - display_name = node_display_name + display_name=node_display_name ) if node_class_label in self.graph.nodes: @@ -464,11 +518,16 @@ def get_node_label(self, node_display_name: str) -> str: elif node_property_label in self.graph.nodes: node_label = node_property_label else: - node_label="" - + node_label = "" + return node_label - def get_node_range(self, node_label: Optional[str] = None, node_display_name: Optional[str] = None, display_names: bool=False) -> list[str]: + def get_node_range( + self, + node_label: Optional[str] = None, + node_display_name: Optional[str] = None, + display_names: bool = False, + ) -> list[str]: """Get the range, i.e., all the valid values that are associated with a node label. Args: @@ -476,7 +535,7 @@ def get_node_range(self, node_label: Optional[str] = None, node_display_name: Op display_names, bool: True Returns: required_range: Returned if display_names=False, list of valid values (labels) associated with a given node. - dependencies_display_name: Returned if display_names=True, + dependencies_display_name: Returned if display_names=True, List of valid values (display names) associated with a given node Raises: ValueError: If the node cannot be found in the graph. 
@@ -486,7 +545,7 @@ def get_node_range(self, node_label: Optional[str] = None, node_display_name: Op try: # get node range in the order defined in schema for given node - required_range = self.find_node_range(node_label = node_label) + required_range = self.find_node_range(node_label=node_label) except KeyError: raise ValueError( f"The source node {node_label} does not exist in the graph. " @@ -504,7 +563,9 @@ def get_node_range(self, node_label: Optional[str] = None, node_display_name: Op return required_range - def get_node_required(self, node_label:Optional[str]=None, node_display_name: Optional[str]=None) -> bool: + def get_node_required( + self, node_label: Optional[str] = None, node_display_name: Optional[str] = None + ) -> bool: """Check if a given node is required or not. Note: The possible options that a node can be associated with -- "required" / "optional". @@ -523,7 +584,9 @@ def get_node_required(self, node_label:Optional[str]=None, node_display_name: Op node_required = self.graph.nodes[node_label][rel_node_label] return node_required - def get_node_validation_rules(self, node_label: Optional[str]=None, node_display_name: Optional[str]=None) -> str: + def get_node_validation_rules( + self, node_label: Optional[str] = None, node_display_name: Optional[str] = None + ) -> str: """Get validation rules associated with a node, Args: @@ -542,9 +605,7 @@ def get_node_validation_rules(self, node_label: Optional[str]=None, node_display return node_validation_rules - def get_subgraph_by_edge_type( - self, relationship: str - ) -> nx.DiGraph: + def get_subgraph_by_edge_type(self, relationship: str) -> nx.DiGraph: """Get a subgraph containing all edges of a given type (aka relationship). 
Args: @@ -556,7 +617,7 @@ def get_subgraph_by_edge_type( # prune the metadata model graph so as to include only those edges that match the relationship type rel_edges = [] - for (node_1, node_2, key, _) in self.graph.out_edges(data=True, keys=True): + for node_1, node_2, key, _ in self.graph.out_edges(data=True, keys=True): if key == relationship: rel_edges.append((node_1, node_2)) @@ -565,19 +626,22 @@ def get_subgraph_by_edge_type( return relationship_subgraph - - def find_adjacent_child_classes(self, node_label: Optional[str]=None, node_display_name: Optional[str]=None)->list[str]: - '''Find child classes of a given node. + def find_adjacent_child_classes( + self, node_label: Optional[str] = None, node_display_name: Optional[str] = None + ) -> list[str]: + """Find child classes of a given node. Args: node_display_name: Display name of the node to look up. node_label: Label of the node to look up. Returns: List of nodes that are adjacent to the given node, by SubclassOf relationship. - ''' + """ if not node_label: node_label = self.get_node_label(node_display_name) - return self.get_adjacent_nodes_by_relationship(node_label = node_label, relationship = self.rel_dict['subClassOf']['edge_key']) + return self.get_adjacent_nodes_by_relationship( + node_label=node_label, relationship=self.rel_dict["subClassOf"]["edge_key"] + ) def find_child_classes(self, schema_class: str) -> list: """Find schema classes that inherit from the given class @@ -588,7 +652,7 @@ def find_child_classes(self, schema_class: str) -> list: """ return unlist(list(self.graph.successors(schema_class))) - def find_class_specific_properties(self, schema_class:str) -> list[str]: + def find_class_specific_properties(self, schema_class: str) -> list[str]: """Find properties specifically associated with a given class Args: schema_class, str: node/class label, to identify properties for. 
@@ -597,17 +661,19 @@ def find_class_specific_properties(self, schema_class:str) -> list[str]: Raises: KeyError: Key error is raised if the provded schema_class is not in the graph """ - + if not self.is_class_in_schema(schema_class): - raise KeyError(f"Schema_class provided: {schema_class} is not in the data model, please check that you are providing the proper class/node label") + raise KeyError( + f"Schema_class provided: {schema_class} is not in the data model, please check that you are providing the proper class/node label" + ) properties = [] for n1, n2 in self.graph.edges(): - if n2==schema_class and 'domainValue' in self.graph[n1][schema_class]: + if n2 == schema_class and "domainValue" in self.graph[n1][schema_class]: properties.append(n1) return properties - def find_parent_classes(self, node_label:str) -> list[list[str]]: + def find_parent_classes(self, node_label: str) -> list[list[str]]: """Find all parents of the provided node Args: node_label: label of the node to find parents of @@ -621,13 +687,11 @@ def find_parent_classes(self, node_label:str) -> list[list[str]]: root_node = list(nx.topological_sort(digraph))[0] # Get paths between root_node and the target node. - paths = nx.all_simple_paths( - self.graph, source=root_node, target=node_label - ) + paths = nx.all_simple_paths(self.graph, source=root_node, target=node_label) return [_path[:-1] for _path in paths] - def full_schema_graph(self, size:Optional[int]=None)-> graphviz.Digraph: + def full_schema_graph(self, size: Optional[int] = None) -> graphviz.Digraph: """Create a graph of the data model. Args: size, float: max height and width of the graph, if one value provided it is used for both. 
@@ -650,12 +714,14 @@ def is_class_in_schema(self, node_label: str) -> bool: else: return False - def sub_schema_graph(self, source:str, direction:str, size=None) -> Optional[graphviz.Digraph]: + def sub_schema_graph( + self, source: str, direction: str, size=None + ) -> Optional[graphviz.Digraph]: """Create a sub-schema graph Args: source, str: source node label to start graph direction, str: direction to create the vizualization, choose from "up", "down", "both" - size, float: max height and width of the graph, if one value provided it is used for both. + size, float: max height and width of the graph, if one value provided it is used for both. Returns: Sub-schema graph viz """ From 4a44c1608bd67e37614385ff28eb71d14736bed5 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Oct 2023 20:59:08 -0700 Subject: [PATCH 189/239] run black on schematic/schemas/data_model_json_schema.py --- schematic/schemas/data_model_json_schema.py | 701 ++++++++++---------- 1 file changed, 364 insertions(+), 337 deletions(-) diff --git a/schematic/schemas/data_model_json_schema.py b/schematic/schemas/data_model_json_schema.py index 4adba3816..d8610f1d5 100644 --- a/schematic/schemas/data_model_json_schema.py +++ b/schematic/schemas/data_model_json_schema.py @@ -7,344 +7,371 @@ from schematic.schemas.data_model_relationships import DataModelRelationships from schematic.utils.validate_utils import rule_in_rule_list -logger = logging.getLogger(__name__) - -class DataModelJSONSchema: - def __init__(self, jsonld_path: str, graph:nx.MultiDiGraph, - ): - # TODO: Change jsonld_path to data_model_path (can work with CSV too) - self.jsonld_path = jsonld_path - self.graph = graph # Graph would be fully made at this point. 
- self.DME = DataModelGraphExplorer(self.graph) - self.dmr = DataModelRelationships() - self.rel_dict = self.dmr.relationships_dictionary - - def get_array_schema( - self, node_range: List[str], node_name: str, blank=False - ) -> Dict[str, Dict[str, List[str]]]: - """Add a list of nodes to the "enum" key in a given JSON schema object. - Allow a node to be mapped to any subset of the list - - Args: - node_name: Name of the "main" / "head" key in the JSON schema / object. - node_range: List of nodes to be added to the JSON object. - blank: If True, add empty node to end of node list. - If False, do not add empty node to end of node list. - - Returns: - JSON object with array validation rule. - """ - - schema_node_range_array = { - node_name: { - "type": "array", - "items": {"enum": node_range + [""] if blank else node_range}, - "maxItems": len(node_range), - } - } - return schema_node_range_array - - def get_non_blank_schema( - self, node_name: str - ) -> Dict[str, dict[str, Any]]: # can't define heterogenous Dict generic types - """Get a schema rule that does not allow null or empty values. - - Args: - node_name: Name of the node on which the schema rule is to be applied. - - Returns: - Schema rule as a JSON object. - """ - non_blank_schema = {node_name: {"not": {"type": "null"}, "minLength": 1}} - - return non_blank_schema - - def get_range_schema( - self, node_range: List[str], node_name: str, blank=False - ) -> Dict[str, Dict[str, List[str]]]: - """Add a list of nodes to the "enum" key in a given JSON schema object. - - Args: - node_name: Name of the "main" / "head" key in the JSON schema / object. - node_range: List of nodes to be added to the JSON object. - blank: If True, add empty node to end of node list. - If False, do not add empty node to end of node list. +logger = logging.getLogger(__name__) - Returns: - JSON object with nodes. 
- """ - if blank: - schema_node_range = {node_name: {"enum": node_range + [""]}} - else: - schema_node_range = {node_name: {"enum": node_range}} - - return schema_node_range - - def get_json_validation_schema(self, source_node: str, schema_name: str) -> Dict[str, dict[str, Any]]: - ''' - Consolidated method that aims to gather dependencies and value constraints across terms / nodes in a schema.org schema and store them in a jsonschema /JSON Schema schema. - - It does so for any given node in the schema.org schema (recursively) using the given node as starting point in the following manner: - 1) Find all the nodes / terms this node depends on (which are required as "additional metadata" given this node is "required"). - 2) Find all the allowable metadata values / nodes that can be assigned to a particular node (if such a constraint is specified on the schema). - - Args: - source_node: Node from which we can start recursive dependancy traversal (as mentioned above). - schema_name: Name assigned to JSON-LD schema (to uniquely identify it via URI when it is hosted on the Internet). - - Returns: - JSON Schema as a dictionary. 
- ''' - json_schema = { - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "http://example.com/" + schema_name, - "title": schema_name, - "type": "object", - "properties": {}, - "required": [], - "allOf": [], - } - - nodes_to_process = ( - [] - ) # list of nodes to be checked for dependencies, starting with the source node - processed_nodes = ( - [] - ) # keep of track of nodes whose dependencies have been processed - reverse_dependencies = ( - {} - ) # maintain a map between conditional nodes and their dependencies (reversed) -- {dependency : conditional_node} - range_domain_map = ( - {} - ) # maintain a map between range nodes and their domain nodes {range_value : domain_value} - # the domain node is very likely the parentof ("parentOf" relationship) of the range node - - root_dependencies = self.DME.get_adjacent_nodes_by_relationship( - node_label = source_node, relationship=self.rel_dict['requiresDependency']['edge_key'], - ) - # if root_dependencies is empty it means that a class with name 'source_node' exists - # in the schema, but it is not a valid component - if not root_dependencies: - raise ValueError(f"'{source_node}' is not a valid component in the schema.") - - nodes_to_process += root_dependencies - - process_node = nodes_to_process.pop(0) - - while process_node: - - if not process_node in processed_nodes: - # node is being processed - node_is_processed = True - - node_range = self.DME.get_adjacent_nodes_by_relationship( - node_label=process_node, relationship=self.rel_dict['rangeIncludes']['edge_key'], - ) - - - # get node range display name - node_range_d = self.DME.get_nodes_display_names(node_list=node_range) - - node_dependencies = self.DME.get_adjacent_nodes_by_relationship( - node_label=process_node, relationship=self.rel_dict['requiresDependency']['edge_key'], - ) - - # get process node display name - node_display_name = self.graph.nodes[process_node][self.rel_dict['displayName']['node_label']] - - # updating map between node and 
node's valid values - for n in node_range_d: - if not n in range_domain_map: - range_domain_map[n] = [] - range_domain_map[n].append(node_display_name) - - # can this node be map to the empty set (if required no; if not required yes) - # TODO: change "required" to different term, required may be a bit misleading (i.e. is the node required in the schema) - node_required = self.DME.get_node_required(node_label=process_node) - - # get any additional validation rules associated with this node (e.g. can this node be mapped to a list of other nodes) - node_validation_rules = self.DME.get_node_validation_rules( - node_display_name = node_display_name - ) - - if node_display_name in reverse_dependencies: - # if node has conditionals set schema properties and conditional dependencies - # set schema properties - if node_range: - # if process node has valid value range set it in schema properties - schema_valid_vals = self.get_range_schema( - node_range = node_range_d, node_name=node_display_name, blank=True - ) - - if node_validation_rules: - # if this node has extra validation rules process them - # TODO: abstract this into its own validation rule constructor/generator module/class - if rule_in_rule_list("list", node_validation_rules): - # if this node can be mapped to a list of nodes - # set its schema accordingly - schema_valid_vals = self.get_array_schema( - node_range=node_range_d, node_name=node_display_name, blank=True - ) - - else: - # otherwise, by default allow any values - schema_valid_vals = {node_display_name: {}} - - json_schema["properties"].update(schema_valid_vals) - - # set schema conditional dependencies - for node in reverse_dependencies[node_display_name]: - # set all of the conditional nodes that require this process node - - # get node domain if any - # ow this node is a conditional requirement - if node in range_domain_map: - domain_nodes = range_domain_map[node] - conditional_properties = {} - - for domain_node in domain_nodes: - - # set range of 
conditional node schema - conditional_properties.update( - { - "properties": {domain_node: {"enum": [node]}}, - "required": [domain_node], - } - ) - - # given node conditional are satisfied, this process node (which is dependent on these conditionals) has to be set or not depending on whether it is required - if node_range: - dependency_properties = self.get_range_schema( - node_range=node_range_d, - node_name=node_display_name, - blank=not node_required, - ) - - if node_validation_rules: - if rule_in_rule_list("list", node_validation_rules): - # TODO: get_range_schema and get_range_schema have similar behavior - combine in one module - dependency_properties = self.get_array_schema( - node_range=node_range_d, - node_name=node_display_name, - blank=not node_required, - ) - - else: - if node_required: - dependency_properties = self.get_non_blank_schema( - node_name=node_display_name - ) - else: - dependency_properties = {node_display_name: {}} - schema_conditional_dependencies = { - "if": conditional_properties, - "then": { - "properties": dependency_properties, - "required": [node_display_name], - }, - } - - # update conditional-dependency rules in json schema - json_schema["allOf"].append( - schema_conditional_dependencies - ) - - else: - # node doesn't have conditionals - if node_required: - if node_range: - schema_valid_vals = self.get_range_schema( - node_range=node_range_d, node_name=node_display_name, blank=False - ) - - if node_validation_rules: - # If there are valid values AND they are expected to be a list, - # reformat the Valid Values. 
- if rule_in_rule_list("list", node_validation_rules): - schema_valid_vals = self.get_array_schema( - node_range=node_range_d, node_name=node_display_name, blank=False - ) - else: - schema_valid_vals = self.get_non_blank_schema( - node_name=node_display_name - ) - - json_schema["properties"].update(schema_valid_vals) - # add node to required fields - json_schema["required"] += [node_display_name] - - elif process_node in root_dependencies: - # node doesn't have conditionals and is not required; it belongs in the schema only if it is in root's dependencies - - if node_range: - schema_valid_vals = self.get_range_schema( - node_range=node_range_d, node_name=node_display_name, blank=True - ) - - if node_validation_rules: - if rule_in_rule_list("list", node_validation_rules): - schema_valid_vals = self.get_array_schema( - node_range=node_range_d, node_name=node_display_name, blank=True - ) - - else: - schema_valid_vals = {node_display_name: {}} - - json_schema["properties"].update(schema_valid_vals) - - else: - # node doesn't have conditionals and it is not required and it is not a root dependency - # the node doesn't belong in the schema - # do not add to processed nodes since its conditional may be traversed at a later iteration (though unlikely for most schemas we consider) - node_is_processed = False - - # add process node as a conditional to its dependencies - node_dependencies_d = self.DME.get_nodes_display_names( - node_list=node_dependencies - ) - - for dep in node_dependencies_d: - if not dep in reverse_dependencies: - reverse_dependencies[dep] = [] - - reverse_dependencies[dep].append(node_display_name) - - # add nodes found as dependencies and range of this processed node - # to the list of nodes to be processed - nodes_to_process += node_range - nodes_to_process += node_dependencies - - # if the node is processed add it to the processed nodes set - if node_is_processed: - processed_nodes.append(process_node) - - # if the list of nodes to process is not empty 
- # set the process node the next remaining node to process - if nodes_to_process: - process_node = nodes_to_process.pop(0) - else: - # no more nodes to process - # exit the loop - break - - logger.info("JSON schema successfully generated from schema.org schema!") - - # if no conditional dependencies were added we can't have an empty 'AllOf' block in the schema, so remove it - if not json_schema["allOf"]: - del json_schema["allOf"] - - # If no config value and SchemaGenerator was initialized with - # a JSON-LD path, construct - if self.jsonld_path is not None: - self.jsonld_path_root, jsonld_ext = os.path.splitext(self.jsonld_path) - prefix = self.jsonld_path_root - prefix_root, prefix_ext = os.path.splitext(prefix) - if prefix_ext == ".model": - prefix = prefix_root - json_schema_log_file = f"{prefix}.{source_node}.schema.json" - ''' +class DataModelJSONSchema: + def __init__( + self, + jsonld_path: str, + graph: nx.MultiDiGraph, + ): + # TODO: Change jsonld_path to data_model_path (can work with CSV too) + self.jsonld_path = jsonld_path + self.graph = graph # Graph would be fully made at this point. + self.DME = DataModelGraphExplorer(self.graph) + self.dmr = DataModelRelationships() + self.rel_dict = self.dmr.relationships_dictionary + + def get_array_schema( + self, node_range: List[str], node_name: str, blank=False + ) -> Dict[str, Dict[str, List[str]]]: + """Add a list of nodes to the "enum" key in a given JSON schema object. + Allow a node to be mapped to any subset of the list + + Args: + node_name: Name of the "main" / "head" key in the JSON schema / object. + node_range: List of nodes to be added to the JSON object. + blank: If True, add empty node to end of node list. + If False, do not add empty node to end of node list. + + Returns: + JSON object with array validation rule. 
+ """ + + schema_node_range_array = { + node_name: { + "type": "array", + "items": {"enum": node_range + [""] if blank else node_range}, + "maxItems": len(node_range), + } + } + + return schema_node_range_array + + def get_non_blank_schema( + self, node_name: str + ) -> Dict[str, dict[str, Any]]: # can't define heterogenous Dict generic types + """Get a schema rule that does not allow null or empty values. + + Args: + node_name: Name of the node on which the schema rule is to be applied. + + Returns: + Schema rule as a JSON object. + """ + non_blank_schema = {node_name: {"not": {"type": "null"}, "minLength": 1}} + + return non_blank_schema + + def get_range_schema( + self, node_range: List[str], node_name: str, blank=False + ) -> Dict[str, Dict[str, List[str]]]: + """Add a list of nodes to the "enum" key in a given JSON schema object. + + Args: + node_name: Name of the "main" / "head" key in the JSON schema / object. + node_range: List of nodes to be added to the JSON object. + blank: If True, add empty node to end of node list. + If False, do not add empty node to end of node list. + + Returns: + JSON object with nodes. + """ + if blank: + schema_node_range = {node_name: {"enum": node_range + [""]}} + else: + schema_node_range = {node_name: {"enum": node_range}} + + return schema_node_range + + def get_json_validation_schema( + self, source_node: str, schema_name: str + ) -> Dict[str, dict[str, Any]]: + """ + Consolidated method that aims to gather dependencies and value constraints across terms / nodes in a schema.org schema and store them in a jsonschema /JSON Schema schema. + + It does so for any given node in the schema.org schema (recursively) using the given node as starting point in the following manner: + 1) Find all the nodes / terms this node depends on (which are required as "additional metadata" given this node is "required"). 
+ 2) Find all the allowable metadata values / nodes that can be assigned to a particular node (if such a constraint is specified on the schema). + + Args: + source_node: Node from which we can start recursive dependancy traversal (as mentioned above). + schema_name: Name assigned to JSON-LD schema (to uniquely identify it via URI when it is hosted on the Internet). + + Returns: + JSON Schema as a dictionary. + """ + json_schema = { + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "http://example.com/" + schema_name, + "title": schema_name, + "type": "object", + "properties": {}, + "required": [], + "allOf": [], + } + + nodes_to_process = ( + [] + ) # list of nodes to be checked for dependencies, starting with the source node + processed_nodes = ( + [] + ) # keep of track of nodes whose dependencies have been processed + reverse_dependencies = ( + {} + ) # maintain a map between conditional nodes and their dependencies (reversed) -- {dependency : conditional_node} + range_domain_map = ( + {} + ) # maintain a map between range nodes and their domain nodes {range_value : domain_value} + # the domain node is very likely the parentof ("parentOf" relationship) of the range node + + root_dependencies = self.DME.get_adjacent_nodes_by_relationship( + node_label=source_node, + relationship=self.rel_dict["requiresDependency"]["edge_key"], + ) + + # if root_dependencies is empty it means that a class with name 'source_node' exists + # in the schema, but it is not a valid component + if not root_dependencies: + raise ValueError(f"'{source_node}' is not a valid component in the schema.") + + nodes_to_process += root_dependencies + + process_node = nodes_to_process.pop(0) + + while process_node: + if not process_node in processed_nodes: + # node is being processed + node_is_processed = True + + node_range = self.DME.get_adjacent_nodes_by_relationship( + node_label=process_node, + relationship=self.rel_dict["rangeIncludes"]["edge_key"], + ) + + # get node range 
display name + node_range_d = self.DME.get_nodes_display_names(node_list=node_range) + + node_dependencies = self.DME.get_adjacent_nodes_by_relationship( + node_label=process_node, + relationship=self.rel_dict["requiresDependency"]["edge_key"], + ) + + # get process node display name + node_display_name = self.graph.nodes[process_node][ + self.rel_dict["displayName"]["node_label"] + ] + + # updating map between node and node's valid values + for n in node_range_d: + if not n in range_domain_map: + range_domain_map[n] = [] + range_domain_map[n].append(node_display_name) + + # can this node be map to the empty set (if required no; if not required yes) + # TODO: change "required" to different term, required may be a bit misleading (i.e. is the node required in the schema) + node_required = self.DME.get_node_required(node_label=process_node) + + # get any additional validation rules associated with this node (e.g. can this node be mapped to a list of other nodes) + node_validation_rules = self.DME.get_node_validation_rules( + node_display_name=node_display_name + ) + + if node_display_name in reverse_dependencies: + # if node has conditionals set schema properties and conditional dependencies + # set schema properties + if node_range: + # if process node has valid value range set it in schema properties + schema_valid_vals = self.get_range_schema( + node_range=node_range_d, + node_name=node_display_name, + blank=True, + ) + + if node_validation_rules: + # if this node has extra validation rules process them + # TODO: abstract this into its own validation rule constructor/generator module/class + if rule_in_rule_list("list", node_validation_rules): + # if this node can be mapped to a list of nodes + # set its schema accordingly + schema_valid_vals = self.get_array_schema( + node_range=node_range_d, + node_name=node_display_name, + blank=True, + ) + + else: + # otherwise, by default allow any values + schema_valid_vals = {node_display_name: {}} + + 
json_schema["properties"].update(schema_valid_vals) + + # set schema conditional dependencies + for node in reverse_dependencies[node_display_name]: + # set all of the conditional nodes that require this process node + + # get node domain if any + # ow this node is a conditional requirement + if node in range_domain_map: + domain_nodes = range_domain_map[node] + conditional_properties = {} + + for domain_node in domain_nodes: + # set range of conditional node schema + conditional_properties.update( + { + "properties": {domain_node: {"enum": [node]}}, + "required": [domain_node], + } + ) + + # given node conditional are satisfied, this process node (which is dependent on these conditionals) has to be set or not depending on whether it is required + if node_range: + dependency_properties = self.get_range_schema( + node_range=node_range_d, + node_name=node_display_name, + blank=not node_required, + ) + + if node_validation_rules: + if rule_in_rule_list( + "list", node_validation_rules + ): + # TODO: get_range_schema and get_range_schema have similar behavior - combine in one module + dependency_properties = ( + self.get_array_schema( + node_range=node_range_d, + node_name=node_display_name, + blank=not node_required, + ) + ) + + else: + if node_required: + dependency_properties = ( + self.get_non_blank_schema( + node_name=node_display_name + ) + ) + else: + dependency_properties = {node_display_name: {}} + schema_conditional_dependencies = { + "if": conditional_properties, + "then": { + "properties": dependency_properties, + "required": [node_display_name], + }, + } + + # update conditional-dependency rules in json schema + json_schema["allOf"].append( + schema_conditional_dependencies + ) + + else: + # node doesn't have conditionals + if node_required: + if node_range: + schema_valid_vals = self.get_range_schema( + node_range=node_range_d, + node_name=node_display_name, + blank=False, + ) + + if node_validation_rules: + # If there are valid values AND they are 
expected to be a list, + # reformat the Valid Values. + if rule_in_rule_list("list", node_validation_rules): + schema_valid_vals = self.get_array_schema( + node_range=node_range_d, + node_name=node_display_name, + blank=False, + ) + else: + schema_valid_vals = self.get_non_blank_schema( + node_name=node_display_name + ) + + json_schema["properties"].update(schema_valid_vals) + # add node to required fields + json_schema["required"] += [node_display_name] + + elif process_node in root_dependencies: + # node doesn't have conditionals and is not required; it belongs in the schema only if it is in root's dependencies + + if node_range: + schema_valid_vals = self.get_range_schema( + node_range=node_range_d, + node_name=node_display_name, + blank=True, + ) + + if node_validation_rules: + if rule_in_rule_list("list", node_validation_rules): + schema_valid_vals = self.get_array_schema( + node_range=node_range_d, + node_name=node_display_name, + blank=True, + ) + + else: + schema_valid_vals = {node_display_name: {}} + + json_schema["properties"].update(schema_valid_vals) + + else: + # node doesn't have conditionals and it is not required and it is not a root dependency + # the node doesn't belong in the schema + # do not add to processed nodes since its conditional may be traversed at a later iteration (though unlikely for most schemas we consider) + node_is_processed = False + + # add process node as a conditional to its dependencies + node_dependencies_d = self.DME.get_nodes_display_names( + node_list=node_dependencies + ) + + for dep in node_dependencies_d: + if not dep in reverse_dependencies: + reverse_dependencies[dep] = [] + + reverse_dependencies[dep].append(node_display_name) + + # add nodes found as dependencies and range of this processed node + # to the list of nodes to be processed + nodes_to_process += node_range + nodes_to_process += node_dependencies + + # if the node is processed add it to the processed nodes set + if node_is_processed: + 
processed_nodes.append(process_node) + + # if the list of nodes to process is not empty + # set the process node the next remaining node to process + if nodes_to_process: + process_node = nodes_to_process.pop(0) + else: + # no more nodes to process + # exit the loop + break + + logger.info("JSON schema successfully generated from schema.org schema!") + + # if no conditional dependencies were added we can't have an empty 'AllOf' block in the schema, so remove it + if not json_schema["allOf"]: + del json_schema["allOf"] + + # If no config value and SchemaGenerator was initialized with + # a JSON-LD path, construct + if self.jsonld_path is not None: + self.jsonld_path_root, jsonld_ext = os.path.splitext(self.jsonld_path) + prefix = self.jsonld_path_root + prefix_root, prefix_ext = os.path.splitext(prefix) + if prefix_ext == ".model": + prefix = prefix_root + json_schema_log_file = f"{prefix}.{source_node}.schema.json" + """ # Commenting out loggins since the JSON Schema file is not currently saved. 
logger.info( "The JSON schema file can be inspected by setting the following " @@ -352,5 +379,5 @@ def get_json_validation_schema(self, source_node: str, schema_name: str) -> Dict ) logger.info(f"JSON schema file log stored as {json_schema_log_file}") - ''' - return json_schema \ No newline at end of file + """ + return json_schema From e64e03a5ebf919ee1ba438fdcbb9662f84154010 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Oct 2023 21:00:20 -0700 Subject: [PATCH 190/239] run black on schematic/schemas/data_model_jsonld.py --- schematic/schemas/data_model_jsonld.py | 304 ++++++++++++++++--------- 1 file changed, 196 insertions(+), 108 deletions(-) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 2353d8f8c..346fe04c9 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -9,23 +9,34 @@ from schematic.schemas.data_model_graph import DataModelGraphExplorer from schematic.schemas.data_model_relationships import DataModelRelationships -from schematic.utils.schema_utils import get_label_from_display_name, convert_bool_to_str, strip_context +from schematic.utils.schema_utils import ( + get_label_from_display_name, + convert_bool_to_str, + strip_context, +) logging.basicConfig() logger = logging.getLogger(__name__) + @dataclass_json @dataclass class BaseTemplate: - magic_context: str = field(default_factory=lambda: {"bts": "http://schema.biothings.io/", - "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", - "rdfs": "http://www.w3.org/2000/01/rdf-schema#", - "schema": "http://schema.org/", - "xsd": "http://www.w3.org/2001/XMLSchema#", - }, - metadata=config(field_name="@context")) + magic_context: str = field( + default_factory=lambda: { + "bts": "http://schema.biothings.io/", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + "rdfs": "http://www.w3.org/2000/01/rdf-schema#", + "schema": "http://schema.org/", + "xsd": "http://www.w3.org/2001/XMLSchema#", + }, 
+ metadata=config(field_name="@context"), + ) magic_graph: str = field(default_factory=list, metadata=config(field_name="@graph")) - magic_id: str = field(default="http://schema.biothings.io/#0.1", metadata=config(field_name="@id")) + magic_id: str = field( + default="http://schema.biothings.io/#0.1", metadata=config(field_name="@id") + ) + @dataclass_json @dataclass @@ -34,12 +45,25 @@ class PropertyTemplate: magic_type: str = field(default="rdf:Property", metadata=config(field_name="@type")) magic_comment: str = field(default="", metadata=config(field_name="rdfs:comment")) magic_label: str = field(default="", metadata=config(field_name="rdfs:label")) - magic_domain_includes: list = field(default_factory=list, metadata=config(field_name="schema:domainIncludes")) - magic_range_includes: list = field(default_factory=list, metadata=config(field_name="schema:rangeIncludes")) - magic_isPartOf: dict = field(default_factory=dict, metadata=config(field_name="schema:isPartOf")) - magic_displayName:str = field(default="", metadata=config(field_name="sms:displayName")) - magic_required: str = field(default="sms:false", metadata=config(field_name="sms:required")) - magic_validationRules: list = field(default_factory=list, metadata=config(field_name="sms:validationRules")) + magic_domain_includes: list = field( + default_factory=list, metadata=config(field_name="schema:domainIncludes") + ) + magic_range_includes: list = field( + default_factory=list, metadata=config(field_name="schema:rangeIncludes") + ) + magic_isPartOf: dict = field( + default_factory=dict, metadata=config(field_name="schema:isPartOf") + ) + magic_displayName: str = field( + default="", metadata=config(field_name="sms:displayName") + ) + magic_required: str = field( + default="sms:false", metadata=config(field_name="sms:required") + ) + magic_validationRules: list = field( + default_factory=list, metadata=config(field_name="sms:validationRules") + ) + @dataclass_json @dataclass @@ -48,23 +72,40 @@ class 
ClassTemplate: magic_type: str = field(default="rdfs:Class", metadata=config(field_name="@type")) magic_comment: str = field(default="", metadata=config(field_name="rdfs:comment")) magic_label: str = field(default="", metadata=config(field_name="rdfs:label")) - magic_subClassOf: list = field(default_factory=list, metadata=config(field_name="rdfs:subClassOf")) - magic_range_includes: list = field(default_factory=list, metadata=config(field_name="schema:rangeIncludes")) - magic_isPartOf: dict = field(default_factory=dict, metadata=config(field_name="schema:isPartOf")) - magic_displayName:str = field(default="", metadata=config(field_name="sms:displayName")) - magic_required: str = field(default="sms:false", metadata=config(field_name="sms:required")) - magic_requiresDependency: list = field(default_factory=list, metadata=config(field_name="sms:requiresDependency")) - magic_requiresComponent: list = field(default_factory=list, metadata=config(field_name="sms:requiresComponent")) - magic_validationRules: list = field(default_factory=list, metadata=config(field_name="sms:validationRules")) + magic_subClassOf: list = field( + default_factory=list, metadata=config(field_name="rdfs:subClassOf") + ) + magic_range_includes: list = field( + default_factory=list, metadata=config(field_name="schema:rangeIncludes") + ) + magic_isPartOf: dict = field( + default_factory=dict, metadata=config(field_name="schema:isPartOf") + ) + magic_displayName: str = field( + default="", metadata=config(field_name="sms:displayName") + ) + magic_required: str = field( + default="sms:false", metadata=config(field_name="sms:required") + ) + magic_requiresDependency: list = field( + default_factory=list, metadata=config(field_name="sms:requiresDependency") + ) + magic_requiresComponent: list = field( + default_factory=list, metadata=config(field_name="sms:requiresComponent") + ) + magic_validationRules: list = field( + default_factory=list, metadata=config(field_name="sms:validationRules") + ) + 
class DataModelJsonLD(object): - ''' + """ #Interface to JSONLD_object - ''' + """ - def __init__(self, Graph: nx.MultiDiGraph, output_path:str = ''): + def __init__(self, Graph: nx.MultiDiGraph, output_path: str = ""): # Setup - self.graph = Graph # Graph would be fully made at this point. + self.graph = Graph # Graph would be fully made at this point. self.dmr = DataModelRelationships() self.rel_dict = self.dmr.relationships_dictionary self.DME = DataModelGraphExplorer(self.graph) @@ -80,7 +121,9 @@ def __init__(self, Graph: nx.MultiDiGraph, output_path:str = ''): class_template = ClassTemplate() self.class_template = json.loads(class_template.to_json()) - def get_edges_associated_with_node(self, node:str)->List[tuple[str,str,dict[str,int]]]: + def get_edges_associated_with_node( + self, node: str + ) -> List[tuple[str, str, dict[str, int]]]: """Retrieve all edges traveling in and out of a node. Args: node, str: Label of node in the graph to look for assiciated edges @@ -88,13 +131,13 @@ def get_edges_associated_with_node(self, node:str)->List[tuple[str,str,dict[str, node_edges, list: List of Tuples of edges associated with the given node, tuple contains the two nodes, plus the weight dict associated with the edge connection. """ node_edges = list(self.graph.in_edges(node, data=True)) - node_edges.extend(list(self.graph.out_edges(node,data=True))) + node_edges.extend(list(self.graph.out_edges(node, data=True))) return node_edges - def add_edge_rels_to_template(self, template: dict, rel_vals:dict, node:str): + def add_edge_rels_to_template(self, template: dict, rel_vals: dict, node: str): """ Args: - template, dict: single class or property JSONLD template that is in the process of being filled. + template, dict: single class or property JSONLD template that is in the process of being filled. rel_vals, dict: sub relationship dict for a given relationship (contains informtion like, 'edge_rel', 'jsonld_key' etc..) 
node, str: node whose edge information is presently being added to the JSONLD Returns: @@ -104,12 +147,11 @@ def add_edge_rels_to_template(self, template: dict, rel_vals:dict, node:str): # Get node pairs and weights for each edge for node_1, node_2, weight in node_edges: - # Retrieve the relationship(s) and related info between the two nodes node_edge_relationships = self.graph[node_1][node_2] # Get the relationship edge key - edge_key = rel_vals['edge_key'] + edge_key = rel_vals["edge_key"] # Check if edge_key is even one of the relationships for this node pair. if edge_key in node_edge_relationships: @@ -118,51 +160,65 @@ def add_edge_rels_to_template(self, template: dict, rel_vals:dict, node:str): # If the relationship defined and edge_key if relationship == edge_key: # TODO: rewrite to use edge_dir - if edge_key in ['domainIncludes', 'parentOf']: + if edge_key in ["domainIncludes", "parentOf"]: if node_2 == node: # Make sure the key is in the template (differs between properties and classes) - if rel_vals['jsonld_key'] in template.keys(): - node_1_id = {'@id': 'bts:'+node_1} + if rel_vals["jsonld_key"] in template.keys(): + node_1_id = {"@id": "bts:" + node_1} # TODO Move this to a helper function to clear up. 
- if (isinstance(template[rel_vals['jsonld_key']], list) and - node_1_id not in template[rel_vals['jsonld_key']]): - template[rel_vals['jsonld_key']].append(node_1_id) + if ( + isinstance( + template[rel_vals["jsonld_key"]], list + ) + and node_1_id + not in template[rel_vals["jsonld_key"]] + ): + template[rel_vals["jsonld_key"]].append( + node_1_id + ) else: - template[rel_vals['jsonld_key']] == node_1 + template[rel_vals["jsonld_key"]] == node_1 else: if node_1 == node: # Make sure the key is in the template (differs between properties and classes) - if rel_vals['jsonld_key'] in template.keys(): - node_2_id = {'@id': 'bts:'+node_2} + if rel_vals["jsonld_key"] in template.keys(): + node_2_id = {"@id": "bts:" + node_2} # TODO Move this to a helper function to clear up. - if (isinstance(template[rel_vals['jsonld_key']], list) and - node_2_id not in template[rel_vals['jsonld_key']]): - template[rel_vals['jsonld_key']].append(node_2_id) + if ( + isinstance( + template[rel_vals["jsonld_key"]], list + ) + and node_2_id + not in template[rel_vals["jsonld_key"]] + ): + template[rel_vals["jsonld_key"]].append( + node_2_id + ) else: - template[rel_vals['jsonld_key']] == node_2 + template[rel_vals["jsonld_key"]] == node_2 return template def add_node_info_to_template(self, template, rel_vals, node): - """ For a given node and relationship, add relevant value to template + """For a given node and relationship, add relevant value to template Args: - template, dict: single class or property JSONLD template that is in the process of being filled. + template, dict: single class or property JSONLD template that is in the process of being filled. rel_vals, dict: sub relationship dict for a given relationship (contains informtion like, 'edge_rel', 'jsonld_key' etc..) 
node, str: node whose information is presently being added to the JSONLD Returns: template, dict: single class or property JSONLD template that is in the process of being filled, and now has had additional node information added. """ # Get label for relationship used in the graph - node_label = rel_vals['node_label'] - + node_label = rel_vals["node_label"] + # Get recorded info for current node, and the attribute type node_info = nx.get_node_attributes(self.graph, node_label)[node] - + # Add this information to the template - template[rel_vals['jsonld_key']] = node_info + template[rel_vals["jsonld_key"]] = node_info return template - def fill_entry_template(self, template:dict, node:str)->dict: - """ Fill in a blank JSONLD template with information for each node. All relationships are filled from the graph, based on the type of information (node or edge) + def fill_entry_template(self, template: dict, node: str) -> dict: + """Fill in a blank JSONLD template with information for each node. All relationships are filled from the graph, based on the type of information (node or edge) Args: template, dict: empty class or property template to be filled with information for the given node. node, str: target node to fill the template out for. @@ -173,32 +229,39 @@ def fill_entry_template(self, template:dict, node:str)->dict: # For each field in template fill out with information from the graph for rel, rel_vals in data_model_relationships.items(): - - key_context, key_rel = strip_context(context_value=rel_vals['jsonld_key']) + key_context, key_rel = strip_context(context_value=rel_vals["jsonld_key"]) # Fill in the JSONLD template for this node, with data from the graph by looking up the nodes edge relationships, and the value information attached to the node. 
- + # Fill edge information (done per edge type) - if rel_vals['edge_rel']: - template = self.add_edge_rels_to_template(template=template, rel_vals=rel_vals, node=node) + if rel_vals["edge_rel"]: + template = self.add_edge_rels_to_template( + template=template, rel_vals=rel_vals, node=node + ) # Fill in node value information else: - template = self.add_node_info_to_template(template=template, rel_vals=rel_vals, node=node) - - + template = self.add_node_info_to_template( + template=template, rel_vals=rel_vals, node=node + ) + # Clean up template - template = self.clean_template(template=template, - data_model_relationships=data_model_relationships, - ) + template = self.clean_template( + template=template, + data_model_relationships=data_model_relationships, + ) # Reorder lists based on weights: - template = self.reorder_template_entries(template=template,) + template = self.reorder_template_entries( + template=template, + ) # Add contexts to certain values - template = self.add_contexts_to_entries(template=template,) + template = self.add_contexts_to_entries( + template=template, + ) return template - def add_contexts_to_entries(self, template:dict) -> dict: + def add_contexts_to_entries(self, template: dict) -> dict: """ Args: template, dict: JSONLD template that has been filled up to the current node, with information @@ -207,56 +270,68 @@ def add_contexts_to_entries(self, template:dict) -> dict: Note: This will likely need to be modified when Contexts are truly added to the model """ for jsonld_key, entry in template.items(): - # Retrieve the relationships key using the jsonld_key rel_key = [] - + for rel, rel_vals in self.rel_dict.items(): - if 'jsonld_key' in rel_vals and jsonld_key == rel_vals['jsonld_key']: + if "jsonld_key" in rel_vals and jsonld_key == rel_vals["jsonld_key"]: rel_key.append(rel) if rel_key: - rel_key=rel_key[0] + rel_key = rel_key[0] # If the current relationship can be defined with a 'node_attr_dict' - if 'node_attr_dict' in 
self.rel_dict[rel_key].keys(): + if "node_attr_dict" in self.rel_dict[rel_key].keys(): try: # if possible pull standard function to get node information - rel_func = self.rel_dict[rel_key]['node_attr_dict']['standard'] + rel_func = self.rel_dict[rel_key]["node_attr_dict"]["standard"] except: # if not pull default function to get node information - rel_func = self.rel_dict[rel_key]['node_attr_dict']['default'] + rel_func = self.rel_dict[rel_key]["node_attr_dict"]["default"] # Add appropritae contexts that have been removed in previous steps (for JSONLD) or did not exist to begin with (csv) - if rel_key == 'id' and rel_func == get_label_from_display_name and 'bts' not in str(template[jsonld_key]).lower(): - template[jsonld_key] = 'bts:' + template[jsonld_key] - elif rel_key == 'required' and rel_func == convert_bool_to_str and 'sms' not in str(template[jsonld_key]).lower(): - template[jsonld_key] = 'sms:' + str(template[jsonld_key]).lower() + if ( + rel_key == "id" + and rel_func == get_label_from_display_name + and "bts" not in str(template[jsonld_key]).lower() + ): + template[jsonld_key] = "bts:" + template[jsonld_key] + elif ( + rel_key == "required" + and rel_func == convert_bool_to_str + and "sms" not in str(template[jsonld_key]).lower() + ): + template[jsonld_key] = ( + "sms:" + str(template[jsonld_key]).lower() + ) return template def clean_template(self, template: dict, data_model_relationships: dict) -> dict: - '''Get rid of empty k:v pairs. Fill with a default if specified in the relationships dictionary. + """Get rid of empty k:v pairs. Fill with a default if specified in the relationships dictionary. Args: template, dict: JSONLD template for a single entry, keys specified in property and class templates. data_model_relationships, dict: dictionary containing information for each relationship type supported. 
Returns: template: JSONLD template where unfilled entries have been removed, or filled with default depending on specifications in the relationships dictionary. - ''' + """ for rels in data_model_relationships.values(): # Get the current relationships, jsonld key - relationship_jsonld_key = rels['jsonld_key'] + relationship_jsonld_key = rels["jsonld_key"] # Check if the relationship_relationship_key is part of the template, and if it is, look to see if it has an entry - if relationship_jsonld_key in template.keys() and not template[rels['jsonld_key']]: + if ( + relationship_jsonld_key in template.keys() + and not template[rels["jsonld_key"]] + ): # If there is no value recorded, fill out the template with the default relationship value (if recorded.) - if 'jsonld_default' in rels.keys(): - template[relationship_jsonld_key] = rels['jsonld_default'] + if "jsonld_default" in rels.keys(): + template[relationship_jsonld_key] = rels["jsonld_default"] else: # If there is no default specified in the relationships dictionary, delete the empty value from the template. del template[relationship_jsonld_key] return template - def reorder_template_entries(self, template:dict) -> dict: - '''In JSONLD some classes or property keys have list values. We want to make sure these lists are ordered according to the order supplied by the user. + def reorder_template_entries(self, template: dict) -> dict: + """In JSONLD some classes or property keys have list values. We want to make sure these lists are ordered according to the order supplied by the user. This will look specically in lists and reorder those. Args: template, dict: JSONLD template for a single entry, keys specified in property and class templates. @@ -264,58 +339,71 @@ def reorder_template_entries(self, template:dict) -> dict: template, dict: list entries re-ordered to match user supplied order. 
Note: User order only matters for nodes that are also attributes - ''' - template_label = template['rdfs:label'] + """ + template_label = template["rdfs:label"] for jsonld_key, entry in template.items(): # Make sure dealing with an edge relationship: - is_edge = ['True' for rel_key, rel_vals in self.rel_dict.items() if rel_vals['jsonld_key']==jsonld_key if rel_vals['edge_rel'] == True] - - #if the entry is of type list and theres more than one value in the list attempt to reorder - if is_edge and isinstance(entry, list) and len(entry)>1: + is_edge = [ + "True" + for rel_key, rel_vals in self.rel_dict.items() + if rel_vals["jsonld_key"] == jsonld_key + if rel_vals["edge_rel"] == True + ] + + # if the entry is of type list and theres more than one value in the list attempt to reorder + if is_edge and isinstance(entry, list) and len(entry) > 1: # Get edge key from data_model_relationships using the jsonld_key: - key, edge_key = [(rel_key, rel_vals['edge_key']) for rel_key, rel_vals in self.rel_dict.items() if jsonld_key == rel_vals['jsonld_key']][0] + key, edge_key = [ + (rel_key, rel_vals["edge_key"]) + for rel_key, rel_vals in self.rel_dict.items() + if jsonld_key == rel_vals["jsonld_key"] + ][0] # Order edges - sorted_edges = self.DME.get_ordered_entry(key=key, source_node_label=template_label) - edge_weights_dict={edge:i for i, edge in enumerate(sorted_edges)} - ordered_edges = [0]*len(edge_weights_dict.keys()) - for edge,normalized_weight in edge_weights_dict.items(): - ordered_edges[normalized_weight] = {'@id': 'bts:' + edge} - + sorted_edges = self.DME.get_ordered_entry( + key=key, source_node_label=template_label + ) + edge_weights_dict = {edge: i for i, edge in enumerate(sorted_edges)} + ordered_edges = [0] * len(edge_weights_dict.keys()) + for edge, normalized_weight in edge_weights_dict.items(): + ordered_edges[normalized_weight] = {"@id": "bts:" + edge} + # Throw an error if ordered_edges does not get fully filled as expected. 
if 0 in ordered_edges: - logger.error("There was an issue getting values to match order specified in the data model, please submit a help request.") + logger.error( + "There was an issue getting values to match order specified in the data model, please submit a help request." + ) template[jsonld_key] = ordered_edges return template def generate_jsonld_object(self): - '''Create the JSONLD object. + """Create the JSONLD object. Returns: jsonld_object, dict: JSONLD object containing all nodes and related information - ''' + """ # Get properties. properties = self.DME.find_properties() # Get JSONLD Template json_ld_template = self.base_jsonld_template - + # Iterativly add graph nodes to json_ld_template as properties or classes for node in self.graph.nodes: if node in properties: # Get property template property_template = copy.deepcopy(self.property_template) - obj = self.fill_entry_template(template = property_template, node = node) + obj = self.fill_entry_template(template=property_template, node=node) else: # Get class template class_template = copy.deepcopy(self.class_template) - obj = self.fill_entry_template(template = class_template, node = node) - json_ld_template['@graph'].append(obj) + obj = self.fill_entry_template(template=class_template, node=node) + json_ld_template["@graph"].append(obj) return json_ld_template + def convert_graph_to_jsonld(Graph): # Make the JSONLD object data_model_jsonld_converter = DataModelJsonLD(Graph=Graph) jsonld_dm = data_model_jsonld_converter.generate_jsonld_object() return jsonld_dm - From 8f35606dbd01587034ec95173a62f335ccf5ef6c Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Oct 2023 21:01:01 -0700 Subject: [PATCH 191/239] run black on schematic/schemas/data_model_nodes.py --- schematic/schemas/data_model_nodes.py | 154 +++++++++++++++++--------- 1 file changed, 101 insertions(+), 53 deletions(-) diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index ae162dda4..a1681c469 
100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -3,24 +3,35 @@ from rdflib import Namespace from typing import Any, Dict, Optional, Text, List, Callable -from schematic.schemas.data_model_relationships import ( - DataModelRelationships - ) +from schematic.schemas.data_model_relationships import DataModelRelationships -from schematic.utils.schema_utils import get_label_from_display_name, get_attribute_display_name_from_label, convert_bool_to_str, parse_validation_rules +from schematic.utils.schema_utils import ( + get_label_from_display_name, + get_attribute_display_name_from_label, + convert_bool_to_str, + parse_validation_rules, +) from schematic.utils.validate_rules_utils import validate_schema_rules from schematic.schemas.curie import uri2curie, curie2uri -class DataModelNodes(): +class DataModelNodes: def __init__(self, attribute_relationships_dict): - self.namespaces = dict(rdf=Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#")) + self.namespaces = dict( + rdf=Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#") + ) self.data_model_relationships = DataModelRelationships() - self.value_relationships = self.data_model_relationships.retreive_rel_headers_dict(edge=False) - self.edge_relationships_dictionary = self.data_model_relationships.retreive_rel_headers_dict(edge=True) - self.properties = self.get_data_model_properties(attr_rel_dict=attribute_relationships_dict) + self.value_relationships = ( + self.data_model_relationships.retreive_rel_headers_dict(edge=False) + ) + self.edge_relationships_dictionary = ( + self.data_model_relationships.retreive_rel_headers_dict(edge=True) + ) + self.properties = self.get_data_model_properties( + attr_rel_dict=attribute_relationships_dict + ) # retrieve a list of relationship types that will produce nodes. 
- self.node_relationships =list(self.edge_relationships_dictionary.values()) + self.node_relationships = list(self.edge_relationships_dictionary.values()) def gather_nodes(self, attr_info: tuple) -> list: """Take in a tuple containing attriute name and relationship dictionary, and find all nodes defined in attribute information. @@ -34,18 +45,17 @@ def gather_nodes(self, attr_info: tuple) -> list: # Extract attribute and relationship dictionary attribute, relationship = attr_info - relationships = relationship['Relationships'] + relationships = relationship["Relationships"] nodes = [] if attribute not in nodes: nodes.append(attribute) for rel in self.node_relationships: if rel in relationships.keys(): - nodes.extend([node.strip() - for node in relationships[rel]]) + nodes.extend([node.strip() for node in relationships[rel]]) return nodes - def gather_all_nodes_in_model(self, attr_rel_dict: dict)->list: + def gather_all_nodes_in_model(self, attr_rel_dict: dict) -> list: """Gather all nodes in the data model, in order. Args: attr_rel_dict, dict: generated in data_model_parser @@ -74,11 +84,11 @@ def get_rel_node_dict_info(self, relationship: str) -> Optional[tuple[str, dict] rel_node_dict, dict: node_attr_dict, from relationships dictionary for a given relationship TODO: Move to data_model_relationships. 
""" - for k,v in self.data_model_relationships.relationships_dictionary.items(): + for k, v in self.data_model_relationships.relationships_dictionary.items(): if k == relationship: - if 'node_attr_dict' in v.keys(): - rel_key = v['node_label'] - rel_node_dict = v['node_attr_dict'] + if "node_attr_dict" in v.keys(): + rel_key = v["node_label"] + rel_node_dict = v["node_attr_dict"] return rel_key, rel_node_dict def get_data_model_properties(self, attr_rel_dict: dict) -> list: @@ -91,14 +101,14 @@ def get_data_model_properties(self, attr_rel_dict: dict) -> list: Returns: properties,list: properties defined in the data model """ - properties=[] + properties = [] for attribute, relationships in attr_rel_dict.items(): - if 'Properties' in relationships['Relationships'].keys(): - properties.extend(relationships['Relationships']['Properties']) + if "Properties" in relationships["Relationships"].keys(): + properties.extend(relationships["Relationships"]["Properties"]) properties = list(set(properties)) return properties - def get_entry_type(self, node_display_name:str) -> str: + def get_entry_type(self, node_display_name: str) -> str: """Get the entry type of the node, property or class. Args: node_display_name, str: display name of target node. @@ -106,50 +116,64 @@ def get_entry_type(self, node_display_name:str) -> str: entry_type, str: returns 'property' or 'class' based on data model specifications. """ if node_display_name in self.properties: - entry_type = 'property' + entry_type = "property" else: - entry_type = 'class' + entry_type = "class" return entry_type - def run_rel_functions(self, rel_func:callable, node_display_name:str='', key:str='', attr_relationships={}, csv_header='', entry_type=''): - ''' This function exists to centralzie handling of functions for filling out node information, makes sure all the proper parameters are passed to each function. 
+ def run_rel_functions( + self, + rel_func: callable, + node_display_name: str = "", + key: str = "", + attr_relationships={}, + csv_header="", + entry_type="", + ): + """This function exists to centralzie handling of functions for filling out node information, makes sure all the proper parameters are passed to each function. Args: rel_func, callable: Function to call to get information to attach to the node node_display_name, str: node display name key, str: relationship key attr_relationships, dict: relationships portion of attributes_relationships dictionary csv_header, str: csv header - entry_type, str: 'class' or 'property' defines how + entry_type, str: 'class' or 'property' defines how Returns: Outputs of specified rel_func (relationship function) - + For legacy: elif key == 'id' and rel_func == get_label_from_display_name: func_output = get_label_from_display_name(display_name =node_display_name, entry_type=entry_type) - ''' + """ if rel_func == get_attribute_display_name_from_label: - return get_attribute_display_name_from_label(node_display_name, attr_relationships) - + return get_attribute_display_name_from_label( + node_display_name, attr_relationships + ) + elif rel_func == parse_validation_rules: return parse_validation_rules(attr_relationships[csv_header]) elif rel_func == get_label_from_display_name: - return get_label_from_display_name(display_name =node_display_name, entry_type=entry_type) - + return get_label_from_display_name( + display_name=node_display_name, entry_type=entry_type + ) + elif rel_func == convert_bool_to_str: if type(attr_relationships[csv_header]) == str: - if attr_relationships[csv_header].lower() == 'true': + if attr_relationships[csv_header].lower() == "true": return True - elif attr_relationships[csv_header].lower() == 'false': + elif attr_relationships[csv_header].lower() == "false": return False - + elif type(attr_relationships[csv_header]) == bool: return attr_relationships[csv_header] - + else: # Raise Error if the 
rel_func provided is not captured. - raise ValueError(f"The function provided ({rel_func}) to define the relationship {key} is not captured in the function run_rel_functions, please update.") + raise ValueError( + f"The function provided ({rel_func}) to define the relationship {key} is not captured in the function run_rel_functions, please update." + ) def generate_node_dict(self, node_display_name: str, attr_rel_dict: dict) -> dict: """Gather information to be attached to each node. @@ -173,38 +197,63 @@ def generate_node_dict(self, node_display_name: str, attr_rel_dict: dict) -> dic # Determine if property or class entry_type = self.get_entry_type(node_display_name=node_display_name) - + # If the node is an attribute, find its relationships. attr_relationships = {} if node_display_name in attr_rel_dict.keys(): - attr_relationships = attr_rel_dict[node_display_name]['Relationships'] - + attr_relationships = attr_rel_dict[node_display_name]["Relationships"] + # Initialize node_dict node_dict = {} # Look through relationship types that represent values (i.e. do not define edges) for key, csv_header in self.value_relationships.items(): - # Get key and defalt values current relationship type. rel_key, rel_node_dict = self.get_rel_node_dict_info(key) # If we have information to add about this particular node, get it if csv_header in attr_relationships.keys(): # Check if the 'standard' specifies calling a function. - if 'standard' in rel_node_dict.keys() and isfunction(rel_node_dict['standard']): - # Add to node_dict The value comes from the standard function call. - node_dict.update({rel_key: self.run_rel_functions(rel_node_dict['standard'], node_display_name=node_display_name, key=key, attr_relationships=attr_relationships, csv_header=csv_header, entry_type=entry_type)}) + if "standard" in rel_node_dict.keys() and isfunction( + rel_node_dict["standard"] + ): + # Add to node_dict The value comes from the standard function call. 
+ node_dict.update( + { + rel_key: self.run_rel_functions( + rel_node_dict["standard"], + node_display_name=node_display_name, + key=key, + attr_relationships=attr_relationships, + csv_header=csv_header, + entry_type=entry_type, + ) + } + ) else: # For standard entries, get information from attr_relationship dictionary node_dict.update({rel_key: attr_relationships[csv_header]}) # else, add default values - else: + else: # Check if the default specifies calling a function. - if 'default' in rel_node_dict.keys() and isfunction(rel_node_dict['default']): - node_dict.update({rel_key: self.run_rel_functions(rel_node_dict['default'], node_display_name=node_display_name, key=key, attr_relationships=attr_relationships, csv_header=csv_header, entry_type=entry_type)}) + if "default" in rel_node_dict.keys() and isfunction( + rel_node_dict["default"] + ): + node_dict.update( + { + rel_key: self.run_rel_functions( + rel_node_dict["default"], + node_display_name=node_display_name, + key=key, + attr_relationships=attr_relationships, + csv_header=csv_header, + entry_type=entry_type, + ) + } + ) else: # Set value to defaults. - node_dict.update({rel_key: rel_node_dict['default']}) + node_dict.update({rel_key: rel_node_dict["default"]}) return node_dict @@ -216,10 +265,9 @@ def generate_node(self, G: nx.MultiDiGraph, node_dict: dict) -> nx.MultiDiGraph: Returns: G, nx.MultiDigraph: networkx multidigraph object, that has had an additional node added to it. """ - G.add_node(node_dict['label'], **node_dict) + G.add_node(node_dict["label"], **node_dict) return G def edit_node(self): - """Stub for future node editor. 
- """ - return \ No newline at end of file + """Stub for future node editor.""" + return From ef6252abfc2b76ba04bd7b40a52d68d9133edae0 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Oct 2023 21:01:52 -0700 Subject: [PATCH 192/239] run black on schematic/schemas/data_model_parser.py --- schematic/schemas/data_model_parser.py | 189 ++++++++++++++----------- 1 file changed, 106 insertions(+), 83 deletions(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 60c2a49d7..2134dc7f1 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -7,24 +7,24 @@ from schematic.utils.io_utils import load_json from schematic.utils.schema_utils import attr_dict_template -from schematic.schemas.data_model_relationships import ( - DataModelRelationships - ) +from schematic.schemas.data_model_relationships import DataModelRelationships from schematic import LOADER logger = logging.getLogger("Synapse storage") -class DataModelParser(): - ''' + +class DataModelParser: + """ This class takes in a path to a data model and will convert it to an - attributes:relationship dictionarythat can then be further converted into a graph data model. + attributes:relationship dictionarythat can then be further converted into a graph data model. Other data model types may be added in the future. - ''' + """ + def __init__( self, path_to_data_model: str, - ) -> None: + ) -> None: """ Args: path_to_data_model, str: path to data model. @@ -44,29 +44,31 @@ def _get_base_schema_path(self, base_schema: str = None) -> str: base_schema_path: Path to base schema based on provided argument. 
""" biothings_schema_path = LOADER.filename("data_models/biothings.model.jsonld") - self.base_schema_path = biothings_schema_path if base_schema is None else base_schema + self.base_schema_path = ( + biothings_schema_path if base_schema is None else base_schema + ) return self.base_schema_path def get_model_type(self) -> str: - '''Parses the path to the data model to extract the extension and determine the data model type. + """Parses the path to the data model to extract the extension and determine the data model type. Args: path_to_data_model, str: path to data model Returns: str: uppercase, data model file extension. Note: Consider moving this to Utils. - ''' - return pathlib.Path(self.path_to_data_model).suffix.replace('.', '').upper() + """ + return pathlib.Path(self.path_to_data_model).suffix.replace(".", "").upper() - def parse_base_model(self)-> Dict: - '''Parse base data model that new model could be built upon. + def parse_base_model(self) -> Dict: + """Parse base data model that new model could be built upon. Returns: base_model, dict: {Attribute Display Name: { Relationships: { CSV Header: Value}}} Note: Not configured yet to successfully parse biothings. - ''' + """ # Determine base schema path base_model_path = self._get_base_schema_path(self.base_schema_path) @@ -76,8 +78,8 @@ def parse_base_model(self)-> Dict: base_model = jsonld_parser.parse_jsonld_model(base_model_path) return base_model - def parse_model(self)->Dict[str, dict[str, Any]]: - '''Given a data model type, instantiate and call the appropriate data model parser. + def parse_model(self) -> Dict[str, dict[str, Any]]: + """Given a data model type, instantiate and call the appropriate data model parser. Returns: model_dict, dict: {Attribute Display Name: { @@ -86,35 +88,42 @@ def parse_model(self)->Dict[str, dict[str, Any]]: Raises: Value Error if an incorrect model type is passed. Note: in future will add base model parsing in this step too and extend new model off base model. 
- ''' - #base_model = self.parse_base_model() + """ + # base_model = self.parse_base_model() # Call appropriate data model parser and return parsed model. - if self.model_type == 'CSV': + if self.model_type == "CSV": csv_parser = DataModelCSVParser() model_dict = csv_parser.parse_csv_model(self.path_to_data_model) - elif self.model_type == 'JSONLD': + elif self.model_type == "JSONLD": jsonld_parser = DataModelJSONLDParser() model_dict = jsonld_parser.parse_jsonld_model(self.path_to_data_model) else: - raise ValueError(f"Schematic only accepts models of type CSV or JSONLD, you provided a model type {self.model_type}, please resubmit in the proper format.") + raise ValueError( + f"Schematic only accepts models of type CSV or JSONLD, you provided a model type {self.model_type}, please resubmit in the proper format." + ) return model_dict - -class DataModelCSVParser(): - def __init__( - self - ): + + +class DataModelCSVParser: + def __init__(self): # Instantiate DataModelRelationships self.dmr = DataModelRelationships() # Load relationships dictionary. self.rel_dict = self.dmr.define_data_model_relationships() # Get edge relationships - self.edge_relationships_dictionary = self.dmr.retreive_rel_headers_dict(edge=True) + self.edge_relationships_dictionary = self.dmr.retreive_rel_headers_dict( + edge=True + ) # Load required csv headers self.required_headers = self.dmr.define_required_csv_headers() # Get the type for each value that needs to be submitted. # using csv_headers as keys to match required_headers/relationship_types - self.rel_val_types = {v['csv_header']:v['type']for k, v in self.rel_dict.items() if 'type' in v.keys()} + self.rel_val_types = { + v["csv_header"]: v["type"] + for k, v in self.rel_dict.items() + if "type" in v.keys() + } def check_schema_definition(self, model_df: pd.DataFrame) -> bool: """Checks if a schema definition data frame contains the right required headers. 
@@ -140,14 +149,14 @@ def check_schema_definition(self, model_df: pd.DataFrame) -> bool: ) return - def parse_entry(self, attr:dict, relationship:str)->Any: - '''Parse attr entry baed on type + def parse_entry(self, attr: dict, relationship: str) -> Any: + """Parse attr entry baed on type Args: attr, dict: single row of a csv model in dict form, where only the required headers are keys. Values are the entries under each header. relationship, str: one of the header relationships to parse the entry of. Returns: parsed_rel_entry, any: parsed entry for downstream processing based on the entry type. - ''' + """ rel_val_type = self.rel_val_types[relationship] # Parse entry based on type: @@ -156,59 +165,66 @@ def parse_entry(self, attr:dict, relationship:str)->Any: parsed_rel_entry = attr[relationship] # Move strings to list if they are comma separated. Schema order is preserved. elif rel_val_type == list: - parsed_rel_entry = attr[relationship].strip().split(',') + parsed_rel_entry = attr[relationship].strip().split(",") parsed_rel_entry = [r.strip() for r in parsed_rel_entry] # Convert value string if dictated by rel_val_type, strip whitespace. elif rel_val_type == str: parsed_rel_entry = str(attr[relationship]).strip() else: - raise ValueError("The value type recorded for this relationship, is not currently supported for CSV parsing. Please check with your DCC.") + raise ValueError( + "The value type recorded for this relationship, is not currently supported for CSV parsing. Please check with your DCC." + ) return parsed_rel_entry - def gather_csv_attributes_relationships(self, model_df: pd.DataFrame) -> Dict[str, dict[str, Any]]: - '''Parse csv into a attributes:relationshps dictionary to be used in downstream efforts. + def gather_csv_attributes_relationships( + self, model_df: pd.DataFrame + ) -> Dict[str, dict[str, Any]]: + """Parse csv into a attributes:relationshps dictionary to be used in downstream efforts. 
Args: model_df: pd.DataFrame, data model that has been loaded into pandas DataFrame. Returns: - attr_rel_dictionary: dict, + attr_rel_dictionary: dict, {Attribute Display Name: { Relationships: { CSV Header: Value}}} - ''' + """ # Check csv schema follows expectations. self.check_schema_definition(model_df) # get attributes from Attribute column attributes = model_df[list(self.required_headers)].to_dict("records") - + # Build attribute/relationship dictionary relationship_types = self.required_headers attr_rel_dictionary = {} for attr in attributes: - attribute_name=attr['Attribute'] - # Add attribute to dictionary + attribute_name = attr["Attribute"] + # Add attribute to dictionary attr_rel_dictionary.update(attr_dict_template(attribute_name)) # Fill in relationship info for each attribute. for relationship in relationship_types: if not pd.isnull(attr[relationship]): - parsed_rel_entry = self.parse_entry(attr=attr, relationship=relationship) - attr_rel_dictionary[attribute_name]['Relationships'].update({relationship:parsed_rel_entry}) + parsed_rel_entry = self.parse_entry( + attr=attr, relationship=relationship + ) + attr_rel_dictionary[attribute_name]["Relationships"].update( + {relationship: parsed_rel_entry} + ) return attr_rel_dictionary - def parse_csv_model( self, path_to_data_model: str, - ): - '''Load csv data model and parse into an attributes:relationships dictionary + ): + """Load csv data model and parse into an attributes:relationships dictionary Args: path_to_data_model, str: path to data model Returns: model_dict, dict:{Attribute Display Name: { Relationships: { CSV Header: Value}}} - ''' + """ # Load the csv data model to DF model_df = load_df(path_to_data_model, data_model=True) @@ -217,16 +233,17 @@ def parse_csv_model( return model_dict -class DataModelJSONLDParser(): + +class DataModelJSONLDParser: def __init__( self, - ): + ): # Instantiate DataModelRelationships self.dmr = DataModelRelationships() # Load relationships dictionary. 
self.rel_dict = self.dmr.define_data_model_relationships() - def parse_entry(self, rel_entry:any, id_jsonld_key:str)->Any: + def parse_entry(self, rel_entry: any, id_jsonld_key: str) -> Any: """Parse an input entry based on certain attributes Args: rel_entry: Given a single entry and relationship in a JSONLD data model, the recorded value @@ -236,19 +253,19 @@ def parse_entry(self, rel_entry:any, id_jsonld_key:str)->Any: """ # Retrieve ID from single value dictionary if type(rel_entry) == dict and len(rel_entry.keys()) == 1: - parsed_rel_entry = rel_entry['@id'] + parsed_rel_entry = rel_entry["@id"] # Parse list of dictionaries to make a list of entries with context stripped (will update this section when contexts added.) - elif type(rel_entry)==list and type(rel_entry[0]) == dict: - parsed_rel_entry = [r[id_jsonld_key].split(':')[1] for r in rel_entry] + elif type(rel_entry) == list and type(rel_entry[0]) == dict: + parsed_rel_entry = [r[id_jsonld_key].split(":")[1] for r in rel_entry] # Strip context from string and convert true/false to bool elif type(rel_entry) == str: # Remove contexts and treat strings as appropriate. 
- if ':' in rel_entry and 'http:' not in rel_entry: - parsed_rel_entry = rel_entry.split(':')[1] + if ":" in rel_entry and "http:" not in rel_entry: + parsed_rel_entry = rel_entry.split(":")[1] # Convert true/false strings to boolean - if parsed_rel_entry.lower() =='true': + if parsed_rel_entry.lower() == "true": parsed_rel_entry = True - elif parsed_rel_entry.lower == 'false': + elif parsed_rel_entry.lower == "false": parsed_rel_entry = False else: parsed_rel_entry = rel_entry @@ -256,15 +273,14 @@ def parse_entry(self, rel_entry:any, id_jsonld_key:str)->Any: else: parsed_rel_entry = rel_entry return parsed_rel_entry - def gather_jsonld_attributes_relationships( - self, - model_jsonld: List[dict]) -> Dict: - ''' + + def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Dict: + """ Args: model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model Returns: attr_rel_dictionary: dict, - {Node Display Name: + {Node Display Name: {Relationships: { CSV Header: Value}}} Notes: @@ -272,22 +288,22 @@ def gather_jsonld_attributes_relationships( So the attributes:relationship dictionary for importing a CSV vs JSONLD may not match. - It is also just about impossible to extract attributes explicitly. Using a dictionary should avoid duplications. - This is a promiscuous capture and will create an attribute for each model entry. - - Currently only designed to capture the same information that would be encoded in CSV, + - Currently only designed to capture the same information that would be encoded in CSV, can be updated in the future. - TODO: + TODO: - Find a way to delete non-attribute keys, is there a way to reliable distinguish after the fact? - Right now, here we are stripping contexts, will need to track them in the future. - ''' - + """ + # Retrieve relevant JSONLD keys. 
- jsonld_keys_to_extract = ['label', 'subClassOf', 'id', 'displayName'] - label_jsonld_key, subclassof_jsonld_key, id_jsonld_key, dn_jsonld_key = [self.rel_dict[key]['jsonld_key'] - for key in jsonld_keys_to_extract ] + jsonld_keys_to_extract = ["label", "subClassOf", "id", "displayName"] + label_jsonld_key, subclassof_jsonld_key, id_jsonld_key, dn_jsonld_key = [ + self.rel_dict[key]["jsonld_key"] for key in jsonld_keys_to_extract + ] # Build the attr_rel_dictionary attr_rel_dictionary = {} # Move through each entry in the jsonld model for entry in model_jsonld: - # Get the attr key for the dictionary if dn_jsonld_key in entry: # The attr_key is the entry display name if one was recorded @@ -304,35 +320,42 @@ def gather_jsonld_attributes_relationships( # Go through each defined relationship type (rel_key) and its attributes (rel_vals) for rel_key, rel_vals in self.rel_dict.items(): # Determine if current entry in the for loop, can be described by the current relationship that is being cycled through. - if rel_vals['jsonld_key'] in entry.keys() and 'csv_header' in rel_vals.keys(): + if ( + rel_vals["jsonld_key"] in entry.keys() + and "csv_header" in rel_vals.keys() + ): # Retrieve entry value associated with the given relationship - rel_entry = entry[rel_vals['jsonld_key']] + rel_entry = entry[rel_vals["jsonld_key"]] # If there is an entry parset it by type and add to the attr:relationships dictionary. 
if rel_entry: - parsed_rel_entry = self.parse_entry(rel_entry=rel_entry, id_jsonld_key=id_jsonld_key) + parsed_rel_entry = self.parse_entry( + rel_entry=rel_entry, id_jsonld_key=id_jsonld_key + ) # Add relationships for each attribute and relationship to the dictionary - attr_rel_dictionary[ - attr_key]['Relationships'].update( - {self.rel_dict[rel_key]['csv_header']: parsed_rel_entry}) + attr_rel_dictionary[attr_key]["Relationships"].update( + {self.rel_dict[rel_key]["csv_header"]: parsed_rel_entry} + ) return attr_rel_dictionary def parse_jsonld_model( self, - path_to_data_model:str, - ): - '''Convert raw JSONLD data model to attributes relationship dictionary. + path_to_data_model: str, + ): + """Convert raw JSONLD data model to attributes relationship dictionary. Args: path_to_data_model: str, path to JSONLD data model Returns: model_dict: dict, - {Node Display Name: + {Node Display Name: {Relationships: { CSV Header: Value}}} - ''' + """ # Log warning that JSONLD parsing is in beta mode. - logger.warning('JSONLD parsing is in Beta Mode. Please inspect outputs carefully and report any errors.') + logger.warning( + "JSONLD parsing is in Beta Mode. Please inspect outputs carefully and report any errors." + ) # Load the json_ld model to df json_load = load_json(path_to_data_model) # Convert dataframe to attributes relationship dictionary. 
- model_dict = self.gather_jsonld_attributes_relationships(json_load['@graph']) + model_dict = self.gather_jsonld_attributes_relationships(json_load["@graph"]) return model_dict From 16a2b310a90b317912cd48104a44367a9ddacc0b Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Oct 2023 21:03:46 -0700 Subject: [PATCH 193/239] run black on schematic/schemas/data_model_validator.py --- schematic/schemas/data_model_validator.py | 264 ++++++++++++---------- 1 file changed, 142 insertions(+), 122 deletions(-) diff --git a/schematic/schemas/data_model_validator.py b/schematic/schemas/data_model_validator.py index a83944627..40911e6a9 100644 --- a/schematic/schemas/data_model_validator.py +++ b/schematic/schemas/data_model_validator.py @@ -1,136 +1,156 @@ import networkx as nx -from typing import Any, Dict, Optional, Text, List,Tuple +from typing import Any, Dict, Optional, Text, List, Tuple -from schematic.schemas.data_model_relationships import ( - DataModelRelationships - ) +from schematic.schemas.data_model_relationships import DataModelRelationships -class DataModelValidator(): - ''' - Check for consistency within data model. - ''' - def __init__( - self, - graph: nx.MultiDiGraph, - ): - ''' - Args: - graph, nx.MultiDiGraph: Graph representation of the data model. - TODO: put blacklisted chars and reserved_names in some global space where they can be accessed centrally - ''' - self.graph = graph - self.DMR = DataModelRelationships() - # Define blacklisted characters, taken from store.synapse - self.blacklisted_chars = ['(', ')', '.', '-'] - # Define reserved_names, taken from Documentation - self.reserved_names = {'entityId'} - def run_checks(self)->Tuple[list,list]: - """ Run all validation checks on the data model graph. - Returns, tuple(list, list): Returns a tuple of errors and warnings generated. - TODO: In future could design a way for groups to customize tests run for their groups, run additional tests, or move some to issuing only warnings, vice versa. 
- """ - error_checks = [ - self.check_graph_has_required_node_fields(), - self.check_is_dag(), - self.check_reserved_names() - ] - warning_checks = [ - self.check_blacklisted_characters(), - ] - errors = [error for error in error_checks if error] - warnings = [warning for warning in warning_checks if warning] - return errors, warnings +class DataModelValidator: + """ + Check for consistency within data model. + """ - def check_graph_has_required_node_fields(self)-> List[str]: - '''Checks that the graph has the required node fields for all nodes. - Returns: - error, list: List of error messages for each missing field. - ''' - # Get all the fields that should be recorded per node - rel_dict = self.DMR.relationships_dictionary - node_fields = [] - for k, v in rel_dict.items(): - if 'node_label' in v.keys(): - node_fields.append(v['node_label']) + def __init__( + self, + graph: nx.MultiDiGraph, + ): + """ + Args: + graph, nx.MultiDiGraph: Graph representation of the data model. + TODO: put blacklisted chars and reserved_names in some global space where they can be accessed centrally + """ + self.graph = graph + self.DMR = DataModelRelationships() + # Define blacklisted characters, taken from store.synapse + self.blacklisted_chars = ["(", ")", ".", "-"] + # Define reserved_names, taken from Documentation + self.reserved_names = {"entityId"} - error = [] - missing_fields = [] - # Check that required fields are present for each node. - for node, node_dict in self.graph.nodes(data=True): - missing_fields.extend([(node, f) for f in node_fields if f not in node_dict.keys()]) + def run_checks(self) -> Tuple[list, list]: + """Run all validation checks on the data model graph. + Returns, tuple(list, list): Returns a tuple of errors and warnings generated. + TODO: In future could design a way for groups to customize tests run for their groups, run additional tests, or move some to issuing only warnings, vice versa. 
+ """ + error_checks = [ + self.check_graph_has_required_node_fields(), + self.check_is_dag(), + self.check_reserved_names(), + ] + warning_checks = [ + self.check_blacklisted_characters(), + ] + errors = [error for error in error_checks if error] + warnings = [warning for warning in warning_checks if warning] + return errors, warnings - if missing_fields: - for mf in missing_fields: - error.append(f'For entry: {mf[0]}, the required field {mf[1]} is missing in the data model graph, please double check your model and generate the graph again.') - return error + def check_graph_has_required_node_fields(self) -> List[str]: + """Checks that the graph has the required node fields for all nodes. + Returns: + error, list: List of error messages for each missing field. + """ + # Get all the fields that should be recorded per node + rel_dict = self.DMR.relationships_dictionary + node_fields = [] + for k, v in rel_dict.items(): + if "node_label" in v.keys(): + node_fields.append(v["node_label"]) - def check_is_dag(self) -> List[str]: - '''Check that generated graph is a directed acyclic graph - Returns: - error, list: List of error messages if graph is not a DAG. List will include a message for each cycle found, if not there is a more generic message for the graph as a whole. - ''' - error = [] - if not nx.is_directed_acyclic_graph(self.graph): - # Attempt to find any cycles: - cycles = nx.simple_cycles(self.graph) - if cycles: - for cycle in cycles: - error.append(f'Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: {cycle[0]} and {cycle[1]}, please remove this loop from your model and submit again.') - else: - error.append(f'Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we could not locate the sorce of the error, please inspect your model.') - return error + error = [] + missing_fields = [] + # Check that required fields are present for each node. 
+ for node, node_dict in self.graph.nodes(data=True): + missing_fields.extend( + [(node, f) for f in node_fields if f not in node_dict.keys()] + ) - def check_blacklisted_characters(self) -> List[str]: - """ We strip these characters in store, so not sure if it matter if we have them now, maybe add warning - Returns: - warning, list: list of warnings for each node in the graph, that has a Display name that contains blacklisted characters. - """ - warning = [] - for node, node_dict in self.graph.nodes(data=True): - if any(bl_char in node_dict['displayName'] for bl_char in self.blacklisted_chars): - node_display_name = node_dict['displayName'] - blacklisted_characters_found = [bl_char for bl_char in self.blacklisted_chars if bl_char in node_dict['displayName'] ] - blacklisted_characters_str= ','.join(blacklisted_characters_found) - warning.append(f'Node: {node_display_name} contains a blacklisted character(s): {blacklisted_characters_str}, they will be striped if used in Synapse annotations.') - return warning + if missing_fields: + for mf in missing_fields: + error.append( + f"For entry: {mf[0]}, the required field {mf[1]} is missing in the data model graph, please double check your model and generate the graph again." + ) + return error - def check_reserved_names(self) -> List[str]: - '''Identify if any names nodes in the data model graph are the same as reserved name. - Returns: - error, list: List of erros for every node in the graph whose name overlaps with the reserved names. - ''' - error = [] - reserved_names_found = [(name, node) for node in self.graph.nodes - for name in self.reserved_names - if name.lower() == node.lower() - ] - if reserved_names_found: - for reserved_name, node_name in reserved_names_found: - error.append(f'Your data model entry name: {node_name} overlaps with the reserved name: {reserved_name}. 
Please change this name in your data model.') - return error + def check_is_dag(self) -> List[str]: + """Check that generated graph is a directed acyclic graph + Returns: + error, list: List of error messages if graph is not a DAG. List will include a message for each cycle found, if not there is a more generic message for the graph as a whole. + """ + error = [] + if not nx.is_directed_acyclic_graph(self.graph): + # Attempt to find any cycles: + cycles = nx.simple_cycles(self.graph) + if cycles: + for cycle in cycles: + error.append( + f"Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: {cycle[0]} and {cycle[1]}, please remove this loop from your model and submit again." + ) + else: + error.append( + f"Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we could not locate the sorce of the error, please inspect your model." + ) + return error + def check_blacklisted_characters(self) -> List[str]: + """We strip these characters in store, so not sure if it matter if we have them now, maybe add warning + Returns: + warning, list: list of warnings for each node in the graph, that has a Display name that contains blacklisted characters. + """ + warning = [] + for node, node_dict in self.graph.nodes(data=True): + if any( + bl_char in node_dict["displayName"] + for bl_char in self.blacklisted_chars + ): + node_display_name = node_dict["displayName"] + blacklisted_characters_found = [ + bl_char + for bl_char in self.blacklisted_chars + if bl_char in node_dict["displayName"] + ] + blacklisted_characters_str = ",".join(blacklisted_characters_found) + warning.append( + f"Node: {node_display_name} contains a blacklisted character(s): {blacklisted_characters_str}, they will be striped if used in Synapse annotations." + ) + return warning - def check_namespace_overlap(self): - ''' - Check if name is repeated. 
- Implement in the future - ''' - warning = [] - return warning + def check_reserved_names(self) -> List[str]: + """Identify if any names nodes in the data model graph are the same as reserved name. + Returns: + error, list: List of erros for every node in the graph whose name overlaps with the reserved names. + """ + error = [] + reserved_names_found = [ + (name, node) + for node in self.graph.nodes + for name in self.reserved_names + if name.lower() == node.lower() + ] + if reserved_names_found: + for reserved_name, node_name in reserved_names_found: + error.append( + f"Your data model entry name: {node_name} overlaps with the reserved name: {reserved_name}. Please change this name in your data model." + ) + return error - def check_for_orphan_attributes(self): - ''' - Check if attribute is specified but not connected to another attribute or component. - Implement in future - ''' - warning = [] - return warning + def check_namespace_overlap(self): + """ + Check if name is repeated. + Implement in the future + """ + warning = [] + return warning - def check_namespace_similarity(self): - """ - Using AI, check if submitted attributes or valid values are similar to other ones, warn users. - Implement in future - """ - warning=[] - return warning + def check_for_orphan_attributes(self): + """ + Check if attribute is specified but not connected to another attribute or component. + Implement in future + """ + warning = [] + return warning + + def check_namespace_similarity(self): + """ + Using AI, check if submitted attributes or valid values are similar to other ones, warn users. 
+ Implement in future + """ + warning = [] + return warning From c43d13ab5439f54186eb37cce8b02dd0debbc1a5 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 18 Oct 2023 21:04:50 -0700 Subject: [PATCH 194/239] run black on schematic/schemas/data_model_relationships.py --- schematic/schemas/data_model_relationships.py | 356 +++++++++--------- 1 file changed, 185 insertions(+), 171 deletions(-) diff --git a/schematic/schemas/data_model_relationships.py b/schematic/schemas/data_model_relationships.py index c9ff0b944..6cf85d899 100644 --- a/schematic/schemas/data_model_relationships.py +++ b/schematic/schemas/data_model_relationships.py @@ -1,208 +1,222 @@ from typing import Dict -from schematic.utils.schema_utils import get_label_from_display_name, get_attribute_display_name_from_label, convert_bool_to_str, parse_validation_rules +from schematic.utils.schema_utils import ( + get_label_from_display_name, + get_attribute_display_name_from_label, + convert_bool_to_str, + parse_validation_rules, +) from schematic.schemas.curie import uri2curie, curie2uri -class DataModelRelationships(): + +class DataModelRelationships: def __init__(self) -> None: self.relationships_dictionary = self.define_data_model_relationships() def define_data_model_relationships(self) -> Dict: - """ Define the relationships and their attributes so they can be accessed through other classes. - The key is how it the relationship will be referenced througout Schematic. - Note: Though we could use other keys to determine which keys define nodes and edges, - edge_rel is used as an explicit definition, for easier code readablity. - key: - jsonld_key: Name for relationship in the JSONLD. - Include in all sub-dictionaries. - csv_header: Str, name for this relationshp in the CSV data model. - Enter None if not part of the CSV data model. - node_label: Name for relationship in the graph representation of the data model. - Do not include this key for edge relationships. 
- type: type, type of expected to be read into graph creation. - edge_rel: True, if this relationship defines an edge - False, if is a value relationship - Include in all sub-dictionaries. - required_header: True, if relationship header is required for the csv - jsonld_default: Defines default values to fill for JSONLD generation. - Used during func DataModelJsonLD.clean_template(), to fill value with a default, if not supplied in the data model. - node_attr_dict: This is used to add information to nodes in the model. Only include for nodes not edges. - set default values for this relationship - key is the node relationship name, value is the default value. - If want to set default as a function create a nested dictionary. - {'default': default_function, - 'standard': alternative function to call if relationship is present for a node} - } - If adding new functions to node_dict will - need to modify data_model_nodes.generate_node_dict in - edge_dir: str, 'in'/'out' is the edge an in or out edge. Define for edge relationships - jsonld_dir: str, 'in'/out is the direction in or out in the JSONLD. + """Define the relationships and their attributes so they can be accessed through other classes. + The key is how it the relationship will be referenced througout Schematic. + Note: Though we could use other keys to determine which keys define nodes and edges, + edge_rel is used as an explicit definition, for easier code readablity. + key: + jsonld_key: Name for relationship in the JSONLD. + Include in all sub-dictionaries. + csv_header: Str, name for this relationshp in the CSV data model. + Enter None if not part of the CSV data model. + node_label: Name for relationship in the graph representation of the data model. + Do not include this key for edge relationships. + type: type, type of expected to be read into graph creation. + edge_rel: True, if this relationship defines an edge + False, if is a value relationship + Include in all sub-dictionaries. 
+ required_header: True, if relationship header is required for the csv + jsonld_default: Defines default values to fill for JSONLD generation. + Used during func DataModelJsonLD.clean_template(), to fill value with a default, if not supplied in the data model. + node_attr_dict: This is used to add information to nodes in the model. Only include for nodes not edges. + set default values for this relationship + key is the node relationship name, value is the default value. + If want to set default as a function create a nested dictionary. + {'default': default_function, + 'standard': alternative function to call if relationship is present for a node} + } + If adding new functions to node_dict will + need to modify data_model_nodes.generate_node_dict in + edge_dir: str, 'in'/'out' is the edge an in or out edge. Define for edge relationships + jsonld_dir: str, 'in'/out is the direction in or out in the JSONLD. - TODO: - - Use class inheritance to set up + TODO: + - Use class inheritance to set up """ map_data_model_relationships = { - 'displayName': { - 'jsonld_key': 'sms:displayName', - 'csv_header': 'Attribute', - 'node_label': 'displayName', - 'type': str, - 'edge_rel': False, - 'required_header': True, - 'node_attr_dict':{'default': get_attribute_display_name_from_label, - 'standard': get_attribute_display_name_from_label, - }, - }, - 'label':{ - 'jsonld_key': 'rdfs:label', - 'csv_header': None, - 'node_label': 'label', - 'type': str, - 'edge_rel': False, - 'required_header': False, - 'node_attr_dict':{'default': get_label_from_display_name, - 'standard': get_label_from_display_name, - }, - }, - 'comment': { - 'jsonld_key': 'rdfs:comment', - 'csv_header': 'Description', - 'node_label': 'comment', - 'type': str, - 'edge_rel': False, - 'required_header': True, - 'node_attr_dict':{'default': 'TBD'}, - }, - 'rangeIncludes': { - 'jsonld_key': 'schema:rangeIncludes', - 'csv_header': 'Valid Values', - 'edge_key': 'rangeValue', - 'jsonld_direction': 'out', - 'edge_dir': 
'out', - 'type': list, - 'edge_rel': True, - 'required_header': True, - }, - 'requiresDependency': { - 'jsonld_key': 'sms:requiresDependency', - 'csv_header': 'DependsOn', - 'edge_key': 'requiresDependency', - 'jsonld_direction': 'out', - 'edge_dir': 'out', - 'type': list, - 'edge_rel': True, - 'required_header': True, - }, - 'requiresComponent': { - 'jsonld_key': 'sms:requiresComponent', - 'csv_header': 'DependsOn Component', - 'edge_key': 'requiresComponent', - 'jsonld_direction': 'out', - 'edge_dir': 'out', - 'type': list, - 'edge_rel': True, - 'required_header': True, - }, - 'required': { - 'jsonld_key': 'sms:required', - 'csv_header': 'Required', - 'node_label': 'required', - 'type': bool, - 'jsonld_default': 'sms:false', - 'edge_rel': False, - 'required_header': True, - 'node_attr_dict':{'default': False, - 'standard': convert_bool_to_str, - }, + "displayName": { + "jsonld_key": "sms:displayName", + "csv_header": "Attribute", + "node_label": "displayName", + "type": str, + "edge_rel": False, + "required_header": True, + "node_attr_dict": { + "default": get_attribute_display_name_from_label, + "standard": get_attribute_display_name_from_label, }, - 'subClassOf': { - 'jsonld_key': 'rdfs:subClassOf', - 'csv_header': 'Parent', - 'edge_key': 'parentOf', - 'jsonld_direction': 'in', - 'edge_dir': 'out', - 'jsonld_default': [{"@id": "bts:Thing"}], - 'type': list, - 'edge_rel': True, - 'required_header': True, + }, + "label": { + "jsonld_key": "rdfs:label", + "csv_header": None, + "node_label": "label", + "type": str, + "edge_rel": False, + "required_header": False, + "node_attr_dict": { + "default": get_label_from_display_name, + "standard": get_label_from_display_name, }, - 'validationRules': { - 'jsonld_key': 'sms:validationRules', - 'csv_header': 'Validation Rules', - 'node_label': 'validationRules', - 'jsonld_direction': 'out', - 'edge_dir': 'out', - 'jsonld_default': [], - 'type': list, - 'edge_rel': False, - 'required_header': True, - 
'node_attr_dict':{'default': [], - 'standard': parse_validation_rules, - }, + }, + "comment": { + "jsonld_key": "rdfs:comment", + "csv_header": "Description", + "node_label": "comment", + "type": str, + "edge_rel": False, + "required_header": True, + "node_attr_dict": {"default": "TBD"}, + }, + "rangeIncludes": { + "jsonld_key": "schema:rangeIncludes", + "csv_header": "Valid Values", + "edge_key": "rangeValue", + "jsonld_direction": "out", + "edge_dir": "out", + "type": list, + "edge_rel": True, + "required_header": True, + }, + "requiresDependency": { + "jsonld_key": "sms:requiresDependency", + "csv_header": "DependsOn", + "edge_key": "requiresDependency", + "jsonld_direction": "out", + "edge_dir": "out", + "type": list, + "edge_rel": True, + "required_header": True, + }, + "requiresComponent": { + "jsonld_key": "sms:requiresComponent", + "csv_header": "DependsOn Component", + "edge_key": "requiresComponent", + "jsonld_direction": "out", + "edge_dir": "out", + "type": list, + "edge_rel": True, + "required_header": True, + }, + "required": { + "jsonld_key": "sms:required", + "csv_header": "Required", + "node_label": "required", + "type": bool, + "jsonld_default": "sms:false", + "edge_rel": False, + "required_header": True, + "node_attr_dict": { + "default": False, + "standard": convert_bool_to_str, }, - 'domainIncludes': { - 'jsonld_key': 'schema:domainIncludes', - 'csv_header': 'Properties', - 'edge_key': 'domainValue', - 'jsonld_direction': 'in', - 'edge_dir': 'in', - 'type': list, - 'edge_rel': True, - 'required_header': True, + }, + "subClassOf": { + "jsonld_key": "rdfs:subClassOf", + "csv_header": "Parent", + "edge_key": "parentOf", + "jsonld_direction": "in", + "edge_dir": "out", + "jsonld_default": [{"@id": "bts:Thing"}], + "type": list, + "edge_rel": True, + "required_header": True, + }, + "validationRules": { + "jsonld_key": "sms:validationRules", + "csv_header": "Validation Rules", + "node_label": "validationRules", + "jsonld_direction": "out", + 
"edge_dir": "out", + "jsonld_default": [], + "type": list, + "edge_rel": False, + "required_header": True, + "node_attr_dict": { + "default": [], + "standard": parse_validation_rules, }, - 'isPartOf': { - 'jsonld_key': 'schema:isPartOf', - 'csv_header': None, - 'node_label': 'isPartOf', - 'type': dict, - 'edge_rel': False, - 'required_header': False, - 'node_attr_dict':{'default': {"@id": "http://schema.biothings.io"}, - }, + }, + "domainIncludes": { + "jsonld_key": "schema:domainIncludes", + "csv_header": "Properties", + "edge_key": "domainValue", + "jsonld_direction": "in", + "edge_dir": "in", + "type": list, + "edge_rel": True, + "required_header": True, + }, + "isPartOf": { + "jsonld_key": "schema:isPartOf", + "csv_header": None, + "node_label": "isPartOf", + "type": dict, + "edge_rel": False, + "required_header": False, + "node_attr_dict": { + "default": {"@id": "http://schema.biothings.io"}, }, - 'id': { - 'jsonld_key': '@id', - 'csv_header': 'Source', - 'node_label': 'uri', - 'type': str, - 'edge_rel': False, - 'required_header': True, - 'node_attr_dict':{'default': get_label_from_display_name, - 'standard': get_label_from_display_name, - }, + }, + "id": { + "jsonld_key": "@id", + "csv_header": "Source", + "node_label": "uri", + "type": str, + "edge_rel": False, + "required_header": True, + "node_attr_dict": { + "default": get_label_from_display_name, + "standard": get_label_from_display_name, }, - } + }, + } return map_data_model_relationships def define_required_csv_headers(self): """Helper function to retrieve required CSV headers, alert if required header was not provided. - Returns: - required_headers: lst, Required CSV headers. + Returns: + required_headers: lst, Required CSV headers. 
""" required_headers = [] for k, v in self.relationships_dictionary.items(): try: - if v['required_header']: - required_headers.append(v['csv_header']) + if v["required_header"]: + required_headers.append(v["csv_header"]) except KeyError: - print(f"Did not provide a 'required_header' key, value pair for the nested dictionary {k} : {key}") + print( + f"Did not provide a 'required_header' key, value pair for the nested dictionary {k} : {key}" + ) return required_headers def retreive_rel_headers_dict(self, edge: bool) -> Dict[str, str]: """Helper function to retrieve CSV headers for edge and non-edge relationships defined by edge_type. - Args: - edge, bool: True if looking for edge relationships - Returns: - rel_headers_dict: dict, key: csv_header if the key represents an edge relationship. + Args: + edge, bool: True if looking for edge relationships + Returns: + rel_headers_dict: dict, key: csv_header if the key represents an edge relationship. """ rel_headers_dict = {} for rel, rel_dict in self.relationships_dictionary.items(): - if 'edge_rel' in rel_dict: - if rel_dict['edge_rel'] and edge: - rel_headers_dict.update({rel:rel_dict['csv_header']}) - elif rel_dict['edge_rel'] == False and edge == False: - rel_headers_dict.update({rel:rel_dict['csv_header']}) + if "edge_rel" in rel_dict: + if rel_dict["edge_rel"] and edge: + rel_headers_dict.update({rel: rel_dict["csv_header"]}) + elif rel_dict["edge_rel"] == False and edge == False: + rel_headers_dict.update({rel: rel_dict["csv_header"]}) else: raise ValueError(f"Did not provide a 'edge_rel' for relationship {rel}") From 8b843eeede9967d46c68277293a2ab7e4b7b3a5c Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 19 Oct 2023 10:01:05 -0700 Subject: [PATCH 195/239] add note on self loop error --- tests/test_validator.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/test_validator.py b/tests/test_validator.py index 0278fabc6..0f24f39fe 100644 --- 
a/tests/test_validator.py +++ b/tests/test_validator.py @@ -90,6 +90,9 @@ def test_check_graph_has_required_node_fields(self, helpers): assert expected_error == validator_errors def test_dag(self, helpers): + # TODO: The schema validator currently doesn't catch the Diagnosis-Diagnosis self loop. + # It is an expected error but it will need to be decided if the validator should prevent or allow such self loops + # Get graph data model graph_data_model = graph_data_model_func(helpers, data_model_name='validator_dag_test.model.csv') From caba6bd286fe13749d43146e7a2ffca02a4887c8 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 19 Oct 2023 14:46:48 -0700 Subject: [PATCH 196/239] update all references to DME to DMGE --- schematic/manifest/commands.py | 4 +- schematic/manifest/generator.py | 24 ++-- schematic/models/GE_Helpers.py | 18 +-- schematic/models/metadata.py | 20 ++-- schematic/models/validate_attribute.py | 84 +++++++------- schematic/models/validate_manifest.py | 32 +++--- schematic/schemas/data_model_json_schema.py | 16 +-- schematic/schemas/data_model_jsonld.py | 6 +- schematic/store/synapse.py | 106 +++++++++--------- .../visualization/attributes_explorer.py | 4 +- schematic/visualization/tangled_tree.py | 18 +-- schematic_api/api/routes.py | 32 +++--- tests/test_api.py | 4 +- tests/test_manifest.py | 2 +- tests/test_schemas.py | 2 +- tests/test_store.py | 20 ++-- 16 files changed, 196 insertions(+), 196 deletions(-) diff --git a/schematic/manifest/commands.py b/schematic/manifest/commands.py index 1d03b38d8..a1382ec88 100644 --- a/schematic/manifest/commands.py +++ b/schematic/manifest/commands.py @@ -213,8 +213,8 @@ def create_single_manifest(data_type, output_csv=None, output_xlsx=None): if data_type[0] == 'all manifests': # Feed graph into the data model graph explorer - DME = DataModelGraphExplorer(graph_data_model) - component_digraph = DME.get_digraph_by_edge_type('requiresComponent') + dmge = DataModelGraphExplorer(graph_data_model) + 
component_digraph = dmge.get_digraph_by_edge_type('requiresComponent') components = component_digraph.nodes() for component in components: t = f'{title}.{component}.manifest' diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index 428e56372..9d6f6e997 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -88,13 +88,13 @@ def __init__( ) # Instantiate Data Model Explorer object - self.DME = DataModelGraphExplorer(self.graph) + self.dmge = DataModelGraphExplorer(self.graph) # additional metadata to add to manifest self.additional_metadata = additional_metadata # Check if the class is in the schema - root_in_schema = self.DME.is_class_in_schema(self.root) + root_in_schema = self.dmge.is_class_in_schema(self.root) # If the class could not be found, give a notification if not root_in_schema: @@ -103,7 +103,7 @@ def __init__( raise LookupError(exception_message) # Determine whether current data type is file-based - self.is_file_based = "Filename" in self.DME.get_node_dependencies(self.root) + self.is_file_based = "Filename" in self.dmge.get_node_dependencies(self.root) def _attribute_to_letter(self, attribute, manifest_fields): """Map attribute to column letter in a google sheet""" @@ -819,9 +819,9 @@ def _request_row_format(self, i, req): notes_body["requests"] (dict): with information on note to add to the column header. This notes body will be added to a request. 
""" - if self.DME: + if self.dmge: # get node definition - note = self.DME.get_node_comment(node_display_name = req) + note = self.dmge.get_node_comment(node_display_name = req) notes_body = { "requests": [ @@ -1020,7 +1020,7 @@ def _dependency_formatting( dependency_formatting_body = {"requests": []} for j, val_dep in enumerate(val_dependencies): is_required = False - if self.DME.get_node_required(node_display_name=val_dep): + if self.dmge.get_node_required(node_display_name=val_dep): is_required = True else: is_required = False @@ -1063,13 +1063,13 @@ def _request_dependency_formatting( for req_val in req_vals: # get this required/valid value's node label in schema, based on display name (i.e. shown to the user in a dropdown to fill in) req_val = req_val["userEnteredValue"] - req_val_node_label = self.DME.get_node_label(req_val) + req_val_node_label = self.dmge.get_node_label(req_val) if not req_val_node_label: # if this node is not in the graph # continue - there are no dependencies for it continue # check if this required/valid value has additional dependency attributes - val_dependencies = self.DME.get_node_dependencies( + val_dependencies = self.dmge.get_node_dependencies( req_val_node_label, schema_ordered=False ) @@ -1122,7 +1122,7 @@ def _create_requests_body( requests_body["requests"] = [] for i, req in enumerate(ordered_metadata_fields[0]): # Gather validation rules and valid values for attribute. - validation_rules = self.DME.get_node_validation_rules(node_display_name=req) + validation_rules = self.dmge.get_node_validation_rules(node_display_name=req) # Add regex match validaiton rule to Google Sheets. 
if validation_rules and sheet_url: @@ -1537,7 +1537,7 @@ def get_manifest( # Get manifest file associated with given dataset (if applicable) # populate manifest with set of new files (if applicable) - manifest_record = store.updateDatasetManifestFiles(self.DME, datasetId = dataset_id, store = False) + manifest_record = store.updateDatasetManifestFiles(self.dmge, datasetId = dataset_id, store = False) # get URL of an empty manifest file created based on schema component empty_manifest_url = self.get_empty_manifest(strict=strict, sheet_url=True) @@ -1774,9 +1774,9 @@ def sort_manifest_fields(self, manifest_fields, order="schema"): # order manifest fields based on data-model schema if order == "schema": - if self.DME and self.root: + if self.dmge and self.root: # get display names of dependencies - dependencies_display_names = self.DME.get_node_dependencies(self.root) + dependencies_display_names = self.dmge.get_node_dependencies(self.root) # reorder manifest fields so that root dependencies are first and follow schema order manifest_fields = sorted( diff --git a/schematic/models/GE_Helpers.py b/schematic/models/GE_Helpers.py index 696b99ca7..f183d1daa 100644 --- a/schematic/models/GE_Helpers.py +++ b/schematic/models/GE_Helpers.py @@ -40,7 +40,7 @@ class GreatExpectationsHelpers(object): 2) Parse results dict to generate appropriate errors """ def __init__(self, - DME, + dmge, unimplemented_expectations, manifest, manifestPath @@ -49,8 +49,8 @@ def __init__(self, Purpose: Instantiate a great expectations helpers object Args: - DME: - DataModelExplorer Object + dmge: + DataModelGraphExplorer Object unimplemented_expectations: dictionary of validation rules that currently do not have expectations developed manifest: @@ -59,7 +59,7 @@ def __init__(self, path to manifest being validated """ self.unimplemented_expectations = unimplemented_expectations - self.DME = DME + self.dmge = dmge self.manifest = manifest self.manifestPath = manifestPath @@ -159,7 +159,7 @@ def 
build_expectation_suite(self,): # remove trailing/leading whitespaces from manifest self.manifest.applymap(lambda x: x.strip() if isinstance(x, str) else x) - validation_rules = self.DME.get_node_validation_rules(node_display_name=col) + validation_rules = self.dmge.get_node_validation_rules(node_display_name=col) #check if attribute has any rules associated with it if validation_rules: @@ -385,7 +385,7 @@ def generate_errors( validation_types: Dict, errors: List, warnings: List, - DME: DataModelGraphExplorer, + dmge: DataModelGraphExplorer, ): """ Purpose: @@ -450,7 +450,7 @@ def generate_errors( row_num = str(row+2), attribute_name = errColumn, invalid_entry = str(value), - DME = DME, + dmge = dmge, ) if vr_errors: errors.append(vr_errors) @@ -466,7 +466,7 @@ def generate_errors( module_to_call = 'match', attribute_name = errColumn, invalid_entry = value, - DME = DME, + dmge = dmge, ) if vr_errors: errors.append(vr_errors) @@ -478,7 +478,7 @@ def generate_errors( attribute_name = errColumn, row_num = np_array_to_str_list(np.array(indices)+2), error_val = iterable_to_str_list(values), - DME = self.DME + dmge = self.dmge ) if vr_errors: errors.append(vr_errors) diff --git a/schematic/models/metadata.py b/schematic/models/metadata.py index 748fdfab3..04a690bf6 100644 --- a/schematic/models/metadata.py +++ b/schematic/models/metadata.py @@ -61,7 +61,7 @@ def __init__(self, inputMModelLocation: str, inputMModelLocationType: str,) -> N # Generate graph self.graph_data_model = data_model_grapher.generate_data_model_graph() - self.DME = DataModelGraphExplorer(self.graph_data_model) + self.dmge = DataModelGraphExplorer(self.graph_data_model) # check if the type of MModel file is "local" # currently, the application only supports reading from local JSON-LD files @@ -100,7 +100,7 @@ def getOrderedModelNodes(self, rootNode: str, relationshipType: str) -> List[str Raises: ValueError: rootNode not found in metadata model. 
""" - ordered_nodes = self.DME.get_descendants_by_edge_type( + ordered_nodes = self.dmge.get_descendants_by_edge_type( rootNode, relationshipType, connected=True, ordered=True ) @@ -168,11 +168,11 @@ def get_component_requirements( """ # get required components for the input/source component - req_components = self.DME.get_component_requirements(source_component) + req_components = self.dmge.get_component_requirements(source_component) # retreive components as graph if as_graph: - req_components_graph = self.DME.get_component_requirements_graph( + req_components_graph = self.dmge.get_component_requirements_graph( source_component ) @@ -259,7 +259,7 @@ def validateModelManifest( warnings=warnings, manifest=manifest, manifestPath=manifestPath, - DME=self.DME, + dmge=self.dmge, jsonSchema=jsonSchema, restrict_rules=restrict_rules, project_scope=project_scope) @@ -327,7 +327,7 @@ def submit_metadata_manifest( try: # check if the component ("class" in schema) passed as argument is valid (present in schema) or not - self.DME.is_class_in_schema(validate_component) + self.dmge.is_class_in_schema(validate_component) except: # a KeyError exception is raised when validate_component fails in the try-block above # here, we are suppressing the KeyError exception and replacing it with a more @@ -347,7 +347,7 @@ def submit_metadata_manifest( # upload manifest file from `manifest_path` path to entity with Syn ID `dataset_id` if exists(censored_manifest_path): censored_manifest_id = syn_store.associateMetadataWithFiles( - DME = self.DME, + dmge = self.dmge, metadataManifestPath = censored_manifest_path, datasetId = dataset_id, manifest_record_type = manifest_record_type, @@ -358,7 +358,7 @@ def submit_metadata_manifest( restrict_maniest = True manifest_id = syn_store.associateMetadataWithFiles( - DME = self.DME, + dmge = self.dmge, metadataManifestPath = manifest_path, datasetId = dataset_id, manifest_record_type = manifest_record_type, @@ -380,7 +380,7 @@ def 
submit_metadata_manifest( # no need to perform validation, just submit/associate the metadata manifest file if exists(censored_manifest_path): censored_manifest_id = syn_store.associateMetadataWithFiles( - DME = self.DME, + dmge = self.dmge, metadataManifestPath=censored_manifest_path, datasetId=dataset_id, manifest_record_type=manifest_record_type, @@ -391,7 +391,7 @@ def submit_metadata_manifest( restrict_maniest = True manifest_id = syn_store.associateMetadataWithFiles( - DME = self.DME, + dmge = self.dmge, metadataManifestPath=manifest_path, datasetId=dataset_id, manifest_record_type=manifest_record_type, diff --git a/schematic/models/validate_attribute.py b/schematic/models/validate_attribute.py index f5ffd0e4b..f3bca8403 100644 --- a/schematic/models/validate_attribute.py +++ b/schematic/models/validate_attribute.py @@ -32,7 +32,7 @@ logger = logging.getLogger(__name__) class GenerateError: - def generate_schema_error(row_num: str, attribute_name: str, error_msg: str, invalid_entry: str, DME: DataModelGraphExplorer,)-> List[str]: + def generate_schema_error(row_num: str, attribute_name: str, error_msg: str, invalid_entry: str, dmge: DataModelGraphExplorer,)-> List[str]: ''' Purpose: Process error messages generated from schema Input: @@ -50,7 +50,7 @@ def generate_schema_error(row_num: str, attribute_name: str, error_msg: str, inv raises = GenerateError.get_message_level( val_rule = 'schema', attribute_name = attribute_name, - DME = DME, + dmge = dmge, ) #if a message needs to be raised, get the approrpiate function to do so @@ -78,7 +78,7 @@ def generate_schema_error(row_num: str, attribute_name: str, error_msg: str, inv def generate_list_error( list_string: str, row_num: str, attribute_name: str, list_error: str, - invalid_entry:str, DME: DataModelGraphExplorer, val_rule: str, + invalid_entry:str, dmge: DataModelGraphExplorer, val_rule: str, ) -> List[str]: """ Purpose: @@ -101,7 +101,7 @@ def generate_list_error( raises = GenerateError.get_message_level( 
val_rule = val_rule, attribute_name = attribute_name, - DME = DME, + dmge = dmge, ) #if a message needs to be raised, get the approrpiate function to do so @@ -138,7 +138,7 @@ def generate_regex_error( module_to_call: str, attribute_name: str, invalid_entry: str, - DME: DataModelGraphExplorer, + dmge: DataModelGraphExplorer, ) -> List[str]: """ Purpose: @@ -162,7 +162,7 @@ def generate_regex_error( raises = GenerateError.get_message_level( val_rule = val_rule, attribute_name = attribute_name, - DME = DME, + dmge = dmge, ) #if a message needs to be raised, get the approrpiate function to do so @@ -191,7 +191,7 @@ def generate_regex_error( return error_list, warning_list def generate_type_error( - val_rule: str, row_num: str, attribute_name: str, invalid_entry:str, DME: DataModelGraphExplorer, + val_rule: str, row_num: str, attribute_name: str, invalid_entry:str, dmge: DataModelGraphExplorer, ) -> List[str]: """ Purpose: @@ -212,7 +212,7 @@ def generate_type_error( #Determine which, if any, message to raise raises = GenerateError.get_message_level( - DME = DME, + dmge = dmge, attribute_name = attribute_name, val_rule = val_rule, ) @@ -233,7 +233,7 @@ def generate_type_error( error_val = invalid_entry #TODO: not sure if this i needed (to split) - validation_rules=DME.get_node_validation_rules(node_display_name=attribute_name) + validation_rules=dmge.get_node_validation_rules(node_display_name=attribute_name) #TODO: Can remove when handling updated so split within graph if validation_rules and '::' in validation_rules[0]: @@ -255,7 +255,7 @@ def generate_type_error( def generate_url_error( url: str, url_error: str, row_num: str, attribute_name: str, argument: str, - invalid_entry:str, DME: DataModelGraphExplorer, val_rule: str, + invalid_entry:str, dmge: DataModelGraphExplorer, val_rule: str, ) -> List[str]: """ Purpose: @@ -289,7 +289,7 @@ def generate_url_error( raises = GenerateError.get_message_level( val_rule = val_rule, attribute_name = attribute_name, - DME = 
DME, + dmge = dmge, ) #if a message needs to be raised, get the approrpiate function to do so @@ -339,7 +339,7 @@ def generate_url_error( def generate_cross_warning( val_rule: str, attribute_name: str, - DME: DataModelGraphExplorer, + dmge: DataModelGraphExplorer, matching_manifests = [], missing_manifest_ID = None, invalid_entry = None, @@ -369,7 +369,7 @@ def generate_cross_warning( raises = GenerateError.get_message_level( val_rule = val_rule, attribute_name = attribute_name, - DME = DME, + dmge = dmge, ) #if a message needs to be raised, get the approrpiate function to do so @@ -417,7 +417,7 @@ def generate_cross_warning( def generate_content_error( val_rule: str, attribute_name: str, - DME: DataModelGraphExplorer, + dmge: DataModelGraphExplorer, row_num = None, error_val = None, ) -> (List[str], List[str]): @@ -432,7 +432,7 @@ def generate_content_error( Input: val_rule: str, defined in the schema. attribute_name: str, attribute being validated - DME: DataModelGraphExplorer object + dmge: DataModelGraphExplorer object row_num: str, row where the error was detected error_val: value duplicated @@ -451,7 +451,7 @@ def generate_content_error( raises = GenerateError.get_message_level( val_rule=val_rule, attribute_name = attribute_name, - DME = DME, + dmge = dmge, ) #if a message needs to be raised, get the approrpiate function to do so @@ -513,7 +513,7 @@ def generate_content_error( return error_list, warning_list def get_message_level( - DME: DataModelGraphExplorer, + dmge: DataModelGraphExplorer, attribute_name: str, val_rule: str, ) -> str: @@ -529,7 +529,7 @@ def get_message_level( Input: val_rule: str, defined in the schema. 
- DME: DataModelGraphExplorer object + dmge: DataModelGraphExplorer object attribute_name: str, attribute being validated Returns: 'error', 'warning' or None @@ -548,10 +548,10 @@ def get_message_level( level = 'error' elif rule_parts[-1].lower() == 'warning': level = 'warning' - elif not DME.get_node_required(node_display_name=attribute_name): + elif not dmge.get_node_required(node_display_name=attribute_name): # If not required raise warnings to notify level = 'warning' - elif DME.get_node_required(node_display_name=attribute_name) and 'recommended' in val_rule: + elif dmge.get_node_required(node_display_name=attribute_name) and 'recommended' in val_rule: level = None return level @@ -596,7 +596,7 @@ def get_target_manifests(target_component, project_scope: List): return synStore, target_manifest_IDs, target_dataset_IDs def list_validation( - self, val_rule: str, manifest_col: pd.core.series.Series, DME: DataModelGraphExplorer, + self, val_rule: str, manifest_col: pd.core.series.Series, dmge: DataModelGraphExplorer, ) -> (List[List[str]], List[List[str]], pd.core.series.Series): """ Purpose: @@ -637,7 +637,7 @@ def list_validation( attribute_name=manifest_col.name, list_error=list_error, invalid_entry=manifest_col[i], - DME = DME, + dmge = dmge, val_rule = val_rule, ) if vr_errors: @@ -652,7 +652,7 @@ def list_validation( return errors, warnings, manifest_col def regex_validation( - self, val_rule: str, manifest_col: pd.core.series.Series, DME: DataModelGraphExplorer, + self, val_rule: str, manifest_col: pd.core.series.Series, dmge: DataModelGraphExplorer, ) -> (List[List[str]], List[List[str]]): """ Purpose: @@ -662,7 +662,7 @@ def regex_validation( - val_rule: str, Validation rule - manifest_col: pd.core.series.Series, column for a given attribute in the manifest - - DME: DataModelGraphExplorer Object + - dmge: DataModelGraphExplorer Object Using this module requres validation rules written in the following manner: 'regex module regular expression' - regex: is 
an exact string specifying that the input is to be validated as a @@ -694,7 +694,7 @@ def regex_validation( errors = [] warnings = [] - validation_rules = DME.get_node_validation_rules(node_display_name=manifest_col.name) + validation_rules = dmge.get_node_validation_rules(node_display_name=manifest_col.name) if validation_rules and '::' in validation_rules[0]: validation_rules = validation_rules[0].split("::") # Handle case where validating re's within a list. @@ -716,7 +716,7 @@ def regex_validation( module_to_call=reg_exp_rules[1], attribute_name=manifest_col.name, invalid_entry=manifest_col[i], - DME = DME, + dmge = dmge, ) if vr_errors: errors.append(vr_errors) @@ -737,7 +737,7 @@ def regex_validation( module_to_call=reg_exp_rules[1], attribute_name=manifest_col.name, invalid_entry=manifest_col[i], - DME = DME, + dmge = dmge, ) if vr_errors: errors.append(vr_errors) @@ -747,7 +747,7 @@ def regex_validation( return errors, warnings def type_validation( - self, val_rule: str, manifest_col: pd.core.series.Series, DME: DataModelGraphExplorer, + self, val_rule: str, manifest_col: pd.core.series.Series, dmge: DataModelGraphExplorer, ) -> (List[List[str]], List[List[str]]): """ Purpose: @@ -758,7 +758,7 @@ def type_validation( 'float', 'int', 'num', 'str' - manifest_col: pd.core.series.Series, column for a given attribute in the manifest - - DME: DataModelGraphExplorer Object + - dmge: DataModelGraphExplorer Object Returns: -This function will return errors when the user input value does not match schema specifications. 
@@ -786,7 +786,7 @@ def type_validation( row_num=str(i + 2), attribute_name=manifest_col.name, invalid_entry=str(manifest_col[i]), - DME = DME, + dmge = dmge, ) if vr_errors: errors.append(vr_errors) @@ -800,7 +800,7 @@ def type_validation( row_num=str(i + 2), attribute_name=manifest_col.name, invalid_entry=str(manifest_col[i]), - DME = DME, + dmge = dmge, ) if vr_errors: errors.append(vr_errors) @@ -808,7 +808,7 @@ def type_validation( warnings.append(vr_warnings) return errors, warnings - def url_validation(self, val_rule: str, manifest_col: str, DME: DataModelGraphExplorer) -> (List[List[str]], List[List[str]]): + def url_validation(self, val_rule: str, manifest_col: str, dmge: DataModelGraphExplorer) -> (List[List[str]], List[List[str]]): """ Purpose: Validate URL's submitted for a particular attribute in a manifest. @@ -818,7 +818,7 @@ def url_validation(self, val_rule: str, manifest_col: str, DME: DataModelGraphEx - val_rule: str, Validation rule - manifest_col: pd.core.series.Series, column for a given attribute in the manifest - - DME: DataModelGraphExplorer Object + - dmge: DataModelGraphExplorer Object Output: This function will return errors when the user input value does not match schema specifications. 
@@ -848,7 +848,7 @@ def url_validation(self, val_rule: str, manifest_col: str, DME: DataModelGraphEx attribute_name=manifest_col.name, argument=url_args, invalid_entry=manifest_col[i], - DME = DME, + dmge = dmge, val_rule = val_rule, ) if vr_errors: @@ -876,7 +876,7 @@ def url_validation(self, val_rule: str, manifest_col: str, DME: DataModelGraphEx attribute_name=manifest_col.name, argument=url_args, invalid_entry=manifest_col[i], - DME = DME, + dmge = dmge, val_rule = val_rule, ) if vr_errors: @@ -896,7 +896,7 @@ def url_validation(self, val_rule: str, manifest_col: str, DME: DataModelGraphEx attribute_name=manifest_col.name, argument=arg, invalid_entry=manifest_col[i], - DME = DME, + dmge = dmge, val_rule = val_rule, ) if vr_errors: @@ -906,7 +906,7 @@ def url_validation(self, val_rule: str, manifest_col: str, DME: DataModelGraphEx return errors, warnings def cross_validation( - self, val_rule: str, manifest_col: pd.core.series.Series, project_scope: List, DME: DataModelGraphExplorer, + self, val_rule: str, manifest_col: pd.core.series.Series, project_scope: List, dmge: DataModelGraphExplorer, ) -> List[List[str]]: """ Purpose: @@ -916,7 +916,7 @@ def cross_validation( - val_rule: str, Validation rule - manifest_col: pd.core.series.Series, column for a given attribute in the manifest - - DME: DataModelGraphExplorer Object + - dmge: DataModelGraphExplorer Object Output: This function will return errors when values in the current manifest's attribute are not fully present in the correct amount of other manifests. 
@@ -994,7 +994,7 @@ def cross_validation( row_num = missing_rows, attribute_name = source_attribute, invalid_entry = iterable_to_str_list(missing_values), - DME = DME, + dmge = dmge, ) if vr_errors: errors.append(vr_errors) @@ -1009,7 +1009,7 @@ def cross_validation( row_num = invalid_rows, attribute_name = source_attribute, invalid_entry = iterable_to_str_list(invalid_values.squeeze()), - DME = DME, + dmge = dmge, ) if vr_errors: errors.append(vr_errors) @@ -1036,7 +1036,7 @@ def cross_validation( attribute_name = source_attribute, invalid_entry = missing_values, missing_manifest_ID = missing_manifest_IDs, - DME = DME, + dmge = dmge, ) if vr_errors: errors.append(vr_errors) @@ -1047,7 +1047,7 @@ def cross_validation( val_rule = val_rule, attribute_name = source_attribute, matching_manifests = present_manifest_log, - DME = DME, + dmge = dmge, ) if vr_errors: errors.append(vr_errors) diff --git a/schematic/models/validate_manifest.py b/schematic/models/validate_manifest.py index 83cbf169e..c54d7945d 100644 --- a/schematic/models/validate_manifest.py +++ b/schematic/models/validate_manifest.py @@ -29,11 +29,11 @@ logger = logging.getLogger(__name__) class ValidateManifest(object): - def __init__(self, errors, manifest, manifestPath, DME, jsonSchema): + def __init__(self, errors, manifest, manifestPath, dmge, jsonSchema): self.errors = errors self.manifest = manifest self.manifestPath = manifestPath - self.DME = DME + self.dmge = dmge self.jsonSchema = jsonSchema def get_multiple_types_error( @@ -63,7 +63,7 @@ def get_multiple_types_error( return ["NA", error_col, error_message, error_val] def validate_manifest_rules( - self, manifest: pd.core.frame.DataFrame, DME: DataModelGraphExplorer, restrict_rules: bool, project_scope: List, + self, manifest: pd.core.frame.DataFrame, dmge: DataModelGraphExplorer, restrict_rules: bool, project_scope: List, ) -> (pd.core.frame.DataFrame, List[List[str]]): """ Purpose: @@ -73,7 +73,7 @@ def validate_manifest_rules( manifest: 
pd.core.frame.DataFrame imported from models/metadata.py contains metadata input from user for each attribute. - DME: DataModelGraphExplorer + dmge: DataModelGraphExplorer initialized within models/metadata.py Returns: manifest: pd.core.frame.DataFrame @@ -130,7 +130,7 @@ def validate_manifest_rules( t_GE = perf_counter() #operations necessary to set up and run ge suite validation ge_helpers=GreatExpectationsHelpers( - DME=DME, + dmge=dmge, unimplemented_expectations=unimplemented_expectations, manifest = manifest, manifestPath = self.manifestPath, @@ -163,7 +163,7 @@ def validate_manifest_rules( warnings = warnings, validation_results = validation_results, validation_types = validation_types, - DME = DME, + dmge = dmge, ) logger.debug(f"GE elapsed time {perf_counter()-t_GE}") else: @@ -175,7 +175,7 @@ def validate_manifest_rules( # remove trailing/leading whitespaces from manifest manifest.applymap(lambda x: x.strip() if isinstance(x, str) else x) - validation_rules = DME.get_node_validation_rules(node_display_name=col) + validation_rules = dmge.get_node_validation_rules(node_display_name=col) #TODO: Can remove when handling updated so split within graph if validation_rules and '::' in validation_rules[0]: @@ -207,16 +207,16 @@ def validate_manifest_rules( if validation_type == "list": vr_errors, vr_warnings, manifest_col = validation_method( - self, rule, manifest[col], DME, + self, rule, manifest[col], dmge, ) manifest[col] = manifest_col elif validation_type.lower().startswith("match"): vr_errors, vr_warnings = validation_method( - self, rule, manifest[col], project_scope, DME, + self, rule, manifest[col], project_scope, dmge, ) else: vr_errors, vr_warnings = validation_method( - self, rule, manifest[col], DME, + self, rule, manifest[col], dmge, ) # Check for validation rule errors and add them to other errors. 
if vr_errors: @@ -227,7 +227,7 @@ def validate_manifest_rules( logger.debug(f"In House validation elapsed time {perf_counter()-t_err}") return manifest, errors, warnings - def validate_manifest_values(self, manifest, jsonSchema, DME, + def validate_manifest_values(self, manifest, jsonSchema, dmge, ) -> (List[List[str]], List[List[str]]): t_json_schema = perf_counter() @@ -250,7 +250,7 @@ def validate_manifest_values(self, manifest, jsonSchema, DME, errorMsg = error.message[0:500] errorVal = error.instance if len(error.path) > 0 else "Wrong schema" - val_errors, val_warnings = GenerateError.generate_schema_error(row_num = errorRow, attribute_name = errorColName, error_msg = errorMsg, invalid_entry = errorVal, DME = DME) + val_errors, val_warnings = GenerateError.generate_schema_error(row_num = errorRow, attribute_name = errorColName, error_msg = errorMsg, invalid_entry = errorVal, dmge = dmge) if val_errors: errors.append(val_errors) @@ -260,15 +260,15 @@ def validate_manifest_values(self, manifest, jsonSchema, DME, return errors, warnings -def validate_all(self, errors, warnings, manifest, manifestPath, DME, jsonSchema, restrict_rules, project_scope: List): - vm = ValidateManifest(errors, manifest, manifestPath, DME, jsonSchema) - manifest, vmr_errors, vmr_warnings = vm.validate_manifest_rules(manifest, DME, restrict_rules, project_scope) +def validate_all(self, errors, warnings, manifest, manifestPath, dmge, jsonSchema, restrict_rules, project_scope: List): + vm = ValidateManifest(errors, manifest, manifestPath, dmge, jsonSchema) + manifest, vmr_errors, vmr_warnings = vm.validate_manifest_rules(manifest, dmge, restrict_rules, project_scope) if vmr_errors: errors.extend(vmr_errors) if vmr_warnings: warnings.extend(vmr_warnings) - vmv_errors, vmv_warnings = vm.validate_manifest_values(manifest, jsonSchema, DME) + vmv_errors, vmv_warnings = vm.validate_manifest_values(manifest, jsonSchema, dmge) if vmv_errors: errors.extend(vmv_errors) if vmv_warnings: diff --git 
a/schematic/schemas/data_model_json_schema.py b/schematic/schemas/data_model_json_schema.py index d8610f1d5..844f5a4ec 100644 --- a/schematic/schemas/data_model_json_schema.py +++ b/schematic/schemas/data_model_json_schema.py @@ -20,7 +20,7 @@ def __init__( # TODO: Change jsonld_path to data_model_path (can work with CSV too) self.jsonld_path = jsonld_path self.graph = graph # Graph would be fully made at this point. - self.DME = DataModelGraphExplorer(self.graph) + self.dmge = DataModelGraphExplorer(self.graph) self.dmr = DataModelRelationships() self.rel_dict = self.dmr.relationships_dictionary @@ -127,7 +127,7 @@ def get_json_validation_schema( ) # maintain a map between range nodes and their domain nodes {range_value : domain_value} # the domain node is very likely the parentof ("parentOf" relationship) of the range node - root_dependencies = self.DME.get_adjacent_nodes_by_relationship( + root_dependencies = self.dmge.get_adjacent_nodes_by_relationship( node_label=source_node, relationship=self.rel_dict["requiresDependency"]["edge_key"], ) @@ -146,15 +146,15 @@ def get_json_validation_schema( # node is being processed node_is_processed = True - node_range = self.DME.get_adjacent_nodes_by_relationship( + node_range = self.dmge.get_adjacent_nodes_by_relationship( node_label=process_node, relationship=self.rel_dict["rangeIncludes"]["edge_key"], ) # get node range display name - node_range_d = self.DME.get_nodes_display_names(node_list=node_range) + node_range_d = self.dmge.get_nodes_display_names(node_list=node_range) - node_dependencies = self.DME.get_adjacent_nodes_by_relationship( + node_dependencies = self.dmge.get_adjacent_nodes_by_relationship( node_label=process_node, relationship=self.rel_dict["requiresDependency"]["edge_key"], ) @@ -172,10 +172,10 @@ def get_json_validation_schema( # can this node be map to the empty set (if required no; if not required yes) # TODO: change "required" to different term, required may be a bit misleading (i.e. 
is the node required in the schema) - node_required = self.DME.get_node_required(node_label=process_node) + node_required = self.dmge.get_node_required(node_label=process_node) # get any additional validation rules associated with this node (e.g. can this node be mapped to a list of other nodes) - node_validation_rules = self.DME.get_node_validation_rules( + node_validation_rules = self.dmge.get_node_validation_rules( node_display_name=node_display_name ) @@ -328,7 +328,7 @@ def get_json_validation_schema( node_is_processed = False # add process node as a conditional to its dependencies - node_dependencies_d = self.DME.get_nodes_display_names( + node_dependencies_d = self.dmge.get_nodes_display_names( node_list=node_dependencies ) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 346fe04c9..d4e9c98c8 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -108,7 +108,7 @@ def __init__(self, Graph: nx.MultiDiGraph, output_path: str = ""): self.graph = Graph # Graph would be fully made at this point. self.dmr = DataModelRelationships() self.rel_dict = self.dmr.relationships_dictionary - self.DME = DataModelGraphExplorer(self.graph) + self.dmge = DataModelGraphExplorer(self.graph) self.output_path = output_path # Gather the templates @@ -361,7 +361,7 @@ def reorder_template_entries(self, template: dict) -> dict: ][0] # Order edges - sorted_edges = self.DME.get_ordered_entry( + sorted_edges = self.dmge.get_ordered_entry( key=key, source_node_label=template_label ) edge_weights_dict = {edge: i for i, edge in enumerate(sorted_edges)} @@ -383,7 +383,7 @@ def generate_jsonld_object(self): jsonld_object, dict: JSONLD object containing all nodes and related information """ # Get properties. 
- properties = self.DME.find_properties() + properties = self.dmge.find_properties() # Get JSONLD Template json_ld_template = self.base_jsonld_template diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index ee99e41e7..b24663128 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -629,11 +629,11 @@ def fill_in_entity_id_filename(self, datasetId: str, manifest: pd.DataFrame) -> manifest = manifest.fillna("") return dataset_files, manifest - def updateDatasetManifestFiles(self, DME: DataModelGraphExplorer, datasetId: str, store:bool = True) -> Union[Tuple[str, pd.DataFrame], None]: + def updateDatasetManifestFiles(self, dmge: DataModelGraphExplorer, datasetId: str, store:bool = True) -> Union[Tuple[str, pd.DataFrame], None]: """Fetch the names and entity IDs of all current files in dataset in store, if any; update dataset's manifest with new files, if any. Args: - DME: DataModelGraphExplorer Instance + dmge: DataModelGraphExplorer Instance datasetId: synapse ID of a storage dataset. 
store: if set to True store updated manifest in asset store; if set to False return a Pandas dataframe containing updated manifest but do not store to asset store @@ -666,7 +666,7 @@ def updateDatasetManifestFiles(self, DME: DataModelGraphExplorer, datasetId: str manifest.to_csv(manifest_filepath, index=False) # store manifest and update associated metadata with manifest on Synapse - manifest_id = self.associateMetadataWithFiles(DME, manifest_filepath, datasetId) + manifest_id = self.associateMetadataWithFiles(dmge, manifest_filepath, datasetId) return manifest_id, manifest @@ -804,7 +804,7 @@ def getProjectManifests(self, projectId: str) -> List[str]: return manifests - def upload_project_manifests_to_synapse(self, DME: DataModelGraphExplorer, projectId: str) -> List[str]: + def upload_project_manifests_to_synapse(self, dmge: DataModelGraphExplorer, projectId: str) -> List[str]: """Upload all metadata manifest files across all datasets in a specified project as tables in Synapse. Returns: String of all the manifest_table_ids of all the manifests that have been loaded. 
@@ -826,7 +826,7 @@ def upload_project_manifests_to_synapse(self, DME: DataModelGraphExplorer, proje manifest_name = manifest_info["properties"]["name"] manifest_path = manifest_info["path"] manifest_df = load_df(manifest_path) - manifest_table_id = uploadDB(DME=DME, manifest=manifest, datasetId=datasetId, table_name=datasetName) + manifest_table_id = uploadDB(dmge=dmge, manifest=manifest, datasetId=datasetId, table_name=datasetName) manifest_loaded.append(datasetName) return manifest_loaded @@ -849,7 +849,7 @@ def upload_annotated_project_manifests_to_synapse(self, projectId:str, path_to_j graph_data_model = data_model_grapher.generate_data_model_graph() #Instantiate DataModelGraphExplorer - DME = DataModelGraphExplorer(graph_data_model) + dmge = DataModelGraphExplorer(graph_data_model) manifests = [] manifest_loaded = [] @@ -869,7 +869,7 @@ def upload_annotated_project_manifests_to_synapse(self, projectId:str, path_to_j manifest_path = manifest_info["path"] manifest = ((datasetId, datasetName), (manifest_id, manifest_name), ("", "")) if not dry_run: - manifest_syn_id = self.associateMetadataWithFiles(DME, manifest_path, datasetId, manifest_record_type='table') + manifest_syn_id = self.associateMetadataWithFiles(dmge, manifest_path, datasetId, manifest_record_type='table') manifest_loaded.append(manifest) return manifests, manifest_loaded @@ -968,7 +968,7 @@ def get_table_info(self, datasetId: str = None, projectId: str = None) -> List[s @missing_entity_handler def uploadDB(self, - DME: DataModelGraphExplorer, + dmge: DataModelGraphExplorer, manifest: pd.DataFrame, datasetId: str, table_name: str, @@ -980,7 +980,7 @@ def uploadDB(self, Method to upload a database to an asset store. 
In synapse, this will upload a metadata table Args: - DME: DataModelGraphExplorer object + dmge: DataModelGraphExplorer object manifest: pd.Df manifest to upload datasetId: synID of the dataset for the manifest table_name: name of the table to be uploaded @@ -997,18 +997,18 @@ def uploadDB(self, """ - col_schema, table_manifest = self.formatDB(DME=DME, manifest=manifest, useSchemaLabel=useSchemaLabel) + col_schema, table_manifest = self.formatDB(dmge=dmge, manifest=manifest, useSchemaLabel=useSchemaLabel) - manifest_table_id = self.buildDB(datasetId, table_name, col_schema, table_manifest, table_manipulation, DME, restrict,) + manifest_table_id = self.buildDB(datasetId, table_name, col_schema, table_manifest, table_manipulation, dmge, restrict,) return manifest_table_id, manifest, table_manifest - def formatDB(self, DME, manifest, useSchemaLabel): + def formatDB(self, dmge, manifest, useSchemaLabel): """ Method to format a manifest appropriatly for upload as table Args: - DME: DataModelGraphExplorer object + dmge: DataModelGraphExplorer object manifest: pd.Df manifest to upload useSchemaLabel: bool whether to use schemaLabel (True) or display label (False) @@ -1058,7 +1058,7 @@ def buildDB(self, col_schema: List, table_manifest: pd.DataFrame, table_manipulation: str, - DME: DataModelGraphExplorer, + dmge: DataModelGraphExplorer, restrict: bool = False, ): @@ -1104,7 +1104,7 @@ def buildDB(self, if table_manipulation.lower() == 'replace': manifest_table_id = tableOps.replaceTable(specifySchema = True, columnTypeDict=col_schema,) elif table_manipulation.lower() == 'upsert': - manifest_table_id = tableOps.upsertTable(DME=DME,) + manifest_table_id = tableOps.upsertTable(dmge=dmge,) elif table_manipulation.lower() == 'update': manifest_table_id = tableOps.updateTable() @@ -1148,7 +1148,7 @@ def upload_manifest_file(self, manifest, metadataManifestPath, datasetId, restri return manifest_synapse_file_id @missing_entity_handler - def format_row_annotations(self, DME, row, 
entityId, hideBlanks): + def format_row_annotations(self, dmge, row, entityId, hideBlanks): # prepare metadata for Synapse storage (resolve display name into a name that Synapse annotations support (e.g no spaces, parenthesis) # note: the removal of special characters, will apply only to annotation keys; we are not altering the manifest # this could create a divergence between manifest column and annotations. this should be ok for most use cases. @@ -1186,7 +1186,7 @@ def format_row_annotations(self, DME, row, entityId, hideBlanks): else: if isinstance(anno_v,float) and np.isnan(anno_v): annos[anno_k] = "" - elif isinstance(anno_v,str) and re.fullmatch(csv_list_regex, anno_v) and rule_in_rule_list('list', DME.get_node_validation_rules(anno_k)): + elif isinstance(anno_v,str) and re.fullmatch(csv_list_regex, anno_v) and rule_in_rule_list('list', dmge.get_node_validation_rules(anno_k)): annos[anno_k] = anno_v.split(",") else: annos[anno_k] = anno_v @@ -1265,7 +1265,7 @@ def annotate_upload_manifest_table(self, manifest, datasetId, metadataManifestPa manifest["entityId"].fillna("", inplace=True) # get a DataModelGraphExplorer object to ensure schema attribute names used in manifest are translated to schema labels for synapse annotations - DME = DataModelGraphExplorer() + dmge = DataModelGraphExplorer() # Create table name here. 
if 'Component' in manifest.columns: @@ -1275,7 +1275,7 @@ def annotate_upload_manifest_table(self, manifest, datasetId, metadataManifestPa # Upload manifest as a table and get the SynID and manifest manifest_synapse_table_id, manifest, table_manifest = self.upload_format_manifest_table( - DME, manifest, datasetId, table_name, restrict = restrict_manifest, useSchemaLabel=useSchemaLabel,) + dmge, manifest, datasetId, table_name, restrict = restrict_manifest, useSchemaLabel=useSchemaLabel,) # Iterate over manifest rows, create Synapse entities and store corresponding entity IDs in manifest if needed # also set metadata for each synapse entity as Synapse annotations @@ -1335,7 +1335,7 @@ def _read_manifest(self, metadataManifestPath:str) -> pd.DataFrame: ) from err return manifest - def _add_id_columns_to_manifest(self, manifest: pd.DataFrame, DME: DataModelGraphExplorer): + def _add_id_columns_to_manifest(self, manifest: pd.DataFrame, dmge: DataModelGraphExplorer): """Helper function to add id and entityId columns to the manifest if they do not already exist, Fill id values per row. Args: Manifest loaded as a pd.Dataframe @@ -1347,7 +1347,7 @@ def _add_id_columns_to_manifest(self, manifest: pd.DataFrame, DME: DataModelGrap if not col_in_dataframe("Id", manifest): # See if schema has `Uuid` column specified try: - uuid_col_in_schema = DME.is_class_in_schema('Uuid') or DME.is_class_in_schema('uuid') + uuid_col_in_schema = dmge.is_class_in_schema('Uuid') or dmge.is_class_in_schema('uuid') except (KeyError): uuid_col_in_schema = False @@ -1390,10 +1390,10 @@ def _generate_table_name(self, manifest): table_name = 'synapse_storage_manifest_table' return table_name, component_name - def _add_annotations(self, DME, row, entityId, hideBlanks): + def _add_annotations(self, dmge, row, entityId, hideBlanks): """Helper function to format and add annotations to entities in Synapse. 
Args: - DME: DataModelGraphExplorer object, + dmge: DataModelGraphExplorer object, row: current row of manifest being processed entityId (str): synapseId of entity to add annotations to hideBlanks: Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. @@ -1401,7 +1401,7 @@ def _add_annotations(self, DME, row, entityId, hideBlanks): Annotations are added to entities in Synapse, no return. """ # Format annotations for Synapse - annos = self.format_row_annotations(DME, row, entityId, hideBlanks) + annos = self.format_row_annotations(dmge, row, entityId, hideBlanks) if annos: # Store annotations for an entity folder @@ -1429,7 +1429,7 @@ def _create_entity_id(self, idx, row, manifest, datasetId): def add_annotations_to_entities_files( self, - DME, + dmge, manifest, manifest_record_type, datasetId, @@ -1438,7 +1438,7 @@ def add_annotations_to_entities_files( ): '''Depending on upload type add Ids to entityId row. Add anotations to connected files. Args: - DME: DataModelGraphExplorer Object + dmge: DataModelGraphExplorer Object manifest (pd.DataFrame): loaded df containing user supplied data. manifest_record_type: valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both. datasetId (str): synapse ID of folder containing the dataset @@ -1475,13 +1475,13 @@ def add_annotations_to_entities_files( # Adding annotations to connected files. if entityId: - self._add_annotations(DME, row, entityId, hideBlanks) + self._add_annotations(dmge, row, entityId, hideBlanks) logger.info(f"Added annotations to entity: {entityId}") return manifest def upload_manifest_as_table( self, - DME, + dmge, manifest, metadataManifestPath, datasetId, @@ -1495,7 +1495,7 @@ def upload_manifest_as_table( ): """Upload manifest to Synapse as a table and csv. 
Args: - DME: DataModelGraphExplorer object + dmge: DataModelGraphExplorer object manifest (pd.DataFrame): loaded df containing user supplied data. metadataManifestPath: path to csv containing a validated metadata manifest. datasetId (str): synapse ID of folder containing the dataset @@ -1510,7 +1510,7 @@ def upload_manifest_as_table( """ # Upload manifest as a table, get the ID and updated manifest. manifest_synapse_table_id, manifest, table_manifest = self.uploadDB( - DME=DME, + dmge=dmge, manifest=manifest, datasetId=datasetId, table_name=table_name, @@ -1518,7 +1518,7 @@ def upload_manifest_as_table( useSchemaLabel=useSchemaLabel, table_manipulation=table_manipulation) - manifest = self.add_annotations_to_entities_files(DME, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id) + manifest = self.add_annotations_to_entities_files(dmge, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id) # Load manifest to synapse as a CSV File manifest_synapse_file_id = self.upload_manifest_file(manifest, metadataManifestPath, datasetId, restrict, component_name = component_name) @@ -1529,7 +1529,7 @@ def upload_manifest_as_table( # Update manifest Synapse table with new entity id column. manifest_synapse_table_id, manifest, table_manifest = self.uploadDB( - DME=DME, + dmge=dmge, manifest=manifest, datasetId=datasetId, table_name=table_name, @@ -1544,7 +1544,7 @@ def upload_manifest_as_table( def upload_manifest_as_csv( self, - DME, + dmge, manifest, metadataManifestPath, datasetId, @@ -1554,7 +1554,7 @@ def upload_manifest_as_csv( component_name): """Upload manifest to Synapse as a csv only. Args: - DME: DataModelGraphExplorer object + dmge: DataModelGraphExplorer object manifest (pd.DataFrame): loaded df containing user supplied data. metadataManifestPath: path to csv containing a validated metadata manifest. 
datasetId (str): synapse ID of folder containing the dataset @@ -1567,7 +1567,7 @@ def upload_manifest_as_csv( manifest_synapse_file_id (str): SynID of manifest csv uploaded to synapse. """ - manifest = self.add_annotations_to_entities_files(DME, manifest, manifest_record_type, datasetId, hideBlanks) + manifest = self.add_annotations_to_entities_files(dmge, manifest, manifest_record_type, datasetId, hideBlanks) # Load manifest to synapse as a CSV File manifest_synapse_file_id = self.upload_manifest_file(manifest, @@ -1583,7 +1583,7 @@ def upload_manifest_as_csv( def upload_manifest_combo( self, - DME, + dmge, manifest, metadataManifestPath, datasetId, @@ -1597,7 +1597,7 @@ def upload_manifest_combo( ): """Upload manifest to Synapse as a table and CSV with entities. Args: - DME: DataModelGraphExplorer object + dmge: DataModelGraphExplorer object manifest (pd.DataFrame): loaded df containing user supplied data. metadataManifestPath: path to csv containing a validated metadata manifest. datasetId (str): synapse ID of folder containing the dataset @@ -1612,7 +1612,7 @@ def upload_manifest_combo( manifest_synapse_file_id (str): SynID of manifest csv uploaded to synapse. 
""" manifest_synapse_table_id, manifest, table_manifest = self.uploadDB( - DME=DME, + dmge=dmge, manifest=manifest, datasetId=datasetId, table_name=table_name, @@ -1620,7 +1620,7 @@ def upload_manifest_combo( useSchemaLabel=useSchemaLabel, table_manipulation=table_manipulation) - manifest = self.add_annotations_to_entities_files(DME, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id) + manifest = self.add_annotations_to_entities_files(dmge, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id) # Load manifest to synapse as a CSV File manifest_synapse_file_id = self.upload_manifest_file(manifest, metadataManifestPath, datasetId, restrict, component_name) @@ -1632,7 +1632,7 @@ def upload_manifest_combo( # Update manifest Synapse table with new entity id column. manifest_synapse_table_id, manifest, table_manifest = self.uploadDB( - DME=DME, + dmge=dmge, manifest=manifest, datasetId=datasetId, table_name=table_name, @@ -1646,7 +1646,7 @@ def upload_manifest_combo( return manifest_synapse_file_id def associateMetadataWithFiles( - self, DME: DataModelGraphExplorer, metadataManifestPath: str, datasetId: str, manifest_record_type: str = 'table_file_and_entities', + self, dmge: DataModelGraphExplorer, metadataManifestPath: str, datasetId: str, manifest_record_type: str = 'table_file_and_entities', useSchemaLabel: bool = True, hideBlanks: bool = False, restrict_manifest = False, table_manipulation: str = 'replace', ) -> str: """Associate metadata with files in a storage dataset already on Synapse. @@ -1661,7 +1661,7 @@ def associateMetadataWithFiles( for downstream query and interaction with the data. Args: - DME: DataModelGraphExplorer Object + dmge: DataModelGraphExplorer Object metadataManifestPath: path to csv containing a validated metadata manifest. The manifest should include a column entityId containing synapse IDs of files/entities to be associated with metadata, if that is applicable to the dataset type. 
Some datasets, e.g. clinical data, do not contain file id's, but data is stored in a table: one row per item. @@ -1677,7 +1677,7 @@ def associateMetadataWithFiles( """ # Read new manifest CSV: manifest = self._read_manifest(metadataManifestPath) - manifest = self._add_id_columns_to_manifest(manifest, DME) + manifest = self._add_id_columns_to_manifest(manifest, dmge) table_name, component_name = self._generate_table_name(manifest) @@ -1685,7 +1685,7 @@ def associateMetadataWithFiles( if manifest_record_type == "file_only": manifest_synapse_file_id = self.upload_manifest_as_csv( - DME, + dmge, manifest, metadataManifestPath, datasetId=datasetId, @@ -1696,7 +1696,7 @@ def associateMetadataWithFiles( ) elif manifest_record_type == "table_and_file": manifest_synapse_file_id = self.upload_manifest_as_table( - DME, + dmge, manifest, metadataManifestPath, datasetId=datasetId, @@ -1710,7 +1710,7 @@ def associateMetadataWithFiles( ) elif manifest_record_type == "file_and_entities": manifest_synapse_file_id = self.upload_manifest_as_csv( - DME, + dmge, manifest, metadataManifestPath, datasetId=datasetId, @@ -1721,7 +1721,7 @@ def associateMetadataWithFiles( ) elif manifest_record_type == "table_file_and_entities": manifest_synapse_file_id = self.upload_manifest_combo( - DME, + dmge, manifest, metadataManifestPath, datasetId=datasetId, @@ -2207,7 +2207,7 @@ def _get_auth_token(self,): return authtoken - def upsertTable(self, DME: DataModelGraphExplorer): + def upsertTable(self, dmge: DataModelGraphExplorer): """ Method to upsert rows from a new manifest into an existing table on synapse For upsert functionality to work, primary keys must follow the naming convention of _id @@ -2216,7 +2216,7 @@ def upsertTable(self, DME: DataModelGraphExplorer): Args: - DME: DataModelGraphExplorer instance + dmge: DataModelGraphExplorer instance Returns: existingTableId: synID of the already existing table that had its metadata replaced @@ -2232,7 +2232,7 @@ def upsertTable(self, DME: 
DataModelGraphExplorer): except(SynapseHTTPError) as ex: # If error is raised because Table has old `Uuid` column and not new `Id` column, then handle and re-attempt upload if 'Id is not a valid column name or id' in str(ex): - self._update_table_uuid_column(DME) + self._update_table_uuid_column(dmge) synapseDB.upsert_table_rows(table_name=self.tableName, data=self.tableToLoad) # Raise if other error else: @@ -2240,12 +2240,12 @@ def upsertTable(self, DME: DataModelGraphExplorer): return self.existingTableId - def _update_table_uuid_column(self, DME: DataModelGraphExplorer,) -> None: + def _update_table_uuid_column(self, dmge: DataModelGraphExplorer,) -> None: """Removes the `Uuid` column when present, and relpaces with an `Id` column Used to enable backwards compatability for manifests using the old `Uuid` convention Args: - DME: DataModelGraphExplorer instance + dmge: DataModelGraphExplorer instance Returns: None @@ -2260,7 +2260,7 @@ def _update_table_uuid_column(self, DME: DataModelGraphExplorer,) -> None: if col.name.lower() == 'uuid': # See if schema has `Uuid` column specified try: - uuid_col_in_schema = DME.is_class_in_schema(col.name) + uuid_col_in_schema = dmge.is_class_in_schema(col.name) except (KeyError): uuid_col_in_schema = False diff --git a/schematic/visualization/attributes_explorer.py b/schematic/visualization/attributes_explorer.py index 704fc1f4c..0917172dd 100644 --- a/schematic/visualization/attributes_explorer.py +++ b/schematic/visualization/attributes_explorer.py @@ -36,7 +36,7 @@ def __init__(self, self.graph_data_model = data_model_grapher.generate_data_model_graph() # Instantiate Data Model Graph Explorer - self.DME = DataModelGraphExplorer(self.graph_data_model) + self.dmge = DataModelGraphExplorer(self.graph_data_model) # Instantiate Data Model Json Schema self.data_model_js = DataModelJSONSchema(jsonld_path=self.path_to_jsonld, graph=self.graph_data_model) @@ -80,7 +80,7 @@ def parse_attributes(self, save_file=True): ''' # get all 
components - component_dg = self.DME.get_digraph_by_edge_type('requiresComponent') + component_dg = self.dmge.get_digraph_by_edge_type('requiresComponent') components = component_dg.nodes() # For each data type to be loaded gather all attribtes the user would diff --git a/schematic/visualization/tangled_tree.py b/schematic/visualization/tangled_tree.py index 2a5970e05..03bffd825 100644 --- a/schematic/visualization/tangled_tree.py +++ b/schematic/visualization/tangled_tree.py @@ -56,7 +56,7 @@ def __init__(self, self.graph_data_model = data_model_grapher.generate_data_model_graph() # Instantiate Data Model Graph Explorer - self.DME = DataModelGraphExplorer(self.graph_data_model) + self.dmge = DataModelGraphExplorer(self.graph_data_model) # Set Parameters self.figure_type = figure_type.lower() @@ -92,14 +92,14 @@ def get_text_for_tangled_tree(self, text_type, save_file=False): save_file==False: Returns plain or highlighted text as a csv string. ''' # Get nodes in the digraph, many more nodes returned if figure type is dependency - cdg = self.DME.get_digraph_by_edge_type(self.dependency_type) + cdg = self.dmge.get_digraph_by_edge_type(self.dependency_type) nodes = cdg.nodes() if self.dependency_type == 'requiresComponent': component_nodes = nodes else: # get component nodes if making dependency figure - component_dg = self.DME.get_digraph_by_edge_type('requiresComponent') + component_dg = self.dmge.get_digraph_by_edge_type('requiresComponent') component_nodes = component_dg.nodes() # Initialize lists @@ -110,7 +110,7 @@ def get_text_for_tangled_tree(self, text_type, save_file=False): for node in component_nodes: # Get the highlighted components based on figure_type if self.figure_type == 'component': - highlight_descendants = self.DME.get_descendants_by_edge_type(node, 'requiresComponent') + highlight_descendants = self.dmge.get_descendants_by_edge_type(node, 'requiresComponent') elif self.figure_type == 'dependency': highlight_descendants = [node] @@ -151,13 +151,13 
@@ def get_topological_generations(self): edges: (Networkx EdgeDataView) Edges of component or dependency graph. When iterated over it works like a list of tuples. ''' # Get nodes in the digraph - digraph = self.DME.get_digraph_by_edge_type(self.dependency_type) + digraph = self.dmge.get_digraph_by_edge_type(self.dependency_type) nodes = digraph.nodes() # Get subgraph #mm_graph = self.sg.se.get_nx_schema() #subg = self.sg.get_subgraph_by_edge_type(mm_graph, self.dependency_type) - subg = self.DME.get_subgraph_by_edge_type(self.dependency_type) + subg = self.dmge.get_subgraph_by_edge_type(self.dependency_type) # Get edges and topological_gen based on figure type. if self.figure_type == 'component': @@ -230,7 +230,7 @@ def gather_component_dependency_info(self, cn, attributes_df): ''' # Gather all component dependency information - component_attributes = self.DME.get_descendants_by_edge_type( + component_attributes = self.dmge.get_descendants_by_edge_type( cn, self.dependency_type, connected=True @@ -738,7 +738,7 @@ def get_ancestors_nodes(self, subgraph, components): """ all_parent_children = {} for component in components: - all_ancestors = self.DME.get_nodes_ancestors(subgraph=subgraph, node_label=component) + all_ancestors = self.dmge.get_nodes_ancestors(subgraph=subgraph, node_label=component) all_parent_children[component] = all_ancestors return all_parent_children @@ -779,7 +779,7 @@ def get_tangled_tree_layers(self, save_file=True): if self.figure_type == 'dependency': # Get component digraph and nodes. - component_dg = self.DME.get_digraph_by_edge_type('requiresComponent') + component_dg = self.dmge.get_digraph_by_edge_type('requiresComponent') component_nodes = component_dg.nodes() # Get table of attributes. 
diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index 42400e679..30b1984e9 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -335,8 +335,8 @@ def create_single_manifest(data_type, title, dataset_id=None, output_format=None # Gather all returned result urls all_results = [] if data_type[0] == 'all manifests': - DME = DataModelGraphExplorer(graph_data_model) - component_digraph = DME.get_digraph_by_edge_type('requiresComponent') + dmge = DataModelGraphExplorer(graph_data_model) + component_digraph = dmge.get_digraph_by_edge_type('requiresComponent') components = component_digraph.nodes() for component in components: if title: @@ -767,10 +767,10 @@ def get_subgraph_by_edge_type(schema_url, relationship): # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() - DME = DataModelGraphExplorer(graph_data_model) + dmge = DataModelGraphExplorer(graph_data_model) # relationship subgraph - relationship_subgraph = DME.get_subgraph_by_edge_type(relationship) + relationship_subgraph = dmge.get_subgraph_by_edge_type(relationship) # return relationship Arr = [] for t in relationship_subgraph.edges: @@ -791,10 +791,10 @@ def find_class_specific_properties(schema_url, schema_class): # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() - DME = DataModelGraphExplorer(graph_data_model) + dmge = DataModelGraphExplorer(graph_data_model) # return properties - properties = DME.find_class_specific_properties(schema_class) + properties = dmge.find_class_specific_properties(schema_class) return properties @@ -832,9 +832,9 @@ def get_node_dependencies( # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() - DME = DataModelGraphExplorer(graph_data_model) + dmge = DataModelGraphExplorer(graph_data_model) - dependencies = DME.get_node_dependencies( + dependencies = dmge.get_node_dependencies( source_node, return_display_names, return_schema_ordered ) return 
dependencies @@ -885,9 +885,9 @@ def get_node_range( # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() - DME = DataModelGraphExplorer(graph_data_model) + dmge = DataModelGraphExplorer(graph_data_model) - node_range = DME.get_node_range(node_label, return_display_names) + node_range = dmge.get_node_range(node_label, return_display_names) return node_range def get_if_node_required(schema_url: str, node_display_name: str) -> bool: @@ -911,9 +911,9 @@ def get_if_node_required(schema_url: str, node_display_name: str) -> bool: # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() - DME = DataModelGraphExplorer(graph_data_model) + dmge = DataModelGraphExplorer(graph_data_model) - is_required = DME.get_node_required(node_display_name) + is_required = dmge.get_node_required(node_display_name) return is_required @@ -938,9 +938,9 @@ def get_node_validation_rules(schema_url: str, node_display_name: str) -> list: graph_data_model = data_model_grapher.generate_data_model_graph() #Instantiate DataModelGraphExplorer - DME = DataModelGraphExplorer(graph_data_model) + dmge = DataModelGraphExplorer(graph_data_model) - node_validation_rules = DME.get_node_validation_rules(node_display_name) + node_validation_rules = dmge.get_node_validation_rules(node_display_name) return node_validation_rules @@ -968,9 +968,9 @@ def get_nodes_display_names(schema_url: str, node_list: list[str]) -> list: graph_data_model = data_model_grapher.generate_data_model_graph() #Instantiate DataModelGraphExplorer - DME = DataModelGraphExplorer(graph_data_model) + dmge = DataModelGraphExplorer(graph_data_model) - node_display_names = DME.get_nodes_display_names(node_list) + node_display_names = dmge.get_nodes_display_names(node_list) return node_display_names def get_schematic_version() -> str: diff --git a/tests/test_api.py b/tests/test_api.py index bc9b9137f..05c88aaec 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -87,9 +87,9 @@ def 
get_MockComponent_attribute(): # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() - DME = DataModelGraphExplorer(graph_data_model) + dmge = DataModelGraphExplorer(graph_data_model) #sg = SchemaGenerator("https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld") - attributes=DME.get_node_dependencies('MockComponent') + attributes=dmge.get_node_dependencies('MockComponent') attributes.remove('Component') for MockComponent_attribute in attributes: diff --git a/tests/test_manifest.py b/tests/test_manifest.py index 4196fd795..07876b0fa 100644 --- a/tests/test_manifest.py +++ b/tests/test_manifest.py @@ -128,7 +128,7 @@ def test_init(self, helpers): assert type(generator.title) is str # assert generator.sheet_service == mock_creds["sheet_service"] assert generator.root is "Patient" - assert type(generator.DME) is DataModelGraphExplorer + assert type(generator.dmge) is DataModelGraphExplorer @pytest.mark.parametrize("data_type, exc, exc_message", [("MissingComponent", LookupError, "could not be found in the data model schema"), diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 08c5b89cc..fbf8faf47 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -1183,7 +1183,7 @@ def test_init(self, helpers, data_model): assert type(data_model_jsonld.graph) == nx.MultiDiGraph assert type(data_model_jsonld.rel_dict) == dict assert "required" in data_model_jsonld.rel_dict - assert type(data_model_jsonld.DME) == DataModelGraphExplorer + assert type(data_model_jsonld.dmge) == DataModelGraphExplorer assert data_model_jsonld.output_path == "" def test_base_jsonld_template(self, helpers): diff --git a/tests/test_store.py b/tests/test_store.py index e953d7092..5896b0ef7 100644 --- a/tests/test_store.py +++ b/tests/test_store.py @@ -172,7 +172,7 @@ def test_annotation_submission(self, synapse_store, helpers, manifest_path, test graph_data_model = 
data_model_grapher.generate_data_model_graph() # Instantiate DataModelGraphExplorer - DME = DataModelGraphExplorer(graph_data_model) + dmge = DataModelGraphExplorer(graph_data_model) try: for attempt in Retrying( @@ -182,7 +182,7 @@ def test_annotation_submission(self, synapse_store, helpers, manifest_path, test ): with attempt: manifest_id = synapse_store.associateMetadataWithFiles( - DME = DME, + dmge = dmge, metadataManifestPath = helpers.get_data_path(manifest_path), datasetId = datasetId, manifest_record_type = manifest_record_type, @@ -431,11 +431,11 @@ def test_createTable(self, helpers, synapse_store, config: Configuration, projec graph_data_model = data_model_grapher.generate_data_model_graph() # Instantiate DataModelGraphExplorer - DME = DataModelGraphExplorer(graph_data_model) + dmge = DataModelGraphExplorer(graph_data_model) # updating file view on synapse takes a long time manifestId = synapse_store.associateMetadataWithFiles( - DME = DME, + dmge = dmge, metadataManifestPath = helpers.get_data_path(manifest_path), datasetId = datasetId, manifest_record_type = 'table_and_file', @@ -483,11 +483,11 @@ def test_replaceTable(self, helpers, synapse_store, config: Configuration, proje graph_data_model = data_model_grapher.generate_data_model_graph() # Instantiate DataModelGraphExplorer - DME = DataModelGraphExplorer(graph_data_model) + dmge = DataModelGraphExplorer(graph_data_model) # updating file view on synapse takes a long time manifestId = synapse_store.associateMetadataWithFiles( - DME = DME, + dmge = dmge, metadataManifestPath = helpers.get_data_path(manifest_path), datasetId = datasetId, manifest_record_type = 'table_and_file', @@ -509,7 +509,7 @@ def test_replaceTable(self, helpers, synapse_store, config: Configuration, proje # Associate replacement manifest with files manifestId = synapse_store.associateMetadataWithFiles( - DME = DME, + dmge = dmge, metadataManifestPath = helpers.get_data_path(replacement_manifest_path), datasetId = datasetId, 
manifest_record_type = 'table_and_file', @@ -562,11 +562,11 @@ def test_upsertTable(self, helpers, synapse_store, config:Configuration, project graph_data_model = data_model_grapher.generate_data_model_graph() # Instantiate DataModelGraphExplorer - DME = DataModelGraphExplorer(graph_data_model) + dmge = DataModelGraphExplorer(graph_data_model) # updating file view on synapse takes a long time manifestId = synapse_store.associateMetadataWithFiles( - DME = DME, + dmge = dmge, metadataManifestPath = helpers.get_data_path(manifest_path), datasetId = datasetId, manifest_record_type = 'table_and_file', @@ -592,7 +592,7 @@ def test_upsertTable(self, helpers, synapse_store, config:Configuration, project # Associate new manifest with files manifestId = synapse_store.associateMetadataWithFiles( - DME = DME, + dmge = dmge, metadataManifestPath = helpers.get_data_path(replacement_manifest_path), datasetId = datasetId, manifest_record_type = 'table_and_file', From 617c4c130dcac5d8c1bd9f4d01a2ee76cb2eb843 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 19 Oct 2023 15:08:19 -0700 Subject: [PATCH 197/239] fix issue where .jsonld was doubled when converting jsonld to jsonld --- schematic/schemas/commands.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/schematic/schemas/commands.py b/schematic/schemas/commands.py index 3707d7409..80700e2bf 100644 --- a/schematic/schemas/commands.py +++ b/schematic/schemas/commands.py @@ -101,12 +101,15 @@ def convert(schema, output_jsonld): # output JSON-LD file alongside CSV file by default, get path. if output_jsonld is None: - csv_no_ext = re.sub("[.]csv$", "", schema) - output_jsonld = csv_no_ext + ".jsonld" + if not '.jsonld' in schema: + csv_no_ext = re.sub("[.]csv$", "", schema) + output_jsonld = csv_no_ext + ".jsonld" + else: + output_jsonld = schema logger.info( "By default, the JSON-LD output will be stored alongside the first " - f"input CSV file. In this case, it will appear here: '{output_jsonld}'. 
" + f"input CSV or JSON-LD file. In this case, it will appear here: '{output_jsonld}'. " "You can use the `--output_jsonld` argument to specify another file path." ) From d985fb411d300a2d3b194b1297da559452435a95 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 19 Oct 2023 15:54:05 -0700 Subject: [PATCH 198/239] fix construction of attr:rel dict when parsing JSONLD so that ispartOf is made correctly --- schematic/schemas/data_model_parser.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 2134dc7f1..74068c180 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -320,9 +320,10 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di # Go through each defined relationship type (rel_key) and its attributes (rel_vals) for rel_key, rel_vals in self.rel_dict.items(): # Determine if current entry in the for loop, can be described by the current relationship that is being cycled through. 
+ # used to also check "csv_header" in rel_vals.keys() which allows all JSONLD values through even if it does not have a CSV counterpart, will allow other values thorough in the else statement now if ( rel_vals["jsonld_key"] in entry.keys() - and "csv_header" in rel_vals.keys() + and rel_vals["csv_header"] ): # Retrieve entry value associated with the given relationship rel_entry = entry[rel_vals["jsonld_key"]] @@ -335,6 +336,21 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di attr_rel_dictionary[attr_key]["Relationships"].update( {self.rel_dict[rel_key]["csv_header"]: parsed_rel_entry} ) + elif ( + rel_vals["jsonld_key"] in entry.keys() + and not rel_vals["csv_header"] + ): + # Retrieve entry value associated with the given relationship + rel_entry = entry[rel_vals["jsonld_key"]] + # If there is an entry parset it by type and add to the attr:relationships dictionary. + if rel_entry: + parsed_rel_entry = self.parse_entry( + rel_entry=rel_entry, id_jsonld_key=id_jsonld_key + ) + # Add relationships for each attribute and relationship to the dictionary + attr_rel_dictionary[attr_key]["Relationships"].update( + {rel_key: parsed_rel_entry} + ) return attr_rel_dictionary def parse_jsonld_model( From 2d6ae20ab56dab28c2d31f1958da9e9bd4d02710 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 23 Oct 2023 20:39:48 -0700 Subject: [PATCH 199/239] update data model edges to update a list instead of the graph directly --- schematic/schemas/data_model_edges.py | 37 ++++++++++++++------------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py index 5acbb5e7b..6cec66c85 100644 --- a/schematic/schemas/data_model_edges.py +++ b/schematic/schemas/data_model_edges.py @@ -10,12 +10,12 @@ def __init__(self): def generate_edge( self, - G: nx.MultiDiGraph, node: str, all_node_dict: dict, attr_rel_dict: dict, edge_relationships: dict, - ) -> nx.MultiDiGraph: + 
edge_list:list, + ) -> list(tuple(str, str, dict{str:str, str:int})): """Generate an edge between a target node and relevant other nodes the data model. In short, does this current node belong to a recorded relationship in the attribute, relationshps dictionary. Go through each attribute and relationship to find where the node may be. Args: G, nx.MultiDiGraph: networkx graph representation of the data model, that is in the process of being fully built. At this point, all the nodes would have been added, and edges are being added per target node. @@ -28,9 +28,11 @@ def generate_edge( Relationships: { CSV Header: Value}}} edge_relationships: dict, rel_key: csv_header if the key represents a value relationship. - + edge_list: list(tuple), list of tuples describing the edges and the edge attributes, organized as (node_1, node_2, {key:edge_relationship_key, weight:int}) + At this point, the edge list will be in the process of being built. Adding edges from list so they will be added properly to the graph without being overwritten in the loop, and passing the Graph around more. Returns: - G, nx.MultiDiGraph: networkx graph representation of the data model, that has had new edges attached. + edge_list: list(tuple), list of tuples describing the edges and the edge attributes, organized as (node_1, node_2, {key:edge_relationship_key, weight:int}) + At this point, the edge list will have additional edges added related to the current node. """ # For each attribute in the model. 
for attribute_display_name, relationship in attr_rel_dict.items(): @@ -65,26 +67,25 @@ def generate_edge( # Add edges, in a manner that preserves directionality # TODO: rewrite to use edge_dir if rel_key in ["subClassOf", "domainIncludes"]: - G.add_edge( + edge_list.append(( all_node_dict[node]["label"], all_node_dict[attribute_display_name]["label"], - key=edge_key, - weight=weight, - ) + {'key':edge_key, + 'weight':weight,}) + ) else: - G.add_edge( + edge_list.append(( all_node_dict[attribute_display_name]["label"], all_node_dict[node]["label"], - key=edge_key, - weight=weight, - ) + {'key':edge_key, + 'weight':weight},) + ) # Add add rangeIncludes/valid value relationships in reverse as well, making the attribute the parent of the valid value. if rel_key == "rangeIncludes": - G.add_edge( + edge_list.append(( all_node_dict[attribute_display_name]["label"], all_node_dict[node]["label"], - key="parentOf", - weight=weight, - ) - - return G + {'key':"parentOf", + 'weight':weight},) + ) + return edge_list From 85ae4fad6733f6dd18f2c4f1ee443eccc84be6d3 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 23 Oct 2023 22:35:28 -0700 Subject: [PATCH 200/239] add timing to cycle finding so that it will move on if too many cycles are found --- schematic/schemas/data_model_validator.py | 39 ++++++++++++++++------- 1 file changed, 28 insertions(+), 11 deletions(-) diff --git a/schematic/schemas/data_model_validator.py b/schematic/schemas/data_model_validator.py index 40911e6a9..e3d626882 100644 --- a/schematic/schemas/data_model_validator.py +++ b/schematic/schemas/data_model_validator.py @@ -1,8 +1,11 @@ +import logging +import multiprocessing import networkx as nx +import time from typing import Any, Dict, Optional, Text, List, Tuple from schematic.schemas.data_model_relationships import DataModelRelationships - +logger = logging.getLogger(__name__) class DataModelValidator: """ @@ -69,6 +72,14 @@ def check_graph_has_required_node_fields(self) -> List[str]: ) return 
error + def run_cycles(self, graph): + cycles = nx.simple_cycles(self.graph) + if cycles: + for cycle in cycles: + logger.warning( + f"Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: {cycle[0]} and {cycle[1]}, please remove this loop from your model and submit again." + ) + def check_is_dag(self) -> List[str]: """Check that generated graph is a directed acyclic graph Returns: @@ -76,17 +87,23 @@ def check_is_dag(self) -> List[str]: """ error = [] if not nx.is_directed_acyclic_graph(self.graph): - # Attempt to find any cycles: - cycles = nx.simple_cycles(self.graph) - if cycles: - for cycle in cycles: - error.append( - f"Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: {cycle[0]} and {cycle[1]}, please remove this loop from your model and submit again." - ) - else: - error.append( - f"Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we could not locate the sorce of the error, please inspect your model." + cycles = multiprocessing.Process(target=self.run_cycles, name="Get Cycles", args=(self.graph,)) + cycles.start() + + # Give up to 5 seconds to find cycles, if not exit and issue standard error + time.sleep(5) + + # If thread is active + if cycles.is_alive(): + # Terminate foo + cycles.terminate() + # Cleanup + cycles.join() + + error.append( + f"Schematic requires models be a directed acyclic graph (DAG). Please inspect your model." 
) + return error def check_blacklisted_characters(self) -> List[str]: From f57b31aa818e1ca81b2340a03b44bb62185aaf43 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 23 Oct 2023 22:36:09 -0700 Subject: [PATCH 201/239] fix typing error in data_model_edges --- schematic/schemas/data_model_edges.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py index 6cec66c85..7abbc26a8 100644 --- a/schematic/schemas/data_model_edges.py +++ b/schematic/schemas/data_model_edges.py @@ -15,7 +15,7 @@ def generate_edge( attr_rel_dict: dict, edge_relationships: dict, edge_list:list, - ) -> list(tuple(str, str, dict{str:str, str:int})): + ) -> list[tuple[str, str, dict[str:str, str:int]]]: """Generate an edge between a target node and relevant other nodes the data model. In short, does this current node belong to a recorded relationship in the attribute, relationshps dictionary. Go through each attribute and relationship to find where the node may be. Args: G, nx.MultiDiGraph: networkx graph representation of the data model, that is in the process of being fully built. At this point, all the nodes would have been added, and edges are being added per target node. 
From 9b77be90a7f71e6a12e4ed03db2243404189d7e2 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 23 Oct 2023 22:42:04 -0700 Subject: [PATCH 202/239] fix issue with get_ordered_entry --- schematic/schemas/data_model_graph.py | 31 ++++++++++++++++++++++++--- 1 file changed, 28 insertions(+), 3 deletions(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index e63cd4137..f5946d35c 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -1,3 +1,4 @@ +from copy import deepcopy import graphviz import logging from typing import Any, Dict, Optional, Text @@ -94,16 +95,22 @@ def generate_data_model_graph(self) -> nx.MultiDiGraph: # Generate node and attach information (attributes) to each node G = self.dmn.generate_node(G, node_dict) + edge_list = [] ## Connect nodes via edges for node in all_nodes: # Generate edges - G = self.dme.generate_edge( - G, + edge_list_2 = self.dme.generate_edge( node, all_node_dict, self.attribute_relationships_dict, edge_relationships, + edge_list, ) + edge_list = edge_list_2.copy() + + # Add edges to the Graph + for node_1, node_2, edge_dict in edge_list: + G.add_edge(node_1, node_2, key=edge_dict['key'], weight=edge_dict['weight']) return G @@ -357,7 +364,24 @@ def get_ordered_entry(self, key: str, source_node_label: str) -> list[str]: ) edge_key = self.rel_dict[key]["edge_key"] - if self.rel_dict[key]["jsonld_direction"] == "out": + + # Get edge keys for domain includes and subclassOf + domainIncludes_edge_key = self.rel_dict['domainIncludes']['edge_key'] + subclassOf_edge_key = self.rel_dict['subClassOf']['edge_key'] + + # Order lists when they are part of subclassOf or domainIncludes + if edge_key in [domainIncludes_edge_key, subclassOf_edge_key]: + original_edge_weights_dict = { + attached_node: self.graph[source_node][attached_node][edge_key][ + "weight" + ] + for source_node, attached_node in self.graph.out_edges( + source_node_label + ) + if 
edge_key in self.graph[source_node][attached_node] + } + # Handle out edges + elif self.rel_dict[key]["jsonld_direction"] == "out": # use outedges original_edge_weights_dict = { @@ -369,6 +393,7 @@ def get_ordered_entry(self, key: str, source_node_label: str) -> list[str]: ) if edge_key in self.graph[source_node][attached_node] } + # Handle in edges else: # use inedges original_edge_weights_dict = { From 459120ad1637aa26570d8650b6ea11d92371c434 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 23 Oct 2023 23:13:28 -0700 Subject: [PATCH 203/239] WIP: fix domain includes handling, contains subclass off issue --- schematic/schemas/data_model_jsonld.py | 60 +++++++++++++++++++++++++- 1 file changed, 58 insertions(+), 2 deletions(-) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index d4e9c98c8..5363e7ffc 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -134,6 +134,28 @@ def get_edges_associated_with_node( node_edges.extend(list(self.graph.out_edges(node, data=True))) return node_edges + def get_edges_associated_with_property_nodes( + self, node:str + ) -> List[tuple[str, str, dict[str, int]]]: + """Get edges associated with property nodes to make sure we add that relationship. + Args: + node, str: Label of node property in the graph to look for assiciated edges + Returns: + node_edges, list: List of Tuples of edges associated with the given node, tuple contains the two nodes, plus the weight dict associated with the edge connection. 
+ """ + # Get edge keys for domainIncludes and subclassOf + domainIncludes_edge_key = self.rel_dict['domainIncludes']['edge_key'] + node_edges = [] + # Get dict of edges for the current property node + node_edges_dict = self.graph[node] + for node_2, edge_dict in node_edges_dict.items(): + # Look through relationships in the edge dictionary + for edge_key in edge_dict: + # If the edge is a property or subclass then add the edges to the list + if edge_key in [domainIncludes_edge_key]: + node_edges.append((node, node_2, edge_dict[edge_key])) + return node_edges + def add_edge_rels_to_template(self, template: dict, rel_vals: dict, node: str): """ Args: @@ -145,6 +167,12 @@ def add_edge_rels_to_template(self, template: dict, rel_vals: dict, node: str): # Get all edges associated with the current node node_edges = self.get_edges_associated_with_node(node=node) + + # For properties look for reverse relationships too + if node in self.dmge.find_properties(): + property_node_edges = self.get_edges_associated_with_property_nodes(node=node) + node_edges.extend(property_node_edges) + # Get node pairs and weights for each edge for node_1, node_2, weight in node_edges: # Retrieve the relationship(s) and related info between the two nodes @@ -153,6 +181,7 @@ def add_edge_rels_to_template(self, template: dict, rel_vals: dict, node: str): # Get the relationship edge key edge_key = rel_vals["edge_key"] + # Check if edge_key is even one of the relationships for this node pair. 
if edge_key in node_edge_relationships: # for each relationship between the given nodes @@ -160,7 +189,10 @@ def add_edge_rels_to_template(self, template: dict, rel_vals: dict, node: str): # If the relationship defined and edge_key if relationship == edge_key: # TODO: rewrite to use edge_dir - if edge_key in ["domainIncludes", "parentOf"]: + + domainIncludes_edge_key = self.rel_dict['domainIncludes']['edge_key'] + subclassOf_edge_key = self.rel_dict['subClassOf']['edge_key'] + if edge_key in [domainIncludes_edge_key, subclassOf_edge_key]: if node_2 == node: # Make sure the key is in the template (differs between properties and classes) if rel_vals["jsonld_key"] in template.keys(): @@ -178,6 +210,23 @@ def add_edge_rels_to_template(self, template: dict, rel_vals: dict, node: str): ) else: template[rel_vals["jsonld_key"]] == node_1 + elif node_1 == node: + # Make sure the key is in the template (differs between properties and classes) + if rel_vals["jsonld_key"] in template.keys(): + node_2_id = {"@id": "bts:" + node_2} + # TODO Move this to a helper function to clear up. 
+ if ( + isinstance( + template[rel_vals["jsonld_key"]], list + ) + and node_2_id + not in template[rel_vals["jsonld_key"]] + ): + template[rel_vals["jsonld_key"]].append( + node_2_id + ) + else: + template[rel_vals["jsonld_key"]] == node_2 else: if node_1 == node: # Make sure the key is in the template (differs between properties and classes) @@ -238,6 +287,7 @@ def fill_entry_template(self, template: dict, node: str) -> dict: template = self.add_edge_rels_to_template( template=template, rel_vals=rel_vals, node=node ) + # Fill in node value information else: template = self.add_node_info_to_template( @@ -249,11 +299,11 @@ def fill_entry_template(self, template: dict, node: str) -> dict: template=template, data_model_relationships=data_model_relationships, ) + # Reorder lists based on weights: template = self.reorder_template_entries( template=template, ) - # Add contexts to certain values template = self.add_contexts_to_entries( template=template, @@ -364,6 +414,12 @@ def reorder_template_entries(self, template: dict) -> dict: sorted_edges = self.dmge.get_ordered_entry( key=key, source_node_label=template_label ) + try: + len(entry) == len(sorted_edges) + except: + breakpoint() + #raise ValueError("There is an error with sorting values in the JSONLD, please issue a bug report.") + edge_weights_dict = {edge: i for i, edge in enumerate(sorted_edges)} ordered_edges = [0] * len(edge_weights_dict.keys()) for edge, normalized_weight in edge_weights_dict.items(): From 44cc0e88c68959f43e1c5f6a1222002db5887e77 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 24 Oct 2023 06:06:02 -0700 Subject: [PATCH 204/239] change jsonld direction for domainIncludes and update jsonld to remove subclassOf specific handling --- schematic/schemas/data_model_graph.py | 17 +---------------- schematic/schemas/data_model_relationships.py | 2 +- 2 files changed, 2 insertions(+), 17 deletions(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py 
index f5946d35c..917d1eb71 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -365,23 +365,8 @@ def get_ordered_entry(self, key: str, source_node_label: str) -> list[str]: edge_key = self.rel_dict[key]["edge_key"] - # Get edge keys for domain includes and subclassOf - domainIncludes_edge_key = self.rel_dict['domainIncludes']['edge_key'] - subclassOf_edge_key = self.rel_dict['subClassOf']['edge_key'] - - # Order lists when they are part of subclassOf or domainIncludes - if edge_key in [domainIncludes_edge_key, subclassOf_edge_key]: - original_edge_weights_dict = { - attached_node: self.graph[source_node][attached_node][edge_key][ - "weight" - ] - for source_node, attached_node in self.graph.out_edges( - source_node_label - ) - if edge_key in self.graph[source_node][attached_node] - } # Handle out edges - elif self.rel_dict[key]["jsonld_direction"] == "out": + if self.rel_dict[key]["jsonld_direction"] == "out": # use outedges original_edge_weights_dict = { diff --git a/schematic/schemas/data_model_relationships.py b/schematic/schemas/data_model_relationships.py index 6cf85d899..9be7952dc 100644 --- a/schematic/schemas/data_model_relationships.py +++ b/schematic/schemas/data_model_relationships.py @@ -153,7 +153,7 @@ def define_data_model_relationships(self) -> Dict: "jsonld_key": "schema:domainIncludes", "csv_header": "Properties", "edge_key": "domainValue", - "jsonld_direction": "in", + "jsonld_direction": "out", "edge_dir": "in", "type": list, "edge_rel": True, From a7def06856daaf440b12fa00a1966c57570ec2c9 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 24 Oct 2023 07:00:10 -0700 Subject: [PATCH 205/239] handle subclassOf and domainIncludes separately when handling edge relationships --- schematic/schemas/data_model_jsonld.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 5363e7ffc..41b5a05e3 
100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -181,7 +181,6 @@ def add_edge_rels_to_template(self, template: dict, rel_vals: dict, node: str): # Get the relationship edge key edge_key = rel_vals["edge_key"] - # Check if edge_key is even one of the relationships for this node pair. if edge_key in node_edge_relationships: # for each relationship between the given nodes @@ -189,10 +188,9 @@ def add_edge_rels_to_template(self, template: dict, rel_vals: dict, node: str): # If the relationship defined and edge_key if relationship == edge_key: # TODO: rewrite to use edge_dir - domainIncludes_edge_key = self.rel_dict['domainIncludes']['edge_key'] subclassOf_edge_key = self.rel_dict['subClassOf']['edge_key'] - if edge_key in [domainIncludes_edge_key, subclassOf_edge_key]: + if edge_key in [subclassOf_edge_key]: if node_2 == node: # Make sure the key is in the template (differs between properties and classes) if rel_vals["jsonld_key"] in template.keys(): @@ -210,7 +208,8 @@ def add_edge_rels_to_template(self, template: dict, rel_vals: dict, node: str): ) else: template[rel_vals["jsonld_key"]] == node_1 - elif node_1 == node: + elif edge_key in [domainIncludes_edge_key]: + if node_1 == node: # Make sure the key is in the template (differs between properties and classes) if rel_vals["jsonld_key"] in template.keys(): node_2_id = {"@id": "bts:" + node_2} @@ -414,9 +413,7 @@ def reorder_template_entries(self, template: dict) -> dict: sorted_edges = self.dmge.get_ordered_entry( key=key, source_node_label=template_label ) - try: - len(entry) == len(sorted_edges) - except: + if not len(entry) == len(sorted_edges): breakpoint() #raise ValueError("There is an error with sorting values in the JSONLD, please issue a bug report.") From e2f540e30fc84bae3e535d6488236cc910cd9387 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 24 Oct 2023 13:54:00 -0700 Subject: [PATCH 206/239] update JSONLD parsing to better handle properties 
--- schematic/schemas/data_model_parser.py | 51 +++++++++++++++++++++++--- 1 file changed, 46 insertions(+), 5 deletions(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 74068c180..c7423eb67 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -274,6 +274,21 @@ def parse_entry(self, rel_entry: any, id_jsonld_key: str) -> Any: parsed_rel_entry = rel_entry return parsed_rel_entry + def get_display_name_from_label(self, label, model_jsonld): + jsonld_keys_to_extract = ["label", "displayName"] + label_jsonld_key, dn_jsonld_key = [ + self.rel_dict[key]["jsonld_key"] for key in jsonld_keys_to_extract + ] + for entry in model_jsonld: + # Get the attr key for the dictionary + if dn_jsonld_key in entry: + # The attr_key is the entry display name if one was recorded + attr_key = entry[dn_jsonld_key] + else: + # If not we wil use the get the label. + attr_key = entry[label_jsonld_key] + return attr_key + def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Dict: """ Args: @@ -327,15 +342,41 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di ): # Retrieve entry value associated with the given relationship rel_entry = entry[rel_vals["jsonld_key"]] - # If there is an entry parset it by type and add to the attr:relationships dictionary. + # If there is an entry parse it by type and add to the attr:relationships dictionary. if rel_entry: parsed_rel_entry = self.parse_entry( rel_entry=rel_entry, id_jsonld_key=id_jsonld_key ) - # Add relationships for each attribute and relationship to the dictionary - attr_rel_dictionary[attr_key]["Relationships"].update( - {self.rel_dict[rel_key]["csv_header"]: parsed_rel_entry} - ) + rel_csv_header = self.rel_dict[rel_key]["csv_header"] + if rel_key == 'domainIncludes': + # In the JSONLD the domain includes field contains the ids of attributes that the current attribute is the property of. 
+ # Because of this we need to handle these values differently. + # We will get the values in the field (parsed_val), then add the current attribute as to the property key in the attr_rel_dictionary[property_attr_key]. + for parsed_val in parsed_rel_entry: + attr_in_dict = False + property_attr_key='' + # Check if the parsed value is already a part of the attr_rel_dictionary + for attr_dn, rels in attr_rel_dictionary.items(): + if parsed_val == rels["Relationships"].get('label'): + property_attr_key = attr_dn + attr_in_dict = True + # If it is part of the dictionary update add current attribute as a property of the parsed value + if attr_in_dict == True: + if not rel_csv_header in attr_rel_dictionary[property_attr_key]["Relationships"]: + attr_rel_dictionary[property_attr_key]["Relationships"].update({rel_csv_header:[entry[label_jsonld_key]]}) + else: + attr_rel_dictionary[property_attr_key]["Relationships"][rel_csv_header].append(entry[label_jsonld_key]) + # If the parsed_val is not already recorded in the dictionary, add it + elif attr_in_dict == False: + # Get the display name for the parsed value + property_attr_key = self.get_display_name_from_label(parsed_val, model_jsonld) + + attr_rel_dictionary.update(attr_dict_template(property_attr_key)) + attr_rel_dictionary[property_attr_key]["Relationships"].update({rel_csv_header:[entry[label_jsonld_key]]}) + else: + attr_rel_dictionary[attr_key]["Relationships"].update( + {rel_csv_header: parsed_rel_entry} + ) elif ( rel_vals["jsonld_key"] in entry.keys() and not rel_vals["csv_header"] From 7181728d6c664b3b7a4dce52f93f192e44cc34aa Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 24 Oct 2023 15:00:25 -0700 Subject: [PATCH 207/239] update test_skip_edge to work with new edge_list handling --- tests/test_schemas.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index fbf8faf47..532e00b1a 100644 --- a/tests/test_schemas.py +++ 
b/tests/test_schemas.py @@ -900,13 +900,17 @@ def test_skip_edge(self, helpers, DMR, data_model_edges): # Check the edges in the graph, there should be none before_edges = deepcopy(G.edges) + edge_list = [] # Generate an edge in the graph with one node and a subset of the parsed data model # We're attempting to add an edge for a node that is the only one in the graph, # so `generate_edge` should skip adding edges and return the same graph - G = data_model_edges.generate_edge( - G, node, node_dict, {node: parsed_data_model[node]}, edge_relationships + edge_list_2 = data_model_edges.generate_edge( + node, node_dict, {node: parsed_data_model[node]}, edge_relationships, edge_list, ) + for node_1, node_2, edge_dict in edge_list_2: + G.add_edge(node_1, node_2, key=edge_dict['key'], weight=edge_dict['weight']) + breakpoint() # Assert that no edges were added and that the current graph edges are the same as before the call to `generate_edge` assert before_edges == G.edges From c9737b37b2d91abfe1ddaa8dbb031148d2deb121 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 25 Oct 2023 09:27:25 -0700 Subject: [PATCH 208/239] add parentOf in the JSONLD parsing update, also change some naming to be clearer --- schematic/schemas/data_model_parser.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index c7423eb67..f1684da1c 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -348,31 +348,32 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di rel_entry=rel_entry, id_jsonld_key=id_jsonld_key ) rel_csv_header = self.rel_dict[rel_key]["csv_header"] - if rel_key == 'domainIncludes': - # In the JSONLD the domain includes field contains the ids of attributes that the current attribute is the property of. 
+ if rel_key == 'domainIncludes' or rel_key == 'parentOf': + # In the JSONLD the domain includes field contains the ids of attributes that the current attribute is the property/parent of. # Because of this we need to handle these values differently. - # We will get the values in the field (parsed_val), then add the current attribute as to the property key in the attr_rel_dictionary[property_attr_key]. + # We will get the values in the field (parsed_val), then add the current attribute as to the property key in the attr_rel_dictionary[p_attr_key]. for parsed_val in parsed_rel_entry: attr_in_dict = False - property_attr_key='' + #Get propert/parent key (displayName) + p_attr_key='' # Check if the parsed value is already a part of the attr_rel_dictionary for attr_dn, rels in attr_rel_dictionary.items(): if parsed_val == rels["Relationships"].get('label'): - property_attr_key = attr_dn + p_attr_key = attr_dn attr_in_dict = True # If it is part of the dictionary update add current attribute as a property of the parsed value if attr_in_dict == True: - if not rel_csv_header in attr_rel_dictionary[property_attr_key]["Relationships"]: - attr_rel_dictionary[property_attr_key]["Relationships"].update({rel_csv_header:[entry[label_jsonld_key]]}) + if not rel_csv_header in attr_rel_dictionary[p_attr_key]["Relationships"]: + attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[label_jsonld_key]]}) else: - attr_rel_dictionary[property_attr_key]["Relationships"][rel_csv_header].append(entry[label_jsonld_key]) + attr_rel_dictionary[p_attr_key]["Relationships"][rel_csv_header].append(entry[label_jsonld_key]) # If the parsed_val is not already recorded in the dictionary, add it elif attr_in_dict == False: # Get the display name for the parsed value - property_attr_key = self.get_display_name_from_label(parsed_val, model_jsonld) + p_attr_key = self.get_display_name_from_label(parsed_val, model_jsonld) - 
attr_rel_dictionary.update(attr_dict_template(property_attr_key)) - attr_rel_dictionary[property_attr_key]["Relationships"].update({rel_csv_header:[entry[label_jsonld_key]]}) + attr_rel_dictionary.update(attr_dict_template(p_attr_key)) + attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[label_jsonld_key]]}) else: attr_rel_dictionary[attr_key]["Relationships"].update( {rel_csv_header: parsed_rel_entry} From 2cd4f85bd1a8112125768735422b53ab5506ec1c Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 25 Oct 2023 09:28:07 -0700 Subject: [PATCH 209/239] update graph tests to conform to new list expectations --- tests/test_schemas.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 532e00b1a..7436712d1 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -910,7 +910,7 @@ def test_skip_edge(self, helpers, DMR, data_model_edges): for node_1, node_2, edge_dict in edge_list_2: G.add_edge(node_1, node_2, key=edge_dict['key'], weight=edge_dict['weight']) - breakpoint() + # Assert that no edges were added and that the current graph edges are the same as before the call to `generate_edge` assert before_edges == G.edges @@ -957,11 +957,16 @@ def test_generate_edge( # Check the edges in the graph, there should be none before_edges = deepcopy(G.edges) + edge_list = [] + # Generate edges for whichever node we are testing - G = data_model_edges.generate_edge( - G, node_to_add, all_node_dict, parsed_data_model, edge_relationships + edge_list_2 = data_model_edges.generate_edge( + node_to_add, all_node_dict, parsed_data_model, edge_relationships, edge_list, ) + for node_1, node_2, edge_dict in edge_list_2: + G.add_edge(node_1, node_2, key=edge_dict['key'], weight=edge_dict['weight']) + # Assert that the current edges are different from the edges of the graph before assert G.edges > before_edges @@ -1018,11 +1023,16 @@ def test_generate_weights( # 
Check the edges in the graph, there should be none before_edges = deepcopy(G.edges) + edge_list = [] + # Generate edges for whichever node we are testing - G = data_model_edges.generate_edge( - G, node_to_add, all_node_dict, parsed_data_model, edge_relationships + edge_list_2 = data_model_edges.generate_edge( + node_to_add, all_node_dict, parsed_data_model, edge_relationships, edge_list, ) + for node_1, node_2, edge_dict in edge_list_2: + G.add_edge(node_1, node_2, key=edge_dict['key'], weight=edge_dict['weight']) + # Assert that the current edges are different from the edges of the graph before assert G.edges > before_edges From a62530ec22646fb8bdd01edc3e754fb7bdd96073 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 25 Oct 2023 09:28:59 -0700 Subject: [PATCH 210/239] update validation tests to use dmge vs DME --- tests/test_validation.py | 105 +++++++++++++++++---------------------- 1 file changed, 45 insertions(+), 60 deletions(-) diff --git a/tests/test_validation.py b/tests/test_validation.py index 923669f84..1b447190d 100644 --- a/tests/test_validation.py +++ b/tests/test_validation.py @@ -20,25 +20,10 @@ logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) -@pytest.fixture -def DME(helpers): - - inputModelLocation = helpers.get_data_path('example.model.jsonld') - #sg = SchemaGenerator(inputModelLocation) - data_model_parser = DataModelParser(path_to_data_model = inputModelLocation) - #Parse Model - parsed_data_model = data_model_parser.parse_model() - - # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model) - - # Generate graph - graph_data_model = data_model_grapher.generate_data_model_graph() - - # Instantiate DataModelGraphExplorer - DME = DataModelGraphExplorer(graph_data_model) - - yield DME +@pytest.fixture(name="dmge") +def DMGE(helpers): + dmge = helpers.get_data_model_graph_explorer(path="example.model.jsonld") + yield dmge @pytest.fixture def metadataModel(helpers): @@ -73,7 +58,7 @@ 
def test_valid_manifest(self,helpers,metadataModel): assert warnings == [] - def test_invalid_manifest(self,helpers, DME,metadataModel): + def test_invalid_manifest(self,helpers, dmge,metadataModel): manifestPath = helpers.get_data_path("mock_manifests/Invalid_Test_Manifest.csv") rootNode = 'MockComponent' @@ -88,7 +73,7 @@ def test_invalid_manifest(self,helpers, DME,metadataModel): row_num = '3', attribute_name = 'Check Num', invalid_entry = 'c', - DME = DME, + dmge = dmge, )[0] in errors assert GenerateError.generate_type_error( @@ -96,7 +81,7 @@ def test_invalid_manifest(self,helpers, DME,metadataModel): row_num = '3', attribute_name = 'Check Int', invalid_entry = '5.63', - DME = DME, + dmge = dmge, )[0] in errors assert GenerateError.generate_type_error( @@ -104,7 +89,7 @@ def test_invalid_manifest(self,helpers, DME,metadataModel): row_num = '3', attribute_name = 'Check String', invalid_entry = '94', - DME = DME, + dmge = dmge, )[0] in errors assert GenerateError.generate_list_error( @@ -114,7 +99,7 @@ def test_invalid_manifest(self,helpers, DME,metadataModel): attribute_name = 'Check List', list_error = "not_comma_delimited", invalid_entry = 'invalid list values', - DME = DME, + dmge = dmge, )[0] in errors assert GenerateError.generate_list_error( @@ -124,7 +109,7 @@ def test_invalid_manifest(self,helpers, DME,metadataModel): attribute_name = 'Check Regex List', list_error = "not_comma_delimited", invalid_entry = 'ab cd ef', - DME = DME, + dmge = dmge, )[0] in errors assert GenerateError.generate_regex_error( @@ -134,7 +119,7 @@ def test_invalid_manifest(self,helpers, DME,metadataModel): attribute_name = 'Check Regex Format', module_to_call = 'match', invalid_entry = 'm', - DME = DME, + dmge = dmge, )[0] in errors assert GenerateError.generate_regex_error( @@ -144,7 +129,7 @@ def test_invalid_manifest(self,helpers, DME,metadataModel): attribute_name = 'Check Regex Single', module_to_call = 'search', invalid_entry = 'q', - DME = DME, + dmge = dmge, )[0] in 
errors assert GenerateError.generate_regex_error( @@ -154,7 +139,7 @@ def test_invalid_manifest(self,helpers, DME,metadataModel): attribute_name = 'Check Regex Integer', module_to_call = 'search', invalid_entry = '5.4', - DME = DME, + dmge = dmge, )[0] in errors assert GenerateError.generate_url_error( @@ -165,14 +150,14 @@ def test_invalid_manifest(self,helpers, DME,metadataModel): attribute_name = 'Check URL', argument = None, invalid_entry = 'http://googlef.com/', - DME = DME, + dmge = dmge, )[0] in errors date_err = GenerateError.generate_content_error( val_rule = 'date', attribute_name = 'Check Date', - DME = DME, + dmge = dmge, row_num = ['2','3','4'], error_val = ['84-43-094', '32-984', 'notADate'], )[0] @@ -182,7 +167,7 @@ def test_invalid_manifest(self,helpers, DME,metadataModel): assert GenerateError.generate_content_error( val_rule = 'unique error', attribute_name = 'Check Unique', - DME = DME, + dmge = dmge, row_num = ['2','3','4'], error_val = ['str1'], )[0] in errors @@ -190,7 +175,7 @@ def test_invalid_manifest(self,helpers, DME,metadataModel): assert GenerateError.generate_content_error( val_rule = 'inRange 50 100 error', attribute_name = 'Check Range', - DME = DME, + dmge = dmge, row_num = ['3'], error_val = ['30'], )[0] in errors @@ -199,13 +184,13 @@ def test_invalid_manifest(self,helpers, DME,metadataModel): assert GenerateError.generate_content_error( val_rule = 'recommended', attribute_name = 'Check Recommended', - DME = DME, + dmge = dmge, )[1] in warnings assert GenerateError.generate_content_error( val_rule = 'protectAges', attribute_name = 'Check Ages', - DME = DME, + dmge = dmge, row_num = ['2','3'], error_val = ['6549','32851'], )[1] in warnings @@ -216,7 +201,7 @@ def test_invalid_manifest(self,helpers, DME,metadataModel): attribute_name='Check Match at Least', invalid_entry = ['7163'], missing_manifest_ID = ['syn27600110', 'syn29381803'], - DME = DME, + dmge = dmge, )[1] in warnings assert GenerateError.generate_cross_warning( @@ 
-224,7 +209,7 @@ def test_invalid_manifest(self,helpers, DME,metadataModel): row_num = ['3'], attribute_name = 'Check Match at Least values', invalid_entry = ['51100'], - DME = DME, + dmge = dmge, )[1] in warnings assert \ @@ -232,14 +217,14 @@ def test_invalid_manifest(self,helpers, DME,metadataModel): val_rule = 'matchExactlyOne', attribute_name='Check Match Exactly', matching_manifests = ['syn29862078', 'syn27648165'], - DME = DME, + dmge = dmge, )[1] in warnings \ or \ GenerateError.generate_cross_warning( val_rule = 'matchExactlyOne', attribute_name='Check Match Exactly', matching_manifests = ['syn29862066', 'syn27648165'], - DME = DME, + dmge = dmge, )[1] in warnings @@ -248,7 +233,7 @@ def test_invalid_manifest(self,helpers, DME,metadataModel): row_num = ['2', '3', '4'], attribute_name='Check Match Exactly values', invalid_entry = ['71738', '98085', '210065'], - DME = DME, + dmge = dmge, )[1] warning_in_list = [cross_warning[1] in warning for warning in warnings] assert any(warning_in_list) @@ -256,7 +241,7 @@ def test_invalid_manifest(self,helpers, DME,metadataModel): - def test_in_house_validation(self,helpers,DME,metadataModel): + def test_in_house_validation(self,helpers,dmge,metadataModel): manifestPath = helpers.get_data_path("mock_manifests/Invalid_Test_Manifest.csv") rootNode = 'MockComponent' @@ -272,7 +257,7 @@ def test_in_house_validation(self,helpers,DME,metadataModel): row_num = '3', attribute_name = 'Check Num', invalid_entry = 'c', - DME = DME, + dmge = dmge, )[0] in errors assert GenerateError.generate_type_error( @@ -280,7 +265,7 @@ def test_in_house_validation(self,helpers,DME,metadataModel): row_num = '3', attribute_name = 'Check Int', invalid_entry = '5.63', - DME = DME, + dmge = dmge, )[0] in errors assert GenerateError.generate_type_error( @@ -288,7 +273,7 @@ def test_in_house_validation(self,helpers,DME,metadataModel): row_num = '3', attribute_name = 'Check String', invalid_entry = '94', - DME = DME, + dmge = dmge, )[0] in errors 
assert GenerateError.generate_type_error( @@ -296,7 +281,7 @@ def test_in_house_validation(self,helpers,DME,metadataModel): row_num = '3', attribute_name = 'Check NA', invalid_entry = '9.5', - DME = DME, + dmge = dmge, )[0] in errors assert GenerateError.generate_list_error( @@ -306,7 +291,7 @@ def test_in_house_validation(self,helpers,DME,metadataModel): attribute_name = 'Check List', list_error = "not_comma_delimited", invalid_entry = 'invalid list values', - DME = DME, + dmge = dmge, )[0] in errors assert GenerateError.generate_list_error( @@ -316,7 +301,7 @@ def test_in_house_validation(self,helpers,DME,metadataModel): attribute_name = 'Check Regex List', list_error = "not_comma_delimited", invalid_entry = 'ab cd ef', - DME = DME, + dmge = dmge, )[0] in errors assert GenerateError.generate_regex_error( @@ -326,7 +311,7 @@ def test_in_house_validation(self,helpers,DME,metadataModel): attribute_name = 'Check Regex Single', module_to_call = 'search', invalid_entry = 'q', - DME = DME, + dmge = dmge, )[0] in errors assert GenerateError.generate_regex_error( @@ -336,7 +321,7 @@ def test_in_house_validation(self,helpers,DME,metadataModel): attribute_name = 'Check Regex Format', module_to_call = 'match', invalid_entry = 'm', - DME = DME, + dmge = dmge, )[0] in errors assert GenerateError.generate_url_error( @@ -347,7 +332,7 @@ def test_in_house_validation(self,helpers,DME,metadataModel): attribute_name = 'Check URL', argument = None, invalid_entry = 'http://googlef.com/', - DME = DME, + dmge = dmge, )[0] in errors @@ -358,7 +343,7 @@ def test_in_house_validation(self,helpers,DME,metadataModel): attribute_name='Check Match at Least', invalid_entry = ['7163'], missing_manifest_ID = ['syn27600110', 'syn29381803'], - DME = DME, + dmge = dmge, )[1] in warnings assert GenerateError.generate_cross_warning( @@ -366,7 +351,7 @@ def test_in_house_validation(self,helpers,DME,metadataModel): row_num = ['3'], attribute_name = 'Check Match at Least values', invalid_entry = 
['51100'], - DME = DME, + dmge = dmge, )[1] in warnings assert \ @@ -374,14 +359,14 @@ def test_in_house_validation(self,helpers,DME,metadataModel): val_rule = 'matchExactlyOne', attribute_name='Check Match Exactly', matching_manifests = ['syn29862078', 'syn27648165'], - DME = DME, + dmge = dmge, )[1] in warnings \ or \ GenerateError.generate_cross_warning( val_rule = 'matchExactlyOne', attribute_name='Check Match Exactly', matching_manifests = ['syn29862066', 'syn27648165'], - DME = DME, + dmge = dmge, )[1] in warnings assert GenerateError.generate_cross_warning( @@ -389,13 +374,13 @@ def test_in_house_validation(self,helpers,DME,metadataModel): row_num = ['2', '3', '4'], attribute_name='Check Match Exactly values', invalid_entry = ['71738', '98085', '210065'], - DME = DME, + dmge = dmge, )[1] in warnings @pytest.mark.rule_combos(reason = 'This introduces a great number of tests covering every possible rule combination that are only necessary on occasion.') @pytest.mark.parametrize("base_rule, second_rule", get_rule_combinations()) - def test_rule_combinations(self, helpers, DME, base_rule, second_rule, metadataModel): + def test_rule_combinations(self, helpers, dmge, base_rule, second_rule, metadataModel): """ TODO: Describe what this test is doing. Updating the data model graph to allow testing of allowable rule combinations. 
@@ -408,12 +393,12 @@ def test_rule_combinations(self, helpers, DME, base_rule, second_rule, metadataM manifest = helpers.get_data_frame(manifestPath) # Get a view of the node data - all_node_data = DME.graph.nodes.data() + all_node_data = dmge.graph.nodes.data() # Update select validation rules in the data model graph for columns in the manifest for attribute in manifest.columns: # Get the node label - node_label = DME.get_node_label(attribute) + node_label = dmge.get_node_label(attribute) # Get a view of the recorded info for current node node_info = all_node_data[node_label] @@ -440,21 +425,21 @@ def test_rule_combinations(self, helpers, DME, base_rule, second_rule, metadataM # Update the manifest to only contain the Component and attribute column where the rule was changed. manifest = manifest[['Component', attribute]] - data_model_js = DataModelJSONSchema(jsonld_path=helpers.get_data_path('example.model.jsonld'), graph=DME.graph) + data_model_js = DataModelJSONSchema(jsonld_path=helpers.get_data_path('example.model.jsonld'), graph=dmge.graph) json_schema = data_model_js.get_json_validation_schema(source_node=rootNode, schema_name=rootNode + "_validation") validateManifest = ValidateManifest( errors = [], manifest = manifest, manifestPath = manifestPath, - DME = DME, + dmge = dmge, jsonSchema = json_schema ) #perform validation with no exceptions raised _, errors, warnings = validateManifest.validate_manifest_rules( manifest = manifest, - DME = DME, + dmge = dmge, restrict_rules = False, project_scope = None, ) From 1511eb3d793be4788305ceca28ded5b029b27a21 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 25 Oct 2023 09:29:41 -0700 Subject: [PATCH 211/239] update the expected errors since the old errors are just printed as warnings to the screen --- tests/test_validator.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/tests/test_validator.py b/tests/test_validator.py index 0f24f39fe..641fcccea 100644 --- 
a/tests/test_validator.py +++ b/tests/test_validator.py @@ -103,9 +103,7 @@ def test_dag(self, helpers): validator_errors = DMV.check_is_dag() # nodes could be in different order so need to account for that - expected_errors = ['Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: Patient and PatientID, please remove this loop from your model and submit again.', - 'Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: PatientID and Patient, please remove this loop from your model and submit again.', - 'Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: Diagnosis and Diagnosis, please remove this loop from your model and submit again.'] - + expected_errors = ['Schematic requires models be a directed acyclic graph (DAG). Please inspect your model.'] + assert validator_errors[0] in expected_errors From 74d3b98f97eb14f70e9f6ad3a02005812b237329 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 25 Oct 2023 10:22:23 -0700 Subject: [PATCH 212/239] convert breakpoint to logger error --- schematic/schemas/data_model_jsonld.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index 41b5a05e3..fa9af86ef 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -414,8 +414,7 @@ def reorder_template_entries(self, template: dict) -> dict: key=key, source_node_label=template_label ) if not len(entry) == len(sorted_edges): - breakpoint() - #raise ValueError("There is an error with sorting values in the JSONLD, please issue a bug report.") + logger.error("There is an error with sorting values in the JSONLD, please issue a bug report.") edge_weights_dict = {edge: i for i, edge in enumerate(sorted_edges)} ordered_edges = [0] * len(edge_weights_dict.keys()) From 
8b25ce7534b108a05de0cb495d9e8562f74754f4 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 7 Nov 2023 10:12:40 -0800 Subject: [PATCH 213/239] update poetry.lock file --- poetry.lock | 753 +--------------------------------------------------- 1 file changed, 1 insertion(+), 752 deletions(-) diff --git a/poetry.lock b/poetry.lock index 27b1e11b9..4bf58751b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,8 +1,4 @@ -<<<<<<< HEAD -# This file is automatically @generated by Poetry and should not be changed by hand. -======= # This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d [[package]] name = "alabaster" @@ -73,10 +69,6 @@ files = [ name = "argon2-cffi" version = "23.1.0" description = "Argon2 for Python" -<<<<<<< HEAD -category = "main" -======= ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d optional = false python-versions = ">=3.7" files = [ @@ -147,11 +139,7 @@ types-python-dateutil = ">=2.8.10" [package.extras] doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] -<<<<<<< HEAD -test = ["dateparser (>=1.0.0,<2.0.0)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (>=3.0.0,<4.0.0)"] -======= test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d [[package]] name = "astroid" @@ -171,22 +159,13 @@ wrapt = {version = ">=1.11,<2", markers = "python_version < \"3.11\""} [[package]] name = "asttokens" -<<<<<<< HEAD -version = "2.4.0" -======= version = "2.4.1" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "Annotate AST trees with source code positions" optional = false python-versions = "*" files = [ -<<<<<<< HEAD - {file = "asttokens-2.4.0-py2.py3-none-any.whl", hash = "sha256:cf8fc9e61a86461aa9fb161a14a0841a03c405fa829ac6b202670b3495d2ce69"}, - 
{file = "asttokens-2.4.0.tar.gz", hash = "sha256:2e0171b991b2c959acc6c49318049236844a5da1d65ba2672c4880c1c894834e"}, -======= {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -210,21 +189,6 @@ files = [ [package.dependencies] typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} -[[package]] -name = "async-lru" -version = "2.0.4" -description = "Simple LRU cache for asyncio" -category = "main" -optional = false -python-versions = ">=3.8" -files = [ - {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, - {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} - [[package]] name = "attrs" version = "23.1.0" @@ -245,42 +209,17 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte [[package]] name = "babel" -<<<<<<< HEAD -version = "2.13.0" -======= version = "2.13.1" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "Internationalization utilities" optional = false python-versions = ">=3.7" files = [ -<<<<<<< HEAD - {file = "Babel-2.13.0-py3-none-any.whl", hash = "sha256:fbfcae1575ff78e26c7449136f1abbefc3c13ce542eeb13d43d50d8b047216ec"}, - {file = "Babel-2.13.0.tar.gz", hash = "sha256:04c3e2d28d2b7681644508f836be388ae49e0cfe91465095340395b60d00f210"}, -] - -[package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] - -[[package]] -name = "backcall" -version = "0.2.0" -description = "Specifications for callback functions passed in to an API" -category = "main" 
-optional = false -python-versions = "*" -files = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] -======= {file = "Babel-2.13.1-py3-none-any.whl", hash = "sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed"}, {file = "Babel-2.13.1.tar.gz", hash = "sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900"}, ] [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d [[package]] name = "beautifulsoup4" @@ -302,39 +241,11 @@ lxml = ["lxml"] [[package]] name = "black" -<<<<<<< HEAD -version = "23.9.1" -======= version = "23.10.1" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ -<<<<<<< HEAD - {file = "black-23.9.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:d6bc09188020c9ac2555a498949401ab35bb6bf76d4e0f8ee251694664df6301"}, - {file = "black-23.9.1-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:13ef033794029b85dfea8032c9d3b92b42b526f1ff4bf13b2182ce4e917f5100"}, - {file = "black-23.9.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:75a2dc41b183d4872d3a500d2b9c9016e67ed95738a3624f4751a0cb4818fe71"}, - {file = "black-23.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13a2e4a93bb8ca74a749b6974925c27219bb3df4d42fc45e948a5d9feb5122b7"}, - {file = "black-23.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:adc3e4442eef57f99b5590b245a328aad19c99552e0bdc7f0b04db6656debd80"}, - {file = "black-23.9.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:8431445bf62d2a914b541da7ab3e2b4f3bc052d2ccbf157ebad18ea126efb91f"}, - {file = "black-23.9.1-cp311-cp311-macosx_10_16_universal2.whl", hash = 
"sha256:8fc1ddcf83f996247505db6b715294eba56ea9372e107fd54963c7553f2b6dfe"}, - {file = "black-23.9.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:7d30ec46de88091e4316b17ae58bbbfc12b2de05e069030f6b747dfc649ad186"}, - {file = "black-23.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031e8c69f3d3b09e1aa471a926a1eeb0b9071f80b17689a655f7885ac9325a6f"}, - {file = "black-23.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:538efb451cd50f43aba394e9ec7ad55a37598faae3348d723b59ea8e91616300"}, - {file = "black-23.9.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:638619a559280de0c2aa4d76f504891c9860bb8fa214267358f0a20f27c12948"}, - {file = "black-23.9.1-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:a732b82747235e0542c03bf352c126052c0fbc458d8a239a94701175b17d4855"}, - {file = "black-23.9.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:cf3a4d00e4cdb6734b64bf23cd4341421e8953615cba6b3670453737a72ec204"}, - {file = "black-23.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf99f3de8b3273a8317681d8194ea222f10e0133a24a7548c73ce44ea1679377"}, - {file = "black-23.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:14f04c990259576acd093871e7e9b14918eb28f1866f91968ff5524293f9c573"}, - {file = "black-23.9.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:c619f063c2d68f19b2d7270f4cf3192cb81c9ec5bc5ba02df91471d0b88c4c5c"}, - {file = "black-23.9.1-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:6a3b50e4b93f43b34a9d3ef00d9b6728b4a722c997c99ab09102fd5efdb88325"}, - {file = "black-23.9.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c46767e8df1b7beefb0899c4a95fb43058fa8500b6db144f4ff3ca38eb2f6393"}, - {file = "black-23.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50254ebfa56aa46a9fdd5d651f9637485068a1adf42270148cd101cdf56e0ad9"}, - {file = "black-23.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:403397c033adbc45c2bd41747da1f7fc7eaa44efbee256b53842470d4ac5a70f"}, - {file = 
"black-23.9.1-py3-none-any.whl", hash = "sha256:6ccd59584cc834b6d127628713e4b6b968e5f79572da66284532525a042549f9"}, - {file = "black-23.9.1.tar.gz", hash = "sha256:24b6b3ff5c6d9ea08a8888f6977eae858e1f340d7260cf56d70a49823236b62d"}, -======= {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"}, {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"}, {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"}, @@ -353,7 +264,6 @@ files = [ {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"}, {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"}, {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -396,13 +306,8 @@ description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ -<<<<<<< HEAD - {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, - {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, -======= {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [[package]] @@ -482,107 +387,11 @@ pycparser = "*" [[package]] name = "charset-normalizer" -<<<<<<< HEAD -version = 
"3.3.0" -======= version = "3.3.2" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ -<<<<<<< HEAD - {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, - {file = 
"charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, - {file = 
"charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, - {file = 
"charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, - {file = 
"charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, - {file = 
"charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, - {file = 
"charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, - {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, -======= {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, @@ -673,7 +482,6 @@ files = [ {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = 
"sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [[package]] @@ -848,40 +656,11 @@ toml = ["tomli"] [[package]] name = "cryptography" -<<<<<<< HEAD -version = "41.0.4" -======= version = "41.0.5" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ -<<<<<<< HEAD - {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"}, - {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f"}, - {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714"}, - {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb"}, - {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13"}, - {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143"}, - {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397"}, - {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860"}, - {file = "cryptography-41.0.4-cp37-abi3-win32.whl", hash = "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd"}, - {file = "cryptography-41.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d"}, - {file = "cryptography-41.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67"}, - {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e"}, - {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829"}, - {file = "cryptography-41.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca"}, - {file = "cryptography-41.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d"}, - {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac"}, - {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9"}, - {file = "cryptography-41.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f"}, - {file = "cryptography-41.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91"}, - {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8"}, - {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", 
hash = "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6"}, - {file = "cryptography-41.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311"}, - {file = "cryptography-41.0.4.tar.gz", hash = "sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a"}, -======= {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"}, {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"}, {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"}, @@ -905,7 +684,6 @@ files = [ {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"}, {file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"}, {file = "cryptography-41.0.5.tar.gz", hash = "sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -921,22 +699,6 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] -[[package]] -name = "dataclasses-json" -version = "0.6.1" -description = "Easily serialize dataclasses to and from JSON." 
-category = "main" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "dataclasses_json-0.6.1-py3-none-any.whl", hash = "sha256:1bd8418a61fe3d588bb0079214d7fb71d44937da40742b787256fd53b26b6c80"}, - {file = "dataclasses_json-0.6.1.tar.gz", hash = "sha256:a53c220c35134ce08211a1057fd0e5bf76dc5331627c6b241cacbc570a89faae"}, -] - -[package.dependencies] -marshmallow = ">=3.18.0,<4.0.0" -typing-inspect = ">=0.4.0,<1" - [[package]] name = "dateparser" version = "1.1.8" @@ -1043,10 +805,6 @@ packaging = "*" name = "dill" version = "0.3.7" description = "serialize all of Python" -<<<<<<< HEAD -category = "main" -======= ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d optional = false python-versions = ">=3.7" files = [ @@ -1106,24 +864,13 @@ test = ["pytest (>=6)"] [[package]] name = "executing" -<<<<<<< HEAD -version = "2.0.0" -======= version = "2.0.1" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "Get the currently executing AST node of a frame, and other information" optional = false -<<<<<<< HEAD -python-versions = "*" -files = [ - {file = "executing-2.0.0-py2.py3-none-any.whl", hash = "sha256:06df6183df67389625f4e763921c6cf978944721abf3e714000200aab95b0657"}, - {file = "executing-2.0.0.tar.gz", hash = "sha256:0ff053696fdeef426cda5bd18eacd94f82c91f49823a2e9090124212ceea9b08"}, -======= python-versions = ">=3.5" files = [ {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.extras] @@ -1231,22 +978,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -<<<<<<< HEAD -version = "2.103.0" -======= version = "2.106.0" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "Google API Client Library for Python" optional = false 
python-versions = ">=3.7" files = [ -<<<<<<< HEAD - {file = "google-api-python-client-2.103.0.tar.gz", hash = "sha256:5b48dc23913b9a1b447991add03f27c335831559b5a870c522316eae671caf44"}, - {file = "google_api_python_client-2.103.0-py2.py3-none-any.whl", hash = "sha256:5d6cf80cc34598a85b73e7e689e6eb1ba34f342095aeab9ec408f94521382a7c"}, -======= {file = "google-api-python-client-2.106.0.tar.gz", hash = "sha256:f6a3862be2f6e5e0536d7bd47b5af3f24ac0b9147c76c830cafb3329d71d5724"}, {file = "google_api_python_client-2.106.0-py2.py3-none-any.whl", hash = "sha256:c47c0dae5dd20aa43e4ea184566fe59d0c8fd0b86dd223b29040d8ea4f7ed6ea"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -1258,22 +996,13 @@ uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -<<<<<<< HEAD -version = "2.23.3" -======= version = "2.23.4" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ -<<<<<<< HEAD - {file = "google-auth-2.23.3.tar.gz", hash = "sha256:6864247895eea5d13b9c57c9e03abb49cb94ce2dc7c58e91cba3248c7477c9e3"}, - {file = "google_auth-2.23.3-py2.py3-none-any.whl", hash = "sha256:a8f4608e65c244ead9e0538f181a96c6e11199ec114d41f1d7b1bffa96937bda"}, -======= {file = "google-auth-2.23.4.tar.gz", hash = "sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3"}, {file = "google_auth-2.23.4-py2.py3-none-any.whl", hash = "sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -1430,79 +1159,11 @@ vertica = ["sqlalchemy (>=1.3.18,<2.0.0)", "sqlalchemy-vertica-python (>=0.5.10) [[package]] name = "greenlet" -<<<<<<< HEAD -version = "3.0.0" -======= version = "3.0.1" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" files = [ -<<<<<<< HEAD - {file = 
"greenlet-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e09dea87cc91aea5500262993cbd484b41edf8af74f976719dd83fe724644cd6"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47932c434a3c8d3c86d865443fadc1fbf574e9b11d6650b656e602b1797908a"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bdfaeecf8cc705d35d8e6de324bf58427d7eafb55f67050d8f28053a3d57118c"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a68d670c8f89ff65c82b936275369e532772eebc027c3be68c6b87ad05ca695"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ad562a104cd41e9d4644f46ea37167b93190c6d5e4048fcc4b80d34ecb278f"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02a807b2a58d5cdebb07050efe3d7deaf915468d112dfcf5e426d0564aa3aa4a"}, - {file = "greenlet-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b1660a15a446206c8545edc292ab5c48b91ff732f91b3d3b30d9a915d5ec4779"}, - {file = "greenlet-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:813720bd57e193391dfe26f4871186cf460848b83df7e23e6bef698a7624b4c9"}, - {file = "greenlet-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:aa15a2ec737cb609ed48902b45c5e4ff6044feb5dcdfcf6fa8482379190330d7"}, - {file = "greenlet-3.0.0-cp310-universal2-macosx_11_0_x86_64.whl", hash = "sha256:7709fd7bb02b31908dc8fd35bfd0a29fc24681d5cc9ac1d64ad07f8d2b7db62f"}, - {file = "greenlet-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:211ef8d174601b80e01436f4e6905aca341b15a566f35a10dd8d1e93f5dbb3b7"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6512592cc49b2c6d9b19fbaa0312124cd4c4c8a90d28473f86f92685cc5fef8e"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:871b0a8835f9e9d461b7fdaa1b57e3492dd45398e87324c047469ce2fc9f516c"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b505fcfc26f4148551826a96f7317e02c400665fa0883fe505d4fcaab1dabfdd"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123910c58234a8d40eaab595bc56a5ae49bdd90122dde5bdc012c20595a94c14"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:96d9ea57292f636ec851a9bb961a5cc0f9976900e16e5d5647f19aa36ba6366b"}, - {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b72b802496cccbd9b31acea72b6f87e7771ccfd7f7927437d592e5c92ed703c"}, - {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:527cd90ba3d8d7ae7dceb06fda619895768a46a1b4e423bdb24c1969823b8362"}, - {file = "greenlet-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:37f60b3a42d8b5499be910d1267b24355c495064f271cfe74bf28b17b099133c"}, - {file = "greenlet-3.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1482fba7fbed96ea7842b5a7fc11d61727e8be75a077e603e8ab49d24e234383"}, - {file = "greenlet-3.0.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:be557119bf467d37a8099d91fbf11b2de5eb1fd5fc5b91598407574848dc910f"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73b2f1922a39d5d59cc0e597987300df3396b148a9bd10b76a058a2f2772fc04"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1e22c22f7826096ad503e9bb681b05b8c1f5a8138469b255eb91f26a76634f2"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d363666acc21d2c204dd8705c0e0457d7b2ee7a76cb16ffc099d6799744ac99"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:334ef6ed8337bd0b58bb0ae4f7f2dcc84c9f116e474bb4ec250a8bb9bd797a66"}, - 
{file = "greenlet-3.0.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6672fdde0fd1a60b44fb1751a7779c6db487e42b0cc65e7caa6aa686874e79fb"}, - {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:952256c2bc5b4ee8df8dfc54fc4de330970bf5d79253c863fb5e6761f00dda35"}, - {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:269d06fa0f9624455ce08ae0179430eea61085e3cf6457f05982b37fd2cefe17"}, - {file = "greenlet-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9adbd8ecf097e34ada8efde9b6fec4dd2a903b1e98037adf72d12993a1c80b51"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b5ce7f40f0e2f8b88c28e6691ca6806814157ff05e794cdd161be928550f4c"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf94aa539e97a8411b5ea52fc6ccd8371be9550c4041011a091eb8b3ca1d810"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80dcd3c938cbcac986c5c92779db8e8ce51a89a849c135172c88ecbdc8c056b7"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e52a712c38e5fb4fd68e00dc3caf00b60cb65634d50e32281a9d6431b33b4af1"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5539f6da3418c3dc002739cb2bb8d169056aa66e0c83f6bacae0cd3ac26b423"}, - {file = "greenlet-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:343675e0da2f3c69d3fb1e894ba0a1acf58f481f3b9372ce1eb465ef93cf6fed"}, - {file = "greenlet-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:abe1ef3d780de56defd0c77c5ba95e152f4e4c4e12d7e11dd8447d338b85a625"}, - {file = "greenlet-3.0.0-cp37-cp37m-win32.whl", hash = "sha256:e693e759e172fa1c2c90d35dea4acbdd1d609b6936115d3739148d5e4cd11947"}, - {file = "greenlet-3.0.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:bdd696947cd695924aecb3870660b7545a19851f93b9d327ef8236bfc49be705"}, - {file = "greenlet-3.0.0-cp37-universal2-macosx_11_0_x86_64.whl", hash = "sha256:cc3e2679ea13b4de79bdc44b25a0c4fcd5e94e21b8f290791744ac42d34a0353"}, - {file = "greenlet-3.0.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:63acdc34c9cde42a6534518e32ce55c30f932b473c62c235a466469a710bfbf9"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a1a6244ff96343e9994e37e5b4839f09a0207d35ef6134dce5c20d260d0302c"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b822fab253ac0f330ee807e7485769e3ac85d5eef827ca224feaaefa462dc0d0"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8060b32d8586e912a7b7dac2d15b28dbbd63a174ab32f5bc6d107a1c4143f40b"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:621fcb346141ae08cb95424ebfc5b014361621b8132c48e538e34c3c93ac7365"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6bb36985f606a7c49916eff74ab99399cdfd09241c375d5a820bb855dfb4af9f"}, - {file = "greenlet-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10b5582744abd9858947d163843d323d0b67be9432db50f8bf83031032bc218d"}, - {file = "greenlet-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f351479a6914fd81a55c8e68963609f792d9b067fb8a60a042c585a621e0de4f"}, - {file = "greenlet-3.0.0-cp38-cp38-win32.whl", hash = "sha256:9de687479faec7db5b198cc365bc34addd256b0028956501f4d4d5e9ca2e240a"}, - {file = "greenlet-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:3fd2b18432e7298fcbec3d39e1a0aa91ae9ea1c93356ec089421fabc3651572b"}, - {file = "greenlet-3.0.0-cp38-universal2-macosx_11_0_x86_64.whl", hash = "sha256:3c0d36f5adc6e6100aedbc976d7428a9f7194ea79911aa4bf471f44ee13a9464"}, - {file = "greenlet-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:4cd83fb8d8e17633ad534d9ac93719ef8937568d730ef07ac3a98cb520fd93e4"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a5b2d4cdaf1c71057ff823a19d850ed5c6c2d3686cb71f73ae4d6382aaa7a06"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e7dcdfad252f2ca83c685b0fa9fba00e4d8f243b73839229d56ee3d9d219314"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c94e4e924d09b5a3e37b853fe5924a95eac058cb6f6fb437ebb588b7eda79870"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad6fb737e46b8bd63156b8f59ba6cdef46fe2b7db0c5804388a2d0519b8ddb99"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d55db1db455c59b46f794346efce896e754b8942817f46a1bada2d29446e305a"}, - {file = "greenlet-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:56867a3b3cf26dc8a0beecdb4459c59f4c47cdd5424618c08515f682e1d46692"}, - {file = "greenlet-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a812224a5fb17a538207e8cf8e86f517df2080c8ee0f8c1ed2bdaccd18f38f4"}, - {file = "greenlet-3.0.0-cp39-cp39-win32.whl", hash = "sha256:0d3f83ffb18dc57243e0151331e3c383b05e5b6c5029ac29f754745c800f8ed9"}, - {file = "greenlet-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:831d6f35037cf18ca5e80a737a27d822d87cd922521d18ed3dbc8a6967be50ce"}, - {file = "greenlet-3.0.0-cp39-universal2-macosx_11_0_x86_64.whl", hash = "sha256:a048293392d4e058298710a54dfaefcefdf49d287cd33fb1f7d63d55426e4355"}, - {file = "greenlet-3.0.0.tar.gz", hash = "sha256:19834e3f91f485442adc1ee440171ec5d9a4840a1f7bd5ed97833544719ce10b"}, -======= {file = "greenlet-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f89e21afe925fcfa655965ca8ea10f24773a1791400989ff32f467badfe4a064"}, {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:28e89e232c7593d33cac35425b58950789962011cc274aa43ef8865f2e11f46d"}, {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8ba29306c5de7717b5761b9ea74f9c72b9e2b834e24aa984da99cbfc70157fd"}, @@ -1560,7 +1221,6 @@ files = [ {file = "greenlet-3.0.1-cp39-cp39-win32.whl", hash = "sha256:cf868e08690cb89360eebc73ba4be7fb461cfbc6168dd88e2fbbe6f31812cd57"}, {file = "greenlet-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:ac4a39d1abae48184d420aa8e5e63efd1b75c8444dd95daa3e03f6c6310e9619"}, {file = "greenlet-3.0.1.tar.gz", hash = "sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.extras] @@ -1608,17 +1268,10 @@ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" optional = false -<<<<<<< HEAD -python-versions = ">=3.7" -files = [ - {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"}, - {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"}, -======= python-versions = ">=3.8" files = [ {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -1678,22 +1331,13 @@ tests = ["pytest", "pytest-cov", "pytest-mock"] [[package]] name = "ipykernel" -<<<<<<< HEAD -version = "6.25.2" -======= version = "6.26.0" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" files = [ -<<<<<<< HEAD - {file = "ipykernel-6.25.2-py3-none-any.whl", hash = 
"sha256:2e2ee359baba19f10251b99415bb39de1e97d04e1fab385646f24f0596510b77"}, - {file = "ipykernel-6.25.2.tar.gz", hash = "sha256:f468ddd1f17acb48c8ce67fcfa49ba6d46d4f9ac0438c1f441be7c3d1372230b"}, -======= {file = "ipykernel-6.26.0-py3-none-any.whl", hash = "sha256:3ba3dc97424b87b31bb46586b5167b3161b32d7820b9201a9e698c71e271602c"}, {file = "ipykernel-6.26.0.tar.gz", hash = "sha256:553856658eb8430bbe9653ea041a41bff63e9606fc4628873fc92a6cf3abd404"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -1720,22 +1364,13 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" [[package]] name = "ipython" -<<<<<<< HEAD -version = "8.16.1" -======= version = "8.17.2" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.9" files = [ -<<<<<<< HEAD - {file = "ipython-8.16.1-py3-none-any.whl", hash = "sha256:0852469d4d579d9cd613c220af7bf0c9cc251813e12be647cb9d463939db9b1e"}, - {file = "ipython-8.16.1.tar.gz", hash = "sha256:ad52f58fca8f9f848e256c629eff888efc0528c12fe0f8ec14f33205f23ef938"}, -======= {file = "ipython-8.17.2-py3-none-any.whl", hash = "sha256:1e4d1d666a023e3c93585ba0d8e962867f7a111af322efff6b9c58062b3e5444"}, {file = "ipython-8.17.2.tar.gz", hash = "sha256:126bb57e1895594bb0d91ea3090bbd39384f6fe87c3d57fd558d0670f50339bb"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -1753,28 +1388,17 @@ traitlets = ">=5" typing-extensions = {version = "*", markers = "python_version < \"3.10\""} [package.extras] -<<<<<<< HEAD -all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] 
-black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] -======= all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] black = ["black"] doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] -<<<<<<< HEAD -test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] -======= test = ["pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath"] test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d [[package]] name = "ipywidgets" @@ -1908,10 +1532,6 @@ i18n = ["Babel (>=2.7)"] name = "json5" version = "0.9.14" description = "A Python implementation of the JSON5 data format." 
-<<<<<<< HEAD -category = "main" -======= ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d optional = false python-versions = "*" files = [ @@ -1949,22 +1569,13 @@ files = [ [[package]] name = "jsonschema" -<<<<<<< HEAD -version = "4.19.1" -======= version = "4.19.2" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ -<<<<<<< HEAD - {file = "jsonschema-4.19.1-py3-none-any.whl", hash = "sha256:cd5f1f9ed9444e554b38ba003af06c0a8c2868131e56bfbef0550fb450c0330e"}, - {file = "jsonschema-4.19.1.tar.gz", hash = "sha256:ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf"}, -======= {file = "jsonschema-4.19.2-py3-none-any.whl", hash = "sha256:eee9e502c788e89cb166d4d37f43084e3b64ab405c795c03d343a4dbc2c810fc"}, {file = "jsonschema-4.19.2.tar.gz", hash = "sha256:c9ff4d7447eed9592c23a12ccee508baf0dd0d59650615e847feb6cdca74f392"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -2001,22 +1612,13 @@ referencing = ">=0.28.0" [[package]] name = "jupyter-client" -<<<<<<< HEAD -version = "8.4.0" -======= version = "8.5.0" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "Jupyter protocol implementation and client libraries" optional = false python-versions = ">=3.8" files = [ -<<<<<<< HEAD - {file = "jupyter_client-8.4.0-py3-none-any.whl", hash = "sha256:6a2a950ec23a8f62f9e4c66acec7f0ea6c7d1f80ba0992e747b10c56ce2e6dbe"}, - {file = "jupyter_client-8.4.0.tar.gz", hash = "sha256:dc1b857d5d7d76ac101766c6e9b646bf18742721126e72e5d484c75a993cada2"}, -======= {file = "jupyter_client-8.5.0-py3-none-any.whl", hash = "sha256:c3877aac7257ec68d79b5c622ce986bd2a992ca42f6ddc9b4dd1da50e89f7028"}, {file = "jupyter_client-8.5.0.tar.gz", hash = "sha256:e8754066510ce456358df363f97eae64b50860f30dc1fe8c6771440db3be9a63"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -2033,22 +1635,13 @@ test = 
["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt [[package]] name = "jupyter-core" -<<<<<<< HEAD -version = "5.4.0" -======= version = "5.5.0" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "Jupyter core package. A base package on which Jupyter projects rely." optional = false python-versions = ">=3.8" files = [ -<<<<<<< HEAD - {file = "jupyter_core-5.4.0-py3-none-any.whl", hash = "sha256:66e252f675ac04dcf2feb6ed4afb3cd7f68cf92f483607522dc251f32d471571"}, - {file = "jupyter_core-5.4.0.tar.gz", hash = "sha256:e4b98344bb94ee2e3e6c4519a97d001656009f9cb2b7f2baf15b3c205770011d"}, -======= {file = "jupyter_core-5.5.0-py3-none-any.whl", hash = "sha256:e11e02cd8ae0a9de5c6c44abf5727df9f2581055afe00b22183f621ba3585805"}, {file = "jupyter_core-5.5.0.tar.gz", hash = "sha256:880b86053bf298a8724994f95e99b99130659022a4f7f45f563084b6223861d3"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -2084,35 +1677,6 @@ traitlets = ">=5.3" cli = ["click", "rich"] docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] -<<<<<<< HEAD - -[[package]] -name = "jupyter-lsp" -version = "2.2.0" -description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" -category = "main" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter-lsp-2.2.0.tar.gz", hash = "sha256:8ebbcb533adb41e5d635eb8fe82956b0aafbf0fd443b6c4bfa906edeeb8635a1"}, - {file = "jupyter_lsp-2.2.0-py3-none-any.whl", hash = "sha256:9e06b8b4f7dd50300b70dd1a78c0c3b0c3d8fa68e0f2d8a5d1fbab62072aca3f"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-server = ">=1.1.2" - -[[package]] -name = "jupyter-server" -version = "2.8.0" -description = "The backend—i.e. 
core services, APIs, and REST endpoints—to Jupyter web applications." -category = "main" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_server-2.8.0-py3-none-any.whl", hash = "sha256:c57270faa6530393ae69783a2d2f1874c718b9f109080581ea076b05713249fa"}, - {file = "jupyter_server-2.8.0.tar.gz", hash = "sha256:b11e2ba80667c75f55630faf8ac3d5809f8734f9006d65cce117c46a0a516ab8"}, -======= [[package]] name = "jupyter-lsp" @@ -2138,7 +1702,6 @@ python-versions = ">=3.8" files = [ {file = "jupyter_server-2.9.1-py3-none-any.whl", hash = "sha256:21ad1a3d455d5a79ce4bef5201925cd17510c17898cf9d54e3ccfb6b12734948"}, {file = "jupyter_server-2.9.1.tar.gz", hash = "sha256:9ba71be4b9c16e479e4c50c929f8ac4b1015baf90237a08681397a98c76c7e5e"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -2189,10 +1752,6 @@ test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", name = "jupyterlab" version = "4.0.7" description = "JupyterLab computational environment" -<<<<<<< HEAD -category = "main" -======= ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d optional = false python-versions = ">=3.8" files = [ @@ -2225,10 +1784,6 @@ test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-cons name = "jupyterlab-pygments" version = "0.2.2" description = "Pygments theme using JupyterLab CSS variables" -<<<<<<< HEAD -category = "main" -======= ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d optional = false python-versions = ">=3.7" files = [ @@ -2240,10 +1795,6 @@ files = [ name = "jupyterlab-server" version = "2.25.0" description = "A set of server components for JupyterLab and JupyterLab like applications." 
-<<<<<<< HEAD -category = "main" -======= ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d optional = false python-versions = ">=3.8" files = [ @@ -2479,44 +2030,11 @@ files = [ [[package]] name = "mypy" -<<<<<<< HEAD -version = "1.6.0" -======= version = "1.6.1" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ -<<<<<<< HEAD - {file = "mypy-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:091f53ff88cb093dcc33c29eee522c087a438df65eb92acd371161c1f4380ff0"}, - {file = "mypy-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb7ff4007865833c470a601498ba30462b7374342580e2346bf7884557e40531"}, - {file = "mypy-1.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49499cf1e464f533fc45be54d20a6351a312f96ae7892d8e9f1708140e27ce41"}, - {file = "mypy-1.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4c192445899c69f07874dabda7e931b0cc811ea055bf82c1ababf358b9b2a72c"}, - {file = "mypy-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:3df87094028e52766b0a59a3e46481bb98b27986ed6ded6a6cc35ecc75bb9182"}, - {file = "mypy-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c8835a07b8442da900db47ccfda76c92c69c3a575872a5b764332c4bacb5a0a"}, - {file = "mypy-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:24f3de8b9e7021cd794ad9dfbf2e9fe3f069ff5e28cb57af6f873ffec1cb0425"}, - {file = "mypy-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:856bad61ebc7d21dbc019b719e98303dc6256cec6dcc9ebb0b214b81d6901bd8"}, - {file = "mypy-1.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:89513ddfda06b5c8ebd64f026d20a61ef264e89125dc82633f3c34eeb50e7d60"}, - {file = "mypy-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:9f8464ed410ada641c29f5de3e6716cbdd4f460b31cf755b2af52f2d5ea79ead"}, - {file = "mypy-1.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:971104bcb180e4fed0d7bd85504c9036346ab44b7416c75dd93b5c8c6bb7e28f"}, - {file = "mypy-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab98b8f6fdf669711f3abe83a745f67f50e3cbaea3998b90e8608d2b459fd566"}, - {file = "mypy-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a69db3018b87b3e6e9dd28970f983ea6c933800c9edf8c503c3135b3274d5ad"}, - {file = "mypy-1.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dccd850a2e3863891871c9e16c54c742dba5470f5120ffed8152956e9e0a5e13"}, - {file = "mypy-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:f8598307150b5722854f035d2e70a1ad9cc3c72d392c34fffd8c66d888c90f17"}, - {file = "mypy-1.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fea451a3125bf0bfe716e5d7ad4b92033c471e4b5b3e154c67525539d14dc15a"}, - {file = "mypy-1.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e28d7b221898c401494f3b77db3bac78a03ad0a0fff29a950317d87885c655d2"}, - {file = "mypy-1.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4b7a99275a61aa22256bab5839c35fe8a6887781862471df82afb4b445daae6"}, - {file = "mypy-1.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7469545380dddce5719e3656b80bdfbb217cfe8dbb1438532d6abc754b828fed"}, - {file = "mypy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:7807a2a61e636af9ca247ba8494031fb060a0a744b9fee7de3a54bed8a753323"}, - {file = "mypy-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2dad072e01764823d4b2f06bc7365bb1d4b6c2f38c4d42fade3c8d45b0b4b67"}, - {file = "mypy-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b19006055dde8a5425baa5f3b57a19fa79df621606540493e5e893500148c72f"}, - {file = "mypy-1.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31eba8a7a71f0071f55227a8057468b8d2eb5bf578c8502c7f01abaec8141b2f"}, - {file = "mypy-1.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e0db37ac4ebb2fee7702767dfc1b773c7365731c22787cb99f507285014fcaf"}, - {file = 
"mypy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:c69051274762cccd13498b568ed2430f8d22baa4b179911ad0c1577d336ed849"}, - {file = "mypy-1.6.0-py3-none-any.whl", hash = "sha256:9e1589ca150a51d9d00bb839bfeca2f7a04f32cd62fad87a847bc0818e15d7dc"}, - {file = "mypy-1.6.0.tar.gz", hash = "sha256:4f3d27537abde1be6d5f2c96c29a454da333a2a271ae7d5bc7110e6d4b7beb3f"}, -======= {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"}, {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"}, {file = "mypy-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a1ad938fee7d2d96ca666c77b7c494c3c5bd88dff792220e1afbebb2925b5e"}, @@ -2544,7 +2062,6 @@ files = [ {file = "mypy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a43ef1c8ddfdb9575691720b6352761f3f53d85f1b57d7745701041053deff30"}, {file = "mypy-1.6.1-py3-none-any.whl", hash = "sha256:4cbe68ef919c28ea561165206a2dcb68591c50f3bcf777932323bc208d949cf1"}, {file = "mypy-1.6.1.tar.gz", hash = "sha256:4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -2561,10 +2078,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
-<<<<<<< HEAD -category = "main" -======= ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d optional = false python-versions = ">=3.5" files = [ @@ -2596,22 +2109,13 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= [[package]] name = "nbconvert" -<<<<<<< HEAD -version = "7.9.2" -======= version = "7.10.0" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "Converting Jupyter Notebooks" optional = false python-versions = ">=3.8" files = [ -<<<<<<< HEAD - {file = "nbconvert-7.9.2-py3-none-any.whl", hash = "sha256:39fe4b8bdd1b0104fdd86fc8a43a9077ba64c720bda4c6132690d917a0a154ee"}, - {file = "nbconvert-7.9.2.tar.gz", hash = "sha256:e56cc7588acc4f93e2bb5a34ec69028e4941797b2bfaf6462f18a41d1cc258c9"}, -======= {file = "nbconvert-7.10.0-py3-none-any.whl", hash = "sha256:8cf1d95e569730f136feb85e4bba25bdcf3a63fefb122d854ddff6771c0ac933"}, {file = "nbconvert-7.10.0.tar.gz", hash = "sha256:4bedff08848626be544de193b7594d98a048073f392178008ff4f171f5e21d26"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -2693,16 +2197,6 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] [[package]] name = "notebook" -<<<<<<< HEAD -version = "7.0.5" -description = "Jupyter Notebook - A web-based notebook environment for interactive computing" -category = "main" -optional = false -python-versions = ">=3.8" -files = [ - {file = "notebook-7.0.5-py3-none-any.whl", hash = "sha256:f26bd66accd54fcd96cc6696fb6c2911f15843b1c524318fd7cbdb32a763e6a6"}, - {file = "notebook-7.0.5.tar.gz", hash = "sha256:9e7c7a91de138bc8b5ee50486a20e70fa4d82d407b5622ec8beac9e13e773181"}, -======= version = "7.0.6" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" optional = false @@ -2710,7 +2204,6 @@ python-versions = ">=3.8" files = [ {file = "notebook-7.0.6-py3-none-any.whl", hash = "sha256:0fe8f67102fea3744fedf652e4c15339390902ca70c5a31c4f547fa23da697cc"}, {file = 
"notebook-7.0.6.tar.gz", hash = "sha256:ec6113b06529019f7f287819af06c97a2baf7a95ac21a8f6e32192898e9f9a58"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -2990,21 +2483,6 @@ files = [ ptyprocess = ">=0.5" [[package]] -<<<<<<< HEAD -name = "pickleshare" -version = "0.7.5" -description = "Tiny 'shelve'-like database with concurrency support" -category = "main" -optional = false -python-versions = "*" -files = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, -] - -[[package]] -======= ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d name = "platformdirs" version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." @@ -3039,17 +2517,10 @@ name = "prometheus-client" version = "0.18.0" description = "Python client for the Prometheus monitoring system." 
optional = false -<<<<<<< HEAD -python-versions = ">=3.6" -files = [ - {file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"}, - {file = "prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"}, -======= python-versions = ">=3.8" files = [ {file = "prometheus_client-0.18.0-py3-none-any.whl", hash = "sha256:8de3ae2755f890826f4b6479e5571d4f74ac17a81345fe69a6778fdb92579184"}, {file = "prometheus_client-0.18.0.tar.gz", hash = "sha256:35f7a8c22139e2bb7ca5a698e92d38145bc8dc74c1c0bf56f25cca886a764e17"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.extras] @@ -3328,17 +2799,10 @@ name = "pyopenssl" version = "23.3.0" description = "Python wrapper module around the OpenSSL library" optional = false -<<<<<<< HEAD -python-versions = ">=3.6" -files = [ - {file = "pyOpenSSL-23.2.0-py3-none-any.whl", hash = "sha256:24f0dc5227396b3e831f4c7f602b950a5e9833d292c8e4a2e06b709292806ae2"}, - {file = "pyOpenSSL-23.2.0.tar.gz", hash = "sha256:276f931f55a452e7dea69c7173e984eb2a4407ce413c918aa34b55f82f9b8bac"}, -======= python-versions = ">=3.7" files = [ {file = "pyOpenSSL-23.3.0-py3-none-any.whl", hash = "sha256:6756834481d9ed5470f4a9393455154bc92fe7a64b7bc6ee2c804e78c52099b2"}, {file = "pyOpenSSL-23.3.0.tar.gz", hash = "sha256:6b2cba5cc46e822750ec3e5a81ee12819850b11303630d575e98108a079c2b12"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -3364,22 +2828,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -<<<<<<< HEAD -version = "7.4.2" -======= version = "7.4.3" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ -<<<<<<< HEAD - {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, - {file = 
"pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, -======= {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -3416,17 +2871,10 @@ name = "pytest-mock" version = "3.12.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false -<<<<<<< HEAD -python-versions = ">=3.7" -files = [ - {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"}, - {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"}, -======= python-versions = ">=3.8" files = [ {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -4046,67 +3494,49 @@ files = [ {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, -<<<<<<< HEAD -======= {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, {file = 
"ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, -<<<<<<< HEAD -======= {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, {file = 
"ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, -<<<<<<< HEAD -======= {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, -<<<<<<< HEAD -======= {file = 
"ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, -<<<<<<< HEAD -======= {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, -<<<<<<< HEAD -======= {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, @@ -4119,13 +3549,8 @@ description = "" optional = false python-versions = ">=3.9,<4.0" files = [ -<<<<<<< HEAD - {file = "schematic_db-0.0.29-py3-none-any.whl", hash = "sha256:e43f1d7c06d877d47036c5a480ac8f22333daa967df67c4d8316091ff4ddc0a5"}, - {file = "schematic_db-0.0.29.tar.gz", hash = "sha256:77d338b34dd8f1e75b9df5b9b3f20de35087285079019d48d162de0d131f3ffb"}, -======= {file = "schematic_db-0.0.dev33-py3-none-any.whl", hash = "sha256:9a274b038e5d3f382fd22300350fb4c02e0f147e5846808b324714fb30bd9e75"}, {file = "schematic_db-0.0.dev33.tar.gz", hash = "sha256:01cadedbfa10915727c0bdf88c9184353db1294d8c941e69a824d16f12bb4701"}, ->>>>>>> 
07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -4353,10 +3778,6 @@ test = ["pytest"] name = "sphinxcontrib-devhelp" version = "1.0.5" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" -<<<<<<< HEAD -category = "main" -======= ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d optional = false python-versions = ">=3.9" files = [ @@ -4407,10 +3828,6 @@ test = ["flake8", "mypy", "pytest"] name = "sphinxcontrib-qthelp" version = "1.0.6" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" -<<<<<<< HEAD -category = "main" -======= ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d optional = false python-versions = ">=3.9" files = [ @@ -4429,10 +3846,6 @@ test = ["pytest"] name = "sphinxcontrib-serializinghtml" version = "1.1.9" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" -<<<<<<< HEAD -category = "main" -======= ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d optional = false python-versions = ">=3.9" files = [ @@ -4452,58 +3865,6 @@ name = "sqlalchemy" version = "2.0.22" description = "Database Abstraction Library" optional = false -<<<<<<< HEAD -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "SQLAlchemy-1.4.49-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2e126cf98b7fd38f1e33c64484406b78e937b1a280e078ef558b95bf5b6895f6"}, - {file = "SQLAlchemy-1.4.49-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:03db81b89fe7ef3857b4a00b63dedd632d6183d4ea5a31c5d8a92e000a41fc71"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:95b9df9afd680b7a3b13b38adf6e3a38995da5e162cc7524ef08e3be4e5ed3e1"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a63e43bf3f668c11bb0444ce6e809c1227b8f067ca1068898f3008a273f52b09"}, - {file = 
"SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca46de16650d143a928d10842939dab208e8d8c3a9a8757600cae9b7c579c5cd"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f835c050ebaa4e48b18403bed2c0fda986525896efd76c245bdd4db995e51a4c"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c21b172dfb22e0db303ff6419451f0cac891d2e911bb9fbf8003d717f1bcf91"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-win32.whl", hash = "sha256:5fb1ebdfc8373b5a291485757bd6431de8d7ed42c27439f543c81f6c8febd729"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-win_amd64.whl", hash = "sha256:f8a65990c9c490f4651b5c02abccc9f113a7f56fa482031ac8cb88b70bc8ccaa"}, - {file = "SQLAlchemy-1.4.49-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8923dfdf24d5aa8a3adb59723f54118dd4fe62cf59ed0d0d65d940579c1170a4"}, - {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9ab2c507a7a439f13ca4499db6d3f50423d1d65dc9b5ed897e70941d9e135b0"}, - {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5debe7d49b8acf1f3035317e63d9ec8d5e4d904c6e75a2a9246a119f5f2fdf3d"}, - {file = "SQLAlchemy-1.4.49-cp311-cp311-win32.whl", hash = "sha256:82b08e82da3756765c2e75f327b9bf6b0f043c9c3925fb95fb51e1567fa4ee87"}, - {file = "SQLAlchemy-1.4.49-cp311-cp311-win_amd64.whl", hash = "sha256:171e04eeb5d1c0d96a544caf982621a1711d078dbc5c96f11d6469169bd003f1"}, - {file = "SQLAlchemy-1.4.49-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f23755c384c2969ca2f7667a83f7c5648fcf8b62a3f2bbd883d805454964a800"}, - {file = 
"SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8396e896e08e37032e87e7fbf4a15f431aa878c286dc7f79e616c2feacdb366c"}, - {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66da9627cfcc43bbdebd47bfe0145bb662041472393c03b7802253993b6b7c90"}, - {file = "SQLAlchemy-1.4.49-cp312-cp312-win32.whl", hash = "sha256:9a06e046ffeb8a484279e54bda0a5abfd9675f594a2e38ef3133d7e4d75b6214"}, - {file = "SQLAlchemy-1.4.49-cp312-cp312-win_amd64.whl", hash = "sha256:7cf8b90ad84ad3a45098b1c9f56f2b161601e4670827d6b892ea0e884569bd1d"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:36e58f8c4fe43984384e3fbe6341ac99b6b4e083de2fe838f0fdb91cebe9e9cb"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b31e67ff419013f99ad6f8fc73ee19ea31585e1e9fe773744c0f3ce58c039c30"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc22807a7e161c0d8f3da34018ab7c97ef6223578fcdd99b1d3e7ed1100a5db"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c14b29d9e1529f99efd550cd04dbb6db6ba5d690abb96d52de2bff4ed518bc95"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40f3470e084d31247aea228aa1c39bbc0904c2b9ccbf5d3cfa2ea2dac06f26d"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-win32.whl", hash = "sha256:706bfa02157b97c136547c406f263e4c6274a7b061b3eb9742915dd774bbc264"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-win_amd64.whl", hash = "sha256:a7f7b5c07ae5c0cfd24c2db86071fb2a3d947da7bd487e359cc91e67ac1c6d2e"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-macosx_11_0_x86_64.whl", hash = 
"sha256:4afbbf5ef41ac18e02c8dc1f86c04b22b7a2125f2a030e25bbb4aff31abb224b"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24e300c0c2147484a002b175f4e1361f102e82c345bf263242f0449672a4bccf"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:393cd06c3b00b57f5421e2133e088df9cabcececcea180327e43b937b5a7caa5"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:201de072b818f8ad55c80d18d1a788729cccf9be6d9dc3b9d8613b053cd4836d"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653ed6817c710d0c95558232aba799307d14ae084cc9b1f4c389157ec50df5c"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-win32.whl", hash = "sha256:647e0b309cb4512b1f1b78471fdaf72921b6fa6e750b9f891e09c6e2f0e5326f"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-win_amd64.whl", hash = "sha256:ab73ed1a05ff539afc4a7f8cf371764cdf79768ecb7d2ec691e3ff89abbc541e"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:37ce517c011560d68f1ffb28af65d7e06f873f191eb3a73af5671e9c3fada08a"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1878ce508edea4a879015ab5215546c444233881301e97ca16fe251e89f1c55"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ab792ca493891d7a45a077e35b418f68435efb3e1706cb8155e20e86a9013c"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e8e608983e6f85d0852ca61f97e521b62e67969e6e640fe6c6b575d4db68557"}, - {file = 
"SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccf956da45290df6e809ea12c54c02ace7f8ff4d765d6d3dfb3655ee876ce58d"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-win32.whl", hash = "sha256:f167c8175ab908ce48bd6550679cc6ea20ae169379e73c7720a28f89e53aa532"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-win_amd64.whl", hash = "sha256:45806315aae81a0c202752558f0df52b42d11dd7ba0097bf71e253b4215f34f4"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b6d0c4b15d65087738a6e22e0ff461b407533ff65a73b818089efc8eb2b3e1de"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a843e34abfd4c797018fd8d00ffffa99fd5184c421f190b6ca99def4087689bd"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:738d7321212941ab19ba2acf02a68b8ee64987b248ffa2101630e8fccb549e0d"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1c890421651b45a681181301b3497e4d57c0d01dc001e10438a40e9a9c25ee77"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d26f280b8f0a8f497bc10573849ad6dc62e671d2468826e5c748d04ed9e670d5"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-win32.whl", hash = "sha256:ec2268de67f73b43320383947e74700e95c6770d0c68c4e615e9897e46296294"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-win_amd64.whl", hash = "sha256:bbdf16372859b8ed3f4d05f925a984771cd2abd18bd187042f24be4886c2a15f"}, - {file = "SQLAlchemy-1.4.49.tar.gz", hash = "sha256:06ff25cbae30c396c4b7737464f2a7fc37a67b7da409993b182b024cec80aed9"}, -======= python-versions = ">=3.7" files = [ {file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:f146c61ae128ab43ea3a0955de1af7e1633942c2b2b4985ac51cc292daf33222"}, @@ -4555,7 +3916,6 @@ files = [ {file = "SQLAlchemy-2.0.22-cp39-cp39-win_amd64.whl", hash = "sha256:92e512a6af769e4725fa5b25981ba790335d42c5977e94ded07db7d641490a85"}, {file = "SQLAlchemy-2.0.22-py3-none-any.whl", hash = "sha256:3076740335e4aaadd7deb3fe6dcb96b3015f1613bd190a4e1634e1b99b02ec86"}, {file = "SQLAlchemy-2.0.22.tar.gz", hash = "sha256:5434cc601aa17570d79e5377f5fd45ff92f9379e2abed0be5e8c2fba8d353d2b"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -4563,11 +3923,7 @@ greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or typing-extensions = ">=4.2.0" [package.extras] -<<<<<<< HEAD -aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] -======= aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] @@ -4578,24 +3934,16 @@ mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)"] mysql = ["mysqlclient (>=1.4.0)"] mysql-connector = ["mysql-connector-python"] -<<<<<<< HEAD -oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"] -======= oracle = ["cx-oracle (>=7)"] oracle-oracledb = ["oracledb (>=1.0.1)"] ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] postgresql-pg8000 = ["pg8000 (>=1.29.1)"] postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] -<<<<<<< HEAD -pymysql = ["pymysql", "pymysql (<1)"] -======= postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d sqlcipher = ["sqlcipher3-binary"] [[package]] @@ -4603,17 +3951,10 @@ name = "sqlalchemy-utils" version = 
"0.41.1" description = "Various utility functions for SQLAlchemy." optional = false -<<<<<<< HEAD -python-versions = "~=3.6" -files = [ - {file = "SQLAlchemy-Utils-0.38.3.tar.gz", hash = "sha256:9f9afba607a40455cf703adfa9846584bf26168a0c5a60a70063b70d65051f4d"}, - {file = "SQLAlchemy_Utils-0.38.3-py3-none-any.whl", hash = "sha256:5c13b5d08adfaa85f3d4e8ec09a75136216fad41346980d02974a70a77988bf9"}, -======= python-versions = ">=3.6" files = [ {file = "SQLAlchemy-Utils-0.41.1.tar.gz", hash = "sha256:a2181bff01eeb84479e38571d2c0718eb52042f9afd8c194d0d02877e84b7d74"}, {file = "SQLAlchemy_Utils-0.41.1-py3-none-any.whl", hash = "sha256:6c96b0768ea3f15c0dc56b363d386138c562752b84f647fb8d31a2223aaab801"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -4671,17 +4012,10 @@ name = "synapseclient" version = "3.1.1" description = "A client for Synapse, a collaborative, open-source research platform that allows teams to share data, track analyses, and collaborate." optional = false -<<<<<<< HEAD -python-versions = ">=3.7" -files = [ - {file = "synapseclient-2.7.2-py3-none-any.whl", hash = "sha256:dd8b1a1b4667d08311bb651469431f43fe2eeab83c0ef1fe5a03c2929aeb26cd"}, - {file = "synapseclient-2.7.2.tar.gz", hash = "sha256:dc5a61f9f495109a0c89aa7d42b641b6ff278280d7961fb450dd5015704fe15b"}, -======= python-versions = ">=3.8" files = [ {file = "synapseclient-3.1.1-py3-none-any.whl", hash = "sha256:d9b732b450ee5c69a8d6875058af6739e0e076eb214fa0d05e7ebb21a0106846"}, {file = "synapseclient-3.1.1.tar.gz", hash = "sha256:ab78a1202725d22c21f92798191c1862e779b2b31c01ff06f59789bb05fa1174"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -4780,10 +4114,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -<<<<<<< HEAD -category = "main" -======= ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d optional = false python-versions = ">=3.7" files = [ @@ -4855,40 +4185,23 @@ telegram = ["requests"] [[package]] 
name = "traitlets" -<<<<<<< HEAD -version = "5.11.2" -======= version = "5.13.0" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "Traitlets Python configuration system" optional = false python-versions = ">=3.8" files = [ -<<<<<<< HEAD - {file = "traitlets-5.11.2-py3-none-any.whl", hash = "sha256:98277f247f18b2c5cabaf4af369187754f4fb0e85911d473f72329db8a7f4fae"}, - {file = "traitlets-5.11.2.tar.gz", hash = "sha256:7564b5bf8d38c40fa45498072bf4dc5e8346eb087bbf1e2ae2d8774f6a0f078e"}, -======= {file = "traitlets-5.13.0-py3-none-any.whl", hash = "sha256:baf991e61542da48fe8aef8b779a9ea0aa38d8a54166ee250d5af5ecf4486619"}, {file = "traitlets-5.13.0.tar.gz", hash = "sha256:9b232b9430c8f57288c1024b34a8f0251ddcc47268927367a0dd3eeaca40deb5"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -<<<<<<< HEAD -test = ["argcomplete (>=3.0.3)", "mypy (>=1.5.1)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] -======= test = ["argcomplete (>=3.0.3)", "mypy (>=1.6.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d [[package]] name = "types-python-dateutil" version = "2.8.19.14" description = "Typing stubs for python-dateutil" -<<<<<<< HEAD -category = "main" -======= ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d optional = false python-versions = "*" files = [ @@ -4906,25 +4219,6 @@ files = [ {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, ] -<<<<<<< HEAD - -[[package]] -name = "typing-inspect" -version = "0.9.0" -description = "Runtime inspection utilities for typing module." 
-category = "main" -optional = false -python-versions = "*" -files = [ - {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, - {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, -] - -[package.dependencies] -mypy-extensions = ">=0.3.0" -typing-extensions = ">=3.7.4" -======= ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d [[package]] name = "tzdata" @@ -4939,24 +4233,13 @@ files = [ [[package]] name = "tzlocal" -<<<<<<< HEAD -version = "5.1" -======= version = "5.2" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "tzinfo object for the local timezone" optional = false -<<<<<<< HEAD -python-versions = ">=3.7" -files = [ - {file = "tzlocal-5.1-py3-none-any.whl", hash = "sha256:2938498395d5f6a898ab8009555cb37a4d360913ad375d4747ef16826b03ef23"}, - {file = "tzlocal-5.1.tar.gz", hash = "sha256:a5ccb2365b295ed964e0a98ad076fe10c495591e75505d34f154d60a7f1ed722"}, -======= python-versions = ">=3.8" files = [ {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.dependencies] @@ -4992,22 +4275,13 @@ files = [ [[package]] name = "urllib3" -<<<<<<< HEAD -version = "1.26.17" -======= version = "1.26.18" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ -<<<<<<< HEAD - {file = "urllib3-1.26.17-py2.py3-none-any.whl", hash = "sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b"}, - {file = "urllib3-1.26.17.tar.gz", hash = "sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21"}, -======= {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [package.extras] @@ -5017,20 +4291,12 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "uwsgi" -<<<<<<< HEAD -version = "2.0.22" -======= version = "2.0.23" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "The uWSGI server" optional = false python-versions = "*" files = [ -<<<<<<< HEAD - {file = "uwsgi-2.0.22.tar.gz", hash = "sha256:4cc4727258671ac5fa17ab422155e9aaef8a2008ebb86e4404b66deaae965db2"}, -======= {file = "uwsgi-2.0.23.tar.gz", hash = "sha256:0cafda0c16f921db7fe42cfaf81b167cf884ee17350efbdd87d1ecece2d7de37"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [[package]] @@ -5051,22 +4317,13 @@ test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] [[package]] name = "wcwidth" -<<<<<<< HEAD -version = "0.2.8" -======= version = "0.2.9" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" files = [ -<<<<<<< HEAD - {file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"}, - {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, -======= {file = "wcwidth-0.2.9-py2.py3-none-any.whl", hash = 
"sha256:9a929bd8380f6cd9571a968a9c8f4353ca58d7cd812a4822bba831f8d685b223"}, {file = "wcwidth-0.2.9.tar.gz", hash = "sha256:a675d1a4a2d24ef67096a04b85b02deeecd8e226f57b5e3a72dbb9ed99d27da8"}, ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d ] [[package]] @@ -5224,10 +4481,6 @@ files = [ name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" -<<<<<<< HEAD -category = "main" -======= ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d optional = false python-versions = ">=3.8" files = [ @@ -5242,8 +4495,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.9.0,<3.11" -<<<<<<< HEAD -content-hash = "c7fbaf60049ed6f4a60971720b8761423fa94647c1e428d8a04f994d0b1095d7" -======= -content-hash = "9b6667bb6094ab24849d6b47c8923dd8140a29ba14ffa45a0f4be52609b49f9c" ->>>>>>> 07d90e7e435303886f80b46aef24f216d7c4066d +content-hash = "9b6667bb6094ab24849d6b47c8923dd8140a29ba14ffa45a0f4be52609b49f9c" \ No newline at end of file From bce17f56c80370fbaa8333ca483557f58331d893 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 7 Nov 2023 10:25:14 -0800 Subject: [PATCH 214/239] regenerate poetry.lock file --- poetry.lock | 664 +++++++++++++++++++++++++++++++++++----------------- 1 file changed, 444 insertions(+), 220 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4bf58751b..aa335ffa3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry and should not be changed by hand. 
[[package]] name = "alabaster" version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -15,6 +16,7 @@ files = [ name = "altair" version = "4.2.0" description = "Altair: A declarative statistical visualization library for Python." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -37,6 +39,7 @@ dev = ["black", "docutils", "flake8", "ipython", "m2r", "mistune (<2.0.0)", "pyt name = "anyio" version = "4.0.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -58,6 +61,7 @@ trio = ["trio (>=0.22)"] name = "appnope" version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" +category = "main" optional = false python-versions = "*" files = [ @@ -69,6 +73,7 @@ files = [ name = "argon2-cffi" version = "23.1.0" description = "Argon2 for Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -89,6 +94,7 @@ typing = ["mypy"] name = "argon2-cffi-bindings" version = "21.2.0" description = "Low-level CFFI bindings for Argon2" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -126,6 +132,7 @@ tests = ["pytest"] name = "arrow" version = "1.3.0" description = "Better dates & times for Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -139,12 +146,13 @@ types-python-dateutil = ">=2.8.10" [package.extras] doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] -test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] +test = ["dateparser (>=1.0.0,<2.0.0)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (>=3.0.0,<4.0.0)"] [[package]] name = "astroid" version = "2.15.8" description = "An 
abstract syntax tree for Python with inference support." +category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -161,6 +169,7 @@ wrapt = {version = ">=1.11,<2", markers = "python_version < \"3.11\""} name = "asttokens" version = "2.4.1" description = "Annotate AST trees with source code positions" +category = "main" optional = false python-versions = "*" files = [ @@ -179,6 +188,7 @@ test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] name = "async-lru" version = "2.0.4" description = "Simple LRU cache for asyncio" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -193,6 +203,7 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -211,6 +222,7 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "babel" version = "2.13.1" description = "Internationalization utilities" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -225,6 +237,7 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] name = "beautifulsoup4" version = "4.12.2" description = "Screen-scraping library" +category = "main" optional = false python-versions = ">=3.6.0" files = [ @@ -243,6 +256,7 @@ lxml = ["lxml"] name = "black" version = "23.10.1" description = "The uncompromising code formatter." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -285,6 +299,7 @@ uvloop = ["uvloop (>=0.15.2)"] name = "bleach" version = "6.1.0" description = "An easy safelist-based HTML-sanitizing tool." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -303,6 +318,7 @@ css = ["tinycss2 (>=1.1.0,<1.3)"] name = "cachetools" version = "5.3.2" description = "Extensible memoizing collections and decorators" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -314,6 +330,7 @@ files = [ name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -325,6 +342,7 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -389,6 +407,7 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -488,6 +507,7 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -502,6 +522,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "click-log" version = "0.4.0" description = "Logging integration for Click" +category = "main" optional = false python-versions = "*" files = [ @@ -516,6 +537,7 @@ click = "*" name = "clickclick" version = "20.10.2" description = "Click utility functions" +category = "main" optional = false python-versions = "*" files = [ @@ -531,6 +553,7 @@ PyYAML = ">=3.11" name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
+category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -540,27 +563,27 @@ files = [ [[package]] name = "comm" -version = "0.1.4" +version = "0.2.0" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." +category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "comm-0.1.4-py3-none-any.whl", hash = "sha256:6d52794cba11b36ed9860999cd10fd02d6b2eac177068fdd585e1e2f8a96e67a"}, - {file = "comm-0.1.4.tar.gz", hash = "sha256:354e40a59c9dd6db50c5cc6b4acc887d82e9603787f83b68c01a80a923984d15"}, + {file = "comm-0.2.0-py3-none-any.whl", hash = "sha256:2da8d9ebb8dd7bfc247adaff99f24dce705638a8042b85cb995066793e391001"}, + {file = "comm-0.2.0.tar.gz", hash = "sha256:a517ea2ca28931c7007a7a99c562a0fa5883cfb48963140cf642c41c948498be"}, ] [package.dependencies] traitlets = ">=4" [package.extras] -lint = ["black (>=22.6.0)", "mdformat (>0.7)", "mdformat-gfm (>=0.3.5)", "ruff (>=0.0.156)"] test = ["pytest"] -typing = ["mypy (>=0.990)"] [[package]] name = "connexion" version = "2.14.2" description = "Connexion - API first applications with OpenAPI/Swagger and Flask" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -591,6 +614,7 @@ tests = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14 name = "coverage" version = "7.3.2" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -658,6 +682,7 @@ toml = ["tomli"] name = "cryptography" version = "41.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -699,10 +724,27 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] +[[package]] +name = "dataclasses-json" +version = "0.6.1" +description = "Easily serialize dataclasses to and from JSON." +category = "main" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "dataclasses_json-0.6.1-py3-none-any.whl", hash = "sha256:1bd8418a61fe3d588bb0079214d7fb71d44937da40742b787256fd53b26b6c80"}, + {file = "dataclasses_json-0.6.1.tar.gz", hash = "sha256:a53c220c35134ce08211a1057fd0e5bf76dc5331627c6b241cacbc570a89faae"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + [[package]] name = "dateparser" version = "1.1.8" description = "Date parsing library designed to parse dates from HTML pages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -725,6 +767,7 @@ langdetect = ["langdetect"] name = "debugpy" version = "1.8.0" description = "An implementation of the Debug Adapter Protocol for Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -752,6 +795,7 @@ files = [ name = "decorator" version = "5.1.1" description = "Decorators for Humans" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -763,6 +807,7 @@ files = [ name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -774,6 +819,7 @@ files = [ name = "deprecated" version = "1.2.14" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -791,6 +837,7 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] name = "deprecation" version = "2.1.0" description = "A library to handle automated deprecations" +category = "main" optional = false python-versions = "*" files = [ @@ -805,6 +852,7 @@ packaging = "*" name = "dill" version = "0.3.7" description = "serialize all of Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -819,6 +867,7 @@ graph = ["objgraph (>=1.7.2)"] name = "docutils" version = "0.20.1" description = "Docutils -- Python Documentation Utilities" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -830,6 +879,7 @@ files = [ name = "entrypoints" version = "0.4" description = "Discover and load entry points from installed packages." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -841,6 +891,7 @@ files = [ name = "et-xmlfile" version = "1.1.0" description = "An implementation of lxml.xmlfile for the standard library" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -852,6 +903,7 @@ files = [ name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -866,6 +918,7 @@ test = ["pytest (>=6)"] name = "executing" version = "2.0.1" description = "Get the currently executing AST node of a frame, and other information" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -880,6 +933,7 @@ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipyth name = "fastjsonschema" version = "2.18.1" description = "Fastest Python implementation of JSON schema" +category = "main" optional = false python-versions = "*" files = [ @@ -894,6 +948,7 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", 
"pytest-benc name = "flake8" version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" optional = false python-versions = ">=3.8.1" files = [ @@ -910,6 +965,7 @@ pyflakes = ">=3.1.0,<3.2.0" name = "flask" version = "2.1.3" description = "A simple framework for building complex web applications." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -932,6 +988,7 @@ dotenv = ["python-dotenv"] name = "flask-cors" version = "3.0.10" description = "A Flask extension adding a decorator for CORS support" +category = "main" optional = false python-versions = "*" files = [ @@ -947,6 +1004,7 @@ Six = "*" name = "fqdn" version = "1.5.1" description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" +category = "main" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" files = [ @@ -958,6 +1016,7 @@ files = [ name = "google-api-core" version = "2.12.0" description = "Google API client core library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -978,17 +1037,18 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.106.0" +version = "2.107.0" description = "Google API Client Library for Python" +category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.106.0.tar.gz", hash = "sha256:f6a3862be2f6e5e0536d7bd47b5af3f24ac0b9147c76c830cafb3329d71d5724"}, - {file = "google_api_python_client-2.106.0-py2.py3-none-any.whl", hash = "sha256:c47c0dae5dd20aa43e4ea184566fe59d0c8fd0b86dd223b29040d8ea4f7ed6ea"}, + {file = "google-api-python-client-2.107.0.tar.gz", hash = "sha256:ef6d4c1a17fe9ec0894fc6d4f61e751c4b859fb33f2ab5b881ceb0b80ba442ba"}, + {file = "google_api_python_client-2.107.0-py2.py3-none-any.whl", hash = "sha256:51d7bf676f41a77b00b7b9c72ace0c1db3dd5a4dd392a13ae897cf4f571a3539"}, ] 
[package.dependencies] -google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0.dev0" +google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0.dev0" google-auth = ">=1.19.0,<3.0.0.dev0" google-auth-httplib2 = ">=0.1.0" httplib2 = ">=0.15.0,<1.dev0" @@ -998,6 +1058,7 @@ uritemplate = ">=3.0.1,<5" name = "google-auth" version = "2.23.4" description = "Google Authentication Library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1021,6 +1082,7 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] name = "google-auth-httplib2" version = "0.1.1" description = "Google Authentication Library: httplib2 transport" +category = "main" optional = false python-versions = "*" files = [ @@ -1036,6 +1098,7 @@ httplib2 = ">=0.19.0" name = "google-auth-oauthlib" version = "0.8.0" description = "Google Authentication Library" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1054,6 +1117,7 @@ tool = ["click (>=6.0.0)"] name = "googleapis-common-protos" version = "1.61.0" description = "Common protobufs used in Google APIs" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1071,6 +1135,7 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] name = "graphviz" version = "0.20.1" description = "Simple Python interface for Graphviz" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1087,6 +1152,7 @@ test = ["coverage", "mock (>=4)", "pytest (>=7)", "pytest-cov", "pytest-mock (>= name = "great-expectations" version = "0.15.50" description = "Always know what to expect from your data." 
+category = "main" optional = false python-versions = "*" files = [ @@ -1161,6 +1227,7 @@ vertica = ["sqlalchemy (>=1.3.18,<2.0.0)", "sqlalchemy-vertica-python (>=0.5.10) name = "greenlet" version = "3.0.1" description = "Lightweight in-process concurrent programming" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1231,6 +1298,7 @@ test = ["objgraph", "psutil"] name = "httplib2" version = "0.22.0" description = "A comprehensive HTTP client library." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1245,6 +1313,7 @@ pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0 name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1256,6 +1325,7 @@ files = [ name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1267,6 +1337,7 @@ files = [ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1286,6 +1357,7 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "inflection" version = "0.5.1" description = "A port of Ruby on Rails inflector to Python" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1297,6 +1369,7 @@ files = [ name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1308,6 +1381,7 @@ files = [ name = "interrogate" version = "1.5.0" description = "Interrogate a codebase for docstring coverage." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1333,6 +1407,7 @@ tests = ["pytest", "pytest-cov", "pytest-mock"] name = "ipykernel" version = "6.26.0" description = "IPython Kernel for Jupyter" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1346,7 +1421,7 @@ comm = ">=0.1.1" debugpy = ">=1.6.5" ipython = ">=7.23.1" jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" @@ -1366,6 +1441,7 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" name = "ipython" version = "8.17.2" description = "IPython: Productive Interactive Computing" +category = "main" optional = false python-versions = ">=3.9" files = [ @@ -1404,6 +1480,7 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pa name = "ipywidgets" version = "8.1.1" description = "Jupyter interactive widgets" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1425,6 +1502,7 @@ test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] name = "isodate" version = "0.6.1" description = "An ISO 8601 date/time/duration parser and formatter" +category = "main" optional = false python-versions = "*" files = [ @@ -1439,6 +1517,7 @@ six = "*" name = "isoduration" version = "20.11.0" description = "Operations with ISO 8601 durations" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1453,6 +1532,7 @@ arrow = ">=0.15.0" name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." +category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1470,6 +1550,7 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1481,6 +1562,7 @@ files = [ name = "jedi" version = "0.19.1" description = "An autocompletion tool for Python that can be used for text editors." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1500,6 +1582,7 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jeepney" version = "0.8.0" description = "Low-level, pure Python DBus protocol wrapper." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1515,6 +1598,7 @@ trio = ["async_generator", "trio"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1532,6 +1616,7 @@ i18n = ["Babel (>=2.7)"] name = "json5" version = "0.9.14" description = "A Python implementation of the JSON5 data format." +category = "main" optional = false python-versions = "*" files = [ @@ -1546,6 +1631,7 @@ dev = ["hypothesis"] name = "jsonpatch" version = "1.33" description = "Apply JSON-Patches (RFC 6902)" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -1560,6 +1646,7 @@ jsonpointer = ">=1.9" name = "jsonpointer" version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -1571,6 +1658,7 @@ files = [ name = "jsonschema" version = "4.19.2" description = "An implementation of JSON Schema validation for Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1600,6 +1688,7 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.7.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1612,18 +1701,19 @@ referencing = ">=0.28.0" [[package]] name = "jupyter-client" -version = "8.5.0" +version = "8.6.0" description = "Jupyter protocol implementation and client libraries" +category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_client-8.5.0-py3-none-any.whl", hash = "sha256:c3877aac7257ec68d79b5c622ce986bd2a992ca42f6ddc9b4dd1da50e89f7028"}, - {file = "jupyter_client-8.5.0.tar.gz", hash = "sha256:e8754066510ce456358df363f97eae64b50860f30dc1fe8c6771440db3be9a63"}, + {file = "jupyter_client-8.6.0-py3-none-any.whl", hash = "sha256:909c474dbe62582ae62b758bca86d6518c85234bdee2d908c778db6d72f39d99"}, + {file = "jupyter_client-8.6.0.tar.gz", hash = "sha256:0642244bb83b4764ae60d07e010e15f0e2d275ec4e918a8f7b80fbbef3ca60c7"}, ] [package.dependencies] importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" tornado = ">=6.2" @@ -1637,6 +1727,7 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt name = "jupyter-core" version = "5.5.0" description = "Jupyter core package. A base package on which Jupyter projects rely." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1655,13 +1746,14 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] [[package]] name = "jupyter-events" -version = "0.8.0" +version = "0.9.0" description = "Jupyter Event System library" +category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_events-0.8.0-py3-none-any.whl", hash = "sha256:81f07375c7673ff298bfb9302b4a981864ec64edaed75ca0fe6f850b9b045525"}, - {file = "jupyter_events-0.8.0.tar.gz", hash = "sha256:fda08f0defce5e16930542ce60634ba48e010830d50073c3dfd235759cee77bf"}, + {file = "jupyter_events-0.9.0-py3-none-any.whl", hash = "sha256:d853b3c10273ff9bc8bb8b30076d65e2c9685579db736873de6c2232dde148bf"}, + {file = "jupyter_events-0.9.0.tar.gz", hash = "sha256:81ad2e4bc710881ec274d31c6c50669d71bbaa5dd9d01e600b56faa85700d399"}, ] [package.dependencies] @@ -1682,6 +1774,7 @@ test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "p name = "jupyter-lsp" version = "2.2.0" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1695,13 +1788,14 @@ jupyter-server = ">=1.1.2" [[package]] name = "jupyter-server" -version = "2.9.1" +version = "2.10.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
+category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server-2.9.1-py3-none-any.whl", hash = "sha256:21ad1a3d455d5a79ce4bef5201925cd17510c17898cf9d54e3ccfb6b12734948"}, - {file = "jupyter_server-2.9.1.tar.gz", hash = "sha256:9ba71be4b9c16e479e4c50c929f8ac4b1015baf90237a08681397a98c76c7e5e"}, + {file = "jupyter_server-2.10.0-py3-none-any.whl", hash = "sha256:dde56c9bc3cb52d7b72cc0f696d15d7163603526f1a758eb4a27405b73eab2a5"}, + {file = "jupyter_server-2.10.0.tar.gz", hash = "sha256:47b8f5e63440125cb1bb8957bf12b18453ee5ed9efe42d2f7b2ca66a7019a278"}, ] [package.dependencies] @@ -1709,7 +1803,7 @@ anyio = ">=3.1.0" argon2-cffi = "*" jinja2 = "*" jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" jupyter-events = ">=0.6.0" jupyter-server-terminals = "*" nbconvert = ">=6.4.4" @@ -1733,6 +1827,7 @@ test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-sc name = "jupyter-server-terminals" version = "0.4.4" description = "A Jupyter Server Extension Providing Terminals." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1750,13 +1845,14 @@ test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", [[package]] name = "jupyterlab" -version = "4.0.7" +version = "4.0.8" description = "JupyterLab computational environment" +category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.0.7-py3-none-any.whl", hash = "sha256:08683045117cc495531fdb39c22ababb9aaac6977a45e67cfad20046564c9c7c"}, - {file = "jupyterlab-4.0.7.tar.gz", hash = "sha256:48792efd9f962b2bcda1f87d72168ff122c288b1d97d32109e4a11b33dc862be"}, + {file = "jupyterlab-4.0.8-py3-none-any.whl", hash = "sha256:2ff5aa2a51eb21df241d6011c236e88bd1ff9a5dbb75bebc54472f9c18bfffa4"}, + {file = "jupyterlab-4.0.8.tar.gz", hash = "sha256:c4fe93f977bcc987bd395d7fae5ab02e0c042bf4e0f7c95196f3e2e578c2fb3a"}, ] [package.dependencies] @@ -1775,7 +1871,7 @@ tornado = ">=6.2.0" traitlets = "*" [package.extras] -dev = ["black[jupyter] (==23.7.0)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.0.286)"] +dev = ["black[jupyter] (==23.10.1)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.0.292)"] docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-tornasync", "sphinx (>=1.8,<7.2.0)", "sphinx-copybutton"] docs-screenshots = ["altair (==5.0.1)", "ipython (==8.14.0)", "ipywidgets (==8.0.6)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post0)", "matplotlib (==3.7.1)", "nbconvert (>=7.0.0)", "pandas (==2.0.2)", "scipy (==1.10.1)", "vega-datasets (==0.9.0)"] test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] @@ -1784,6 +1880,7 @@ test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", 
"pytest-cons name = "jupyterlab-pygments" version = "0.2.2" description = "Pygments theme using JupyterLab CSS variables" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1795,6 +1892,7 @@ files = [ name = "jupyterlab-server" version = "2.25.0" description = "A set of server components for JupyterLab and JupyterLab like applications." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1821,6 +1919,7 @@ test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-v name = "jupyterlab-widgets" version = "3.0.9" description = "Jupyter interactive widgets for JupyterLab" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1832,6 +1931,7 @@ files = [ name = "keyring" version = "23.4.1" description = "Store and access your passwords safely." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1853,6 +1953,7 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "keyrings-alt" version = "3.1" description = "Alternate keyring implementations" +category = "main" optional = false python-versions = ">=2.7" files = [ @@ -1871,6 +1972,7 @@ testing = ["backports.unittest-mock", "collective.checkdocs", "fs (>=0.5,<2)", " name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1916,6 +2018,7 @@ files = [ name = "makefun" version = "1.15.1" description = "Small library to dynamically create python functions." +category = "main" optional = false python-versions = "*" files = [ @@ -1927,6 +2030,7 @@ files = [ name = "markupsafe" version = "2.1.0" description = "Safely add untrusted strings to HTML/XML markup." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1976,6 +2080,7 @@ files = [ name = "marshmallow" version = "3.20.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1996,6 +2101,7 @@ tests = ["pytest", "pytz", "simplejson"] name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2010,6 +2116,7 @@ traitlets = "*" name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2021,6 +2128,7 @@ files = [ name = "mistune" version = "3.0.2" description = "A sane and fast Markdown parser with useful plugins and renderers" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2032,6 +2140,7 @@ files = [ name = "mypy" version = "1.6.1" description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2078,6 +2187,7 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2087,18 +2197,19 @@ files = [ [[package]] name = "nbclient" -version = "0.8.0" +version = "0.9.0" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
+category = "main" optional = false python-versions = ">=3.8.0" files = [ - {file = "nbclient-0.8.0-py3-none-any.whl", hash = "sha256:25e861299e5303a0477568557c4045eccc7a34c17fc08e7959558707b9ebe548"}, - {file = "nbclient-0.8.0.tar.gz", hash = "sha256:f9b179cd4b2d7bca965f900a2ebf0db4a12ebff2f36a711cb66861e4ae158e55"}, + {file = "nbclient-0.9.0-py3-none-any.whl", hash = "sha256:a3a1ddfb34d4a9d17fc744d655962714a866639acd30130e9be84191cd97cd15"}, + {file = "nbclient-0.9.0.tar.gz", hash = "sha256:4b28c207877cf33ef3a9838cdc7a54c5ceff981194a82eac59d558f05487295e"}, ] [package.dependencies] jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" nbformat = ">=5.1" traitlets = ">=5.4" @@ -2109,13 +2220,14 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= [[package]] name = "nbconvert" -version = "7.10.0" +version = "7.11.0" description = "Converting Jupyter Notebooks" +category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "nbconvert-7.10.0-py3-none-any.whl", hash = "sha256:8cf1d95e569730f136feb85e4bba25bdcf3a63fefb122d854ddff6771c0ac933"}, - {file = "nbconvert-7.10.0.tar.gz", hash = "sha256:4bedff08848626be544de193b7594d98a048073f392178008ff4f171f5e21d26"}, + {file = "nbconvert-7.11.0-py3-none-any.whl", hash = "sha256:d1d417b7f34a4e38887f8da5bdfd12372adf3b80f995d57556cb0972c68909fe"}, + {file = "nbconvert-7.11.0.tar.gz", hash = "sha256:abedc01cf543177ffde0bfc2a69726d5a478f6af10a332fc1bf29fcb4f0cf000"}, ] [package.dependencies] @@ -2142,13 +2254,14 @@ docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sp qtpdf = ["nbconvert[qtpng]"] qtpng = ["pyqtwebengine (>=5.15)"] serve = ["tornado (>=6.1)"] -test = ["flaky", "ipykernel", "ipywidgets (>=7)", "pytest", "pytest-dependency"] +test = ["flaky", "ipykernel", "ipywidgets (>=7)", "pytest"] webpdf = ["playwright"] [[package]] name = "nbformat" version = "5.9.2" description = "The 
Jupyter Notebook format" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2170,6 +2283,7 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] name = "nest-asyncio" version = "1.5.8" description = "Patch asyncio to allow nested event loops" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2181,6 +2295,7 @@ files = [ name = "networkx" version = "2.8.8" description = "Python package for creating and manipulating graphs and networks" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2199,6 +2314,7 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] name = "notebook" version = "7.0.6" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2222,6 +2338,7 @@ test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4 name = "notebook-shim" version = "0.2.3" description = "A shim layer for notebook traits and config" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2239,6 +2356,7 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" name = "numpy" version = "1.26.1" description = "Fundamental package for array computing in Python" +category = "main" optional = false python-versions = "<3.13,>=3.9" files = [ @@ -2280,6 +2398,7 @@ files = [ name = "oauth2client" version = "4.1.3" description = "OAuth 2.0 client library" +category = "main" optional = false python-versions = "*" files = [ @@ -2298,6 +2417,7 @@ six = ">=1.6.1" name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2314,6 +2434,7 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "openpyxl" version = "3.1.2" description = "A Python library to 
read/write Excel 2010 xlsx/xlsm files" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2328,6 +2449,7 @@ et-xmlfile = "*" name = "overrides" version = "7.4.0" description = "A decorator to automatically detect mismatch when overriding a method." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2339,6 +2461,7 @@ files = [ name = "packaging" version = "23.2" description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2350,6 +2473,7 @@ files = [ name = "pandarallel" version = "1.6.5" description = "An easy to use library to speed up computation (by parallelizing on multi CPUs) with pandas." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2369,6 +2493,7 @@ doc = ["mkdocs-material"] name = "pandas" version = "1.5.3" description = "Powerful data structures for data analysis, time series, and statistics" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2416,6 +2541,7 @@ test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] name = "pandocfilters" version = "1.5.0" description = "Utilities for writing pandoc filters in python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2427,6 +2553,7 @@ files = [ name = "parso" version = "0.8.3" description = "A Python Parser" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2442,6 +2569,7 @@ testing = ["docopt", "pytest (<6.0.0)"] name = "pathspec" version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2453,6 +2581,7 @@ files = [ name = "pdoc" version = "12.3.1" description = "API Documentation for Python Projects" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2472,6 +2601,7 @@ dev = ["black", "hypothesis", "mypy", "pytest", "pytest-cov", "pytest-timeout", name = "pexpect" version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." +category = "main" optional = false python-versions = "*" files = [ @@ -2486,6 +2616,7 @@ ptyprocess = ">=0.5" name = "platformdirs" version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2501,6 +2632,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2516,6 +2648,7 @@ testing = ["pytest", "pytest-benchmark"] name = "prometheus-client" version = "0.18.0" description = "Python client for the Prometheus monitoring system." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2530,6 +2663,7 @@ twisted = ["twisted"] name = "prompt-toolkit" version = "3.0.39" description = "Library for building powerful interactive command lines in Python" +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -2542,30 +2676,30 @@ wcwidth = "*" [[package]] name = "protobuf" -version = "4.24.4" +version = "4.25.0" description = "" +category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "protobuf-4.24.4-cp310-abi3-win32.whl", hash = "sha256:ec9912d5cb6714a5710e28e592ee1093d68c5ebfeda61983b3f40331da0b1ebb"}, - {file = "protobuf-4.24.4-cp310-abi3-win_amd64.whl", hash = "sha256:1badab72aa8a3a2b812eacfede5020472e16c6b2212d737cefd685884c191085"}, - {file = "protobuf-4.24.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e61a27f362369c2f33248a0ff6896c20dcd47b5d48239cb9720134bef6082e4"}, - {file = "protobuf-4.24.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:bffa46ad9612e6779d0e51ae586fde768339b791a50610d85eb162daeb23661e"}, - {file = "protobuf-4.24.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:b493cb590960ff863743b9ff1452c413c2ee12b782f48beca77c8da3e2ffe9d9"}, - {file = "protobuf-4.24.4-cp37-cp37m-win32.whl", hash = "sha256:dbbed8a56e56cee8d9d522ce844a1379a72a70f453bde6243e3c86c30c2a3d46"}, - {file = "protobuf-4.24.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6b7d2e1c753715dcfe9d284a25a52d67818dd43c4932574307daf836f0071e37"}, - {file = "protobuf-4.24.4-cp38-cp38-win32.whl", hash = "sha256:02212557a76cd99574775a81fefeba8738d0f668d6abd0c6b1d3adcc75503dbe"}, - {file = "protobuf-4.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:2fa3886dfaae6b4c5ed2730d3bf47c7a38a72b3a1f0acb4d4caf68e6874b947b"}, - {file = "protobuf-4.24.4-cp39-cp39-win32.whl", hash = "sha256:b77272f3e28bb416e2071186cb39efd4abbf696d682cbb5dc731308ad37fa6dd"}, - {file = "protobuf-4.24.4-cp39-cp39-win_amd64.whl", hash = 
"sha256:9fee5e8aa20ef1b84123bb9232b3f4a5114d9897ed89b4b8142d81924e05d79b"}, - {file = "protobuf-4.24.4-py3-none-any.whl", hash = "sha256:80797ce7424f8c8d2f2547e2d42bfbb6c08230ce5832d6c099a37335c9c90a92"}, - {file = "protobuf-4.24.4.tar.gz", hash = "sha256:5a70731910cd9104762161719c3d883c960151eea077134458503723b60e3667"}, + {file = "protobuf-4.25.0-cp310-abi3-win32.whl", hash = "sha256:5c1203ac9f50e4853b0a0bfffd32c67118ef552a33942982eeab543f5c634395"}, + {file = "protobuf-4.25.0-cp310-abi3-win_amd64.whl", hash = "sha256:c40ff8f00aa737938c5378d461637d15c442a12275a81019cc2fef06d81c9419"}, + {file = "protobuf-4.25.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:cf21faba64cd2c9a3ed92b7a67f226296b10159dbb8fbc5e854fc90657d908e4"}, + {file = "protobuf-4.25.0-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:32ac2100b0e23412413d948c03060184d34a7c50b3e5d7524ee96ac2b10acf51"}, + {file = "protobuf-4.25.0-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:683dc44c61f2620b32ce4927de2108f3ebe8ccf2fd716e1e684e5a50da154054"}, + {file = "protobuf-4.25.0-cp38-cp38-win32.whl", hash = "sha256:1a3ba712877e6d37013cdc3476040ea1e313a6c2e1580836a94f76b3c176d575"}, + {file = "protobuf-4.25.0-cp38-cp38-win_amd64.whl", hash = "sha256:b2cf8b5d381f9378afe84618288b239e75665fe58d0f3fd5db400959274296e9"}, + {file = "protobuf-4.25.0-cp39-cp39-win32.whl", hash = "sha256:63714e79b761a37048c9701a37438aa29945cd2417a97076048232c1df07b701"}, + {file = "protobuf-4.25.0-cp39-cp39-win_amd64.whl", hash = "sha256:d94a33db8b7ddbd0af7c467475fb9fde0c705fb315a8433c0e2020942b863a1f"}, + {file = "protobuf-4.25.0-py3-none-any.whl", hash = "sha256:1a53d6f64b00eecf53b65ff4a8c23dc95df1fa1e97bb06b8122e5a64f49fc90a"}, + {file = "protobuf-4.25.0.tar.gz", hash = "sha256:68f7caf0d4f012fd194a301420cf6aa258366144d814f358c5b32558228afa7c"}, ] [[package]] name = "psutil" version = "5.9.6" description = "Cross-platform lib for process and system monitoring in Python." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -2594,6 +2728,7 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" +category = "main" optional = false python-versions = "*" files = [ @@ -2605,6 +2740,7 @@ files = [ name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" +category = "main" optional = false python-versions = "*" files = [ @@ -2619,6 +2755,7 @@ tests = ["pytest"] name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2630,6 +2767,7 @@ files = [ name = "pyasn1" version = "0.5.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -2641,6 +2779,7 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -2655,6 +2794,7 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycodestyle" version = "2.11.1" description = "Python style guide checker" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2666,6 +2806,7 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2677,6 +2818,7 @@ files = [ name = "pydantic" version = "1.10.13" description = "Data validation and settings management using python type hints" +category = "main" optional = false python-versions = ">=3.7" 
files = [ @@ -2729,6 +2871,7 @@ email = ["email-validator (>=1.0.3)"] name = "pyflakes" version = "3.1.0" description = "passive checker of Python programs" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2740,6 +2883,7 @@ files = [ name = "pygments" version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2754,6 +2898,7 @@ plugins = ["importlib-metadata"] name = "pygsheets" version = "2.0.6" description = "Google Spreadsheets Python API v4" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2772,6 +2917,7 @@ pandas = ["pandas (>=0.14.0)"] name = "pylint" version = "2.17.7" description = "python code static checker" +category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -2798,6 +2944,7 @@ testutils = ["gitpython (>3)"] name = "pyopenssl" version = "23.3.0" description = "Python wrapper module around the OpenSSL library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2816,6 +2963,7 @@ test = ["flaky", "pretend", "pytest (>=3.0.1)"] name = "pyparsing" version = "3.1.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" optional = false python-versions = ">=3.6.8" files = [ @@ -2830,6 +2978,7 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pytest" version = "7.4.3" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2852,6 +3001,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2870,6 +3020,7 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-mock" version = "3.12.0" description = "Thin-wrapper around the mock package for easier use with pytest" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2887,6 +3038,7 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -2901,6 +3053,7 @@ six = ">=1.5" name = "python-dotenv" version = "0.21.1" description = "Read key-value pairs from a .env file and set them as environment variables" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2915,6 +3068,7 @@ cli = ["click (>=5.0)"] name = "python-json-logger" version = "2.0.7" description = "A python library adding a json log formatter" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2926,6 +3080,7 @@ files = [ name = "pytz" version = "2023.3.post1" description = "World timezone definitions, modern and historical" +category = "main" optional = false python-versions = "*" files = [ @@ -2937,6 +3092,7 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" +category = "main" optional = false python-versions = "*" files = [ @@ -2960,6 +3116,7 @@ files = [ name = "pywin32-ctypes" version = "0.2.2" description = "A (partial) reimplementation of pywin32 using ctypes/cffi" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2971,6 +3128,7 @@ files = [ name = "pywinpty" version = "2.0.12" description = "Pseudo terminal support for Windows from Python." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2986,6 +3144,7 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3045,6 +3204,7 @@ files = [ name = "pyzmq" version = "25.1.1" description = "Python bindings for 0MQ" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3150,6 +3310,7 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} name = "rdflib" version = "6.3.2" description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." +category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -3171,6 +3332,7 @@ networkx = ["networkx (>=2.0.0,<3.0.0)"] name = "referencing" version = "0.30.2" description = "JSON Referencing + Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3186,6 +3348,7 @@ rpds-py = ">=0.7.0" name = "regex" version = "2023.10.3" description = "Alternative regular expression module, to replace re." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3283,6 +3446,7 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3304,6 +3468,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3322,6 +3487,7 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "rfc3339-validator" version = "0.1.4" description = "A pure python RFC3339 validator" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -3336,6 +3502,7 @@ six = "*" name = "rfc3986-validator" version = "0.1.1" description = "Pure python rfc3986 validator" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -3345,116 +3512,118 @@ files = [ [[package]] name = "rpds-py" -version = "0.10.6" +version = "0.12.0" description = "Python bindings to Rust's persistent data structures (rpds)" +category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.10.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:6bdc11f9623870d75692cc33c59804b5a18d7b8a4b79ef0b00b773a27397d1f6"}, - {file = "rpds_py-0.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:26857f0f44f0e791f4a266595a7a09d21f6b589580ee0585f330aaccccb836e3"}, - {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7f5e15c953ace2e8dde9824bdab4bec50adb91a5663df08d7d994240ae6fa31"}, - {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61fa268da6e2e1cd350739bb61011121fa550aa2545762e3dc02ea177ee4de35"}, - {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c48f3fbc3e92c7dd6681a258d22f23adc2eb183c8cb1557d2fcc5a024e80b094"}, - {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0503c5b681566e8b722fe8c4c47cce5c7a51f6935d5c7012c4aefe952a35eed"}, - {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:734c41f9f57cc28658d98270d3436dba65bed0cfc730d115b290e970150c540d"}, - {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a5d7ed104d158c0042a6a73799cf0eb576dfd5fc1ace9c47996e52320c37cb7c"}, - {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e3df0bc35e746cce42579826b89579d13fd27c3d5319a6afca9893a9b784ff1b"}, - {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:73e0a78a9b843b8c2128028864901f55190401ba38aae685350cf69b98d9f7c9"}, - {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5ed505ec6305abd2c2c9586a7b04fbd4baf42d4d684a9c12ec6110deefe2a063"}, - {file = "rpds_py-0.10.6-cp310-none-win32.whl", hash = "sha256:d97dd44683802000277bbf142fd9f6b271746b4846d0acaf0cefa6b2eaf2a7ad"}, - {file = "rpds_py-0.10.6-cp310-none-win_amd64.whl", hash = "sha256:b455492cab07107bfe8711e20cd920cc96003e0da3c1f91297235b1603d2aca7"}, - {file = "rpds_py-0.10.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:e8cdd52744f680346ff8c1ecdad5f4d11117e1724d4f4e1874f3a67598821069"}, - {file = "rpds_py-0.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66414dafe4326bca200e165c2e789976cab2587ec71beb80f59f4796b786a238"}, - {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc435d059f926fdc5b05822b1be4ff2a3a040f3ae0a7bbbe672babb468944722"}, - {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8e7f2219cb72474571974d29a191714d822e58be1eb171f229732bc6fdedf0ac"}, - {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3953c6926a63f8ea5514644b7afb42659b505ece4183fdaaa8f61d978754349e"}, - {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2bb2e4826be25e72013916eecd3d30f66fd076110de09f0e750163b416500721"}, - {file = 
"rpds_py-0.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bf347b495b197992efc81a7408e9a83b931b2f056728529956a4d0858608b80"}, - {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:102eac53bb0bf0f9a275b438e6cf6904904908562a1463a6fc3323cf47d7a532"}, - {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40f93086eef235623aa14dbddef1b9fb4b22b99454cb39a8d2e04c994fb9868c"}, - {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e22260a4741a0e7a206e175232867b48a16e0401ef5bce3c67ca5b9705879066"}, - {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f4e56860a5af16a0fcfa070a0a20c42fbb2012eed1eb5ceeddcc7f8079214281"}, - {file = "rpds_py-0.10.6-cp311-none-win32.whl", hash = "sha256:0774a46b38e70fdde0c6ded8d6d73115a7c39d7839a164cc833f170bbf539116"}, - {file = "rpds_py-0.10.6-cp311-none-win_amd64.whl", hash = "sha256:4a5ee600477b918ab345209eddafde9f91c0acd931f3776369585a1c55b04c57"}, - {file = "rpds_py-0.10.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:5ee97c683eaface61d38ec9a489e353d36444cdebb128a27fe486a291647aff6"}, - {file = "rpds_py-0.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0713631d6e2d6c316c2f7b9320a34f44abb644fc487b77161d1724d883662e31"}, - {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5a53f5998b4bbff1cb2e967e66ab2addc67326a274567697379dd1e326bded7"}, - {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a555ae3d2e61118a9d3e549737bb4a56ff0cec88a22bd1dfcad5b4e04759175"}, - {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:945eb4b6bb8144909b203a88a35e0a03d22b57aefb06c9b26c6e16d72e5eb0f0"}, - {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:52c215eb46307c25f9fd2771cac8135d14b11a92ae48d17968eda5aa9aaf5071"}, - {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1b3cd23d905589cb205710b3988fc8f46d4a198cf12862887b09d7aaa6bf9b9"}, - {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64ccc28683666672d7c166ed465c09cee36e306c156e787acef3c0c62f90da5a"}, - {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:516a611a2de12fbea70c78271e558f725c660ce38e0006f75139ba337d56b1f6"}, - {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9ff93d3aedef11f9c4540cf347f8bb135dd9323a2fc705633d83210d464c579d"}, - {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d858532212f0650be12b6042ff4378dc2efbb7792a286bee4489eaa7ba010586"}, - {file = "rpds_py-0.10.6-cp312-none-win32.whl", hash = "sha256:3c4eff26eddac49d52697a98ea01b0246e44ca82ab09354e94aae8823e8bda02"}, - {file = "rpds_py-0.10.6-cp312-none-win_amd64.whl", hash = "sha256:150eec465dbc9cbca943c8e557a21afdcf9bab8aaabf386c44b794c2f94143d2"}, - {file = "rpds_py-0.10.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:cf693eb4a08eccc1a1b636e4392322582db2a47470d52e824b25eca7a3977b53"}, - {file = "rpds_py-0.10.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4134aa2342f9b2ab6c33d5c172e40f9ef802c61bb9ca30d21782f6e035ed0043"}, - {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e782379c2028a3611285a795b89b99a52722946d19fc06f002f8b53e3ea26ea9"}, - {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f6da6d842195fddc1cd34c3da8a40f6e99e4a113918faa5e60bf132f917c247"}, - {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4a9fe992887ac68256c930a2011255bae0bf5ec837475bc6f7edd7c8dfa254e"}, - {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:b788276a3c114e9f51e257f2a6f544c32c02dab4aa7a5816b96444e3f9ffc336"}, - {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa1afc70a02645809c744eefb7d6ee8fef7e2fad170ffdeacca267fd2674f13"}, - {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bddd4f91eede9ca5275e70479ed3656e76c8cdaaa1b354e544cbcf94c6fc8ac4"}, - {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:775049dfa63fb58293990fc59473e659fcafd953bba1d00fc5f0631a8fd61977"}, - {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c6c45a2d2b68c51fe3d9352733fe048291e483376c94f7723458cfd7b473136b"}, - {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0699ab6b8c98df998c3eacf51a3b25864ca93dab157abe358af46dc95ecd9801"}, - {file = "rpds_py-0.10.6-cp38-none-win32.whl", hash = "sha256:ebdab79f42c5961682654b851f3f0fc68e6cc7cd8727c2ac4ffff955154123c1"}, - {file = "rpds_py-0.10.6-cp38-none-win_amd64.whl", hash = "sha256:24656dc36f866c33856baa3ab309da0b6a60f37d25d14be916bd3e79d9f3afcf"}, - {file = "rpds_py-0.10.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:0898173249141ee99ffcd45e3829abe7bcee47d941af7434ccbf97717df020e5"}, - {file = "rpds_py-0.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e9184fa6c52a74a5521e3e87badbf9692549c0fcced47443585876fcc47e469"}, - {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5752b761902cd15073a527b51de76bbae63d938dc7c5c4ad1e7d8df10e765138"}, - {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99a57006b4ec39dbfb3ed67e5b27192792ffb0553206a107e4aadb39c5004cd5"}, - {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09586f51a215d17efdb3a5f090d7cbf1633b7f3708f60a044757a5d48a83b393"}, - {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:e225a6a14ecf44499aadea165299092ab0cba918bb9ccd9304eab1138844490b"}, - {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2039f8d545f20c4e52713eea51a275e62153ee96c8035a32b2abb772b6fc9e5"}, - {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:34ad87a831940521d462ac11f1774edf867c34172010f5390b2f06b85dcc6014"}, - {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dcdc88b6b01015da066da3fb76545e8bb9a6880a5ebf89e0f0b2e3ca557b3ab7"}, - {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:25860ed5c4e7f5e10c496ea78af46ae8d8468e0be745bd233bab9ca99bfd2647"}, - {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7854a207ef77319ec457c1eb79c361b48807d252d94348305db4f4b62f40f7f3"}, - {file = "rpds_py-0.10.6-cp39-none-win32.whl", hash = "sha256:e6fcc026a3f27c1282c7ed24b7fcac82cdd70a0e84cc848c0841a3ab1e3dea2d"}, - {file = "rpds_py-0.10.6-cp39-none-win_amd64.whl", hash = "sha256:e98c4c07ee4c4b3acf787e91b27688409d918212dfd34c872201273fdd5a0e18"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:68fe9199184c18d997d2e4293b34327c0009a78599ce703e15cd9a0f47349bba"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3339eca941568ed52d9ad0f1b8eb9fe0958fa245381747cecf2e9a78a5539c42"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a360cfd0881d36c6dc271992ce1eda65dba5e9368575663de993eeb4523d895f"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:031f76fc87644a234883b51145e43985aa2d0c19b063e91d44379cd2786144f8"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f36a9d751f86455dc5278517e8b65580eeee37d61606183897f122c9e51cef3"}, - {file = 
"rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:052a832078943d2b2627aea0d19381f607fe331cc0eb5df01991268253af8417"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023574366002bf1bd751ebaf3e580aef4a468b3d3c216d2f3f7e16fdabd885ed"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:defa2c0c68734f4a82028c26bcc85e6b92cced99866af118cd6a89b734ad8e0d"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:879fb24304ead6b62dbe5034e7b644b71def53c70e19363f3c3be2705c17a3b4"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:53c43e10d398e365da2d4cc0bcaf0854b79b4c50ee9689652cdc72948e86f487"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3777cc9dea0e6c464e4b24760664bd8831738cc582c1d8aacf1c3f546bef3f65"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:40578a6469e5d1df71b006936ce95804edb5df47b520c69cf5af264d462f2cbb"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:cf71343646756a072b85f228d35b1d7407da1669a3de3cf47f8bbafe0c8183a4"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10f32b53f424fc75ff7b713b2edb286fdbfc94bf16317890260a81c2c00385dc"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:81de24a1c51cfb32e1fbf018ab0bdbc79c04c035986526f76c33e3f9e0f3356c"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac17044876e64a8ea20ab132080ddc73b895b4abe9976e263b0e30ee5be7b9c2"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e8a78bd4879bff82daef48c14d5d4057f6856149094848c3ed0ecaf49f5aec2"}, - {file = 
"rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78ca33811e1d95cac8c2e49cb86c0fb71f4d8409d8cbea0cb495b6dbddb30a55"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c63c3ef43f0b3fb00571cff6c3967cc261c0ebd14a0a134a12e83bdb8f49f21f"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:7fde6d0e00b2fd0dbbb40c0eeec463ef147819f23725eda58105ba9ca48744f4"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:79edd779cfc46b2e15b0830eecd8b4b93f1a96649bcb502453df471a54ce7977"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9164ec8010327ab9af931d7ccd12ab8d8b5dc2f4c6a16cbdd9d087861eaaefa1"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d29ddefeab1791e3c751e0189d5f4b3dbc0bbe033b06e9c333dca1f99e1d523e"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:30adb75ecd7c2a52f5e76af50644b3e0b5ba036321c390b8e7ec1bb2a16dd43c"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd609fafdcdde6e67a139898196698af37438b035b25ad63704fd9097d9a3482"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6eef672de005736a6efd565577101277db6057f65640a813de6c2707dc69f396"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cf4393c7b41abbf07c88eb83e8af5013606b1cdb7f6bc96b1b3536b53a574b8"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad857f42831e5b8d41a32437f88d86ead6c191455a3499c4b6d15e007936d4cf"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7360573f1e046cb3b0dceeb8864025aa78d98be4bb69f067ec1c40a9e2d9df"}, - {file = 
"rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d08f63561c8a695afec4975fae445245386d645e3e446e6f260e81663bfd2e38"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:f0f17f2ce0f3529177a5fff5525204fad7b43dd437d017dd0317f2746773443d"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:442626328600bde1d09dc3bb00434f5374948838ce75c41a52152615689f9403"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e9616f5bd2595f7f4a04b67039d890348ab826e943a9bfdbe4938d0eba606971"}, - {file = "rpds_py-0.10.6.tar.gz", hash = "sha256:4ce5a708d65a8dbf3748d2474b580d606b1b9f91b5c6ab2a316e0b0cf7a4ba50"}, + {file = "rpds_py-0.12.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:c694bee70ece3b232df4678448fdda245fd3b1bb4ba481fb6cd20e13bb784c46"}, + {file = "rpds_py-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30e5ce9f501fb1f970e4a59098028cf20676dee64fc496d55c33e04bbbee097d"}, + {file = "rpds_py-0.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d72a4315514e5a0b9837a086cb433b004eea630afb0cc129de76d77654a9606f"}, + {file = "rpds_py-0.12.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eebaf8c76c39604d52852366249ab807fe6f7a3ffb0dd5484b9944917244cdbe"}, + {file = "rpds_py-0.12.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a239303acb0315091d54c7ff36712dba24554993b9a93941cf301391d8a997ee"}, + {file = "rpds_py-0.12.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ced40cdbb6dd47a032725a038896cceae9ce267d340f59508b23537f05455431"}, + {file = "rpds_py-0.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c8c0226c71bd0ce9892eaf6afa77ae8f43a3d9313124a03df0b389c01f832de"}, + {file = "rpds_py-0.12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:b8e11715178f3608874508f08e990d3771e0b8c66c73eb4e183038d600a9b274"}, + {file = "rpds_py-0.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5210a0018c7e09c75fa788648617ebba861ae242944111d3079034e14498223f"}, + {file = "rpds_py-0.12.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:171d9a159f1b2f42a42a64a985e4ba46fc7268c78299272ceba970743a67ee50"}, + {file = "rpds_py-0.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:57ec6baec231bb19bb5fd5fc7bae21231860a1605174b11585660236627e390e"}, + {file = "rpds_py-0.12.0-cp310-none-win32.whl", hash = "sha256:7188ddc1a8887194f984fa4110d5a3d5b9b5cd35f6bafdff1b649049cbc0ce29"}, + {file = "rpds_py-0.12.0-cp310-none-win_amd64.whl", hash = "sha256:1e04581c6117ad9479b6cfae313e212fe0dfa226ac727755f0d539cd54792963"}, + {file = "rpds_py-0.12.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:0a38612d07a36138507d69646c470aedbfe2b75b43a4643f7bd8e51e52779624"}, + {file = "rpds_py-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f12d69d568f5647ec503b64932874dade5a20255736c89936bf690951a5e79f5"}, + {file = "rpds_py-0.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f8a1d990dc198a6c68ec3d9a637ba1ce489b38cbfb65440a27901afbc5df575"}, + {file = "rpds_py-0.12.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8c567c664fc2f44130a20edac73e0a867f8e012bf7370276f15c6adc3586c37c"}, + {file = "rpds_py-0.12.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e9e976e0dbed4f51c56db10831c9623d0fd67aac02853fe5476262e5a22acb7"}, + {file = "rpds_py-0.12.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efddca2d02254a52078c35cadad34762adbae3ff01c6b0c7787b59d038b63e0d"}, + {file = "rpds_py-0.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9e7f29c00577aff6b318681e730a519b235af292732a149337f6aaa4d1c5e31"}, + {file = 
"rpds_py-0.12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:389c0e38358fdc4e38e9995e7291269a3aead7acfcf8942010ee7bc5baee091c"}, + {file = "rpds_py-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33ab498f9ac30598b6406e2be1b45fd231195b83d948ebd4bd77f337cb6a2bff"}, + {file = "rpds_py-0.12.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d56b1cd606ba4cedd64bb43479d56580e147c6ef3f5d1c5e64203a1adab784a2"}, + {file = "rpds_py-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1fa73ed22c40a1bec98d7c93b5659cd35abcfa5a0a95ce876b91adbda170537c"}, + {file = "rpds_py-0.12.0-cp311-none-win32.whl", hash = "sha256:dbc25baa6abb205766fb8606f8263b02c3503a55957fcb4576a6bb0a59d37d10"}, + {file = "rpds_py-0.12.0-cp311-none-win_amd64.whl", hash = "sha256:c6b52b7028b547866c2413f614ee306c2d4eafdd444b1ff656bf3295bf1484aa"}, + {file = "rpds_py-0.12.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:9620650c364c01ed5b497dcae7c3d4b948daeae6e1883ae185fef1c927b6b534"}, + {file = "rpds_py-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2124f9e645a94ab7c853bc0a3644e0ca8ffbe5bb2d72db49aef8f9ec1c285733"}, + {file = "rpds_py-0.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281c8b219d4f4b3581b918b816764098d04964915b2f272d1476654143801aa2"}, + {file = "rpds_py-0.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:27ccc93c7457ef890b0dd31564d2a05e1aca330623c942b7e818e9e7c2669ee4"}, + {file = "rpds_py-0.12.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1c562a9bb72244fa767d1c1ab55ca1d92dd5f7c4d77878fee5483a22ffac808"}, + {file = "rpds_py-0.12.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e57919c32ee295a2fca458bb73e4b20b05c115627f96f95a10f9f5acbd61172d"}, + {file = "rpds_py-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fa35ad36440aaf1ac8332b4a4a433d4acd28f1613f0d480995f5cfd3580e90b7"}, + {file = "rpds_py-0.12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e6aea5c0eb5b0faf52c7b5c4a47c8bb64437173be97227c819ffa31801fa4e34"}, + {file = "rpds_py-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:81cf9d306c04df1b45971c13167dc3bad625808aa01281d55f3cf852dde0e206"}, + {file = "rpds_py-0.12.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:08e6e7ff286254016b945e1ab632ee843e43d45e40683b66dd12b73791366dd1"}, + {file = "rpds_py-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4d0a675a7acbbc16179188d8c6d0afb8628604fc1241faf41007255957335a0b"}, + {file = "rpds_py-0.12.0-cp312-none-win32.whl", hash = "sha256:b2287c09482949e0ca0c0eb68b2aca6cf57f8af8c6dfd29dcd3bc45f17b57978"}, + {file = "rpds_py-0.12.0-cp312-none-win_amd64.whl", hash = "sha256:8015835494b21aa7abd3b43fdea0614ee35ef6b03db7ecba9beb58eadf01c24f"}, + {file = "rpds_py-0.12.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6174d6ad6b58a6bcf67afbbf1723420a53d06c4b89f4c50763d6fa0a6ac9afd2"}, + {file = "rpds_py-0.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a689e1ded7137552bea36305a7a16ad2b40be511740b80748d3140614993db98"}, + {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f45321224144c25a62052035ce96cbcf264667bcb0d81823b1bbc22c4addd194"}, + {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aa32205358a76bf578854bf31698a86dc8b2cb591fd1d79a833283f4a403f04b"}, + {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91bd2b7cf0f4d252eec8b7046fa6a43cee17e8acdfc00eaa8b3dbf2f9a59d061"}, + {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3acadbab8b59f63b87b518e09c4c64b142e7286b9ca7a208107d6f9f4c393c5c"}, + {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:429349a510da82c85431f0f3e66212d83efe9fd2850f50f339341b6532c62fe4"}, + {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05942656cb2cb4989cd50ced52df16be94d344eae5097e8583966a1d27da73a5"}, + {file = "rpds_py-0.12.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0c5441b7626c29dbd54a3f6f3713ec8e956b009f419ffdaaa3c80eaf98ddb523"}, + {file = "rpds_py-0.12.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b6b0e17d39d21698185097652c611f9cf30f7c56ccec189789920e3e7f1cee56"}, + {file = "rpds_py-0.12.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3b7a64d43e2a1fa2dd46b678e00cabd9a49ebb123b339ce799204c44a593ae1c"}, + {file = "rpds_py-0.12.0-cp38-none-win32.whl", hash = "sha256:e5bbe011a2cea9060fef1bb3d668a2fd8432b8888e6d92e74c9c794d3c101595"}, + {file = "rpds_py-0.12.0-cp38-none-win_amd64.whl", hash = "sha256:bec29b801b4adbf388314c0d050e851d53762ab424af22657021ce4b6eb41543"}, + {file = "rpds_py-0.12.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:1096ca0bf2d3426cbe79d4ccc91dc5aaa73629b08ea2d8467375fad8447ce11a"}, + {file = "rpds_py-0.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48aa98987d54a46e13e6954880056c204700c65616af4395d1f0639eba11764b"}, + {file = "rpds_py-0.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7979d90ee2190d000129598c2b0c82f13053dba432b94e45e68253b09bb1f0f6"}, + {file = "rpds_py-0.12.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:88857060b690a57d2ea8569bca58758143c8faa4639fb17d745ce60ff84c867e"}, + {file = "rpds_py-0.12.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4eb74d44776b0fb0782560ea84d986dffec8ddd94947f383eba2284b0f32e35e"}, + {file = "rpds_py-0.12.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f62581d7e884dd01ee1707b7c21148f61f2febb7de092ae2f108743fcbef5985"}, + {file = "rpds_py-0.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6f5dcb658d597410bb7c967c1d24eaf9377b0d621358cbe9d2ff804e5dd12e81"}, + {file = "rpds_py-0.12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9bf9acce44e967a5103fcd820fc7580c7b0ab8583eec4e2051aec560f7b31a63"}, + {file = "rpds_py-0.12.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:240687b5be0f91fbde4936a329c9b7589d9259742766f74de575e1b2046575e4"}, + {file = "rpds_py-0.12.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:25740fb56e8bd37692ed380e15ec734be44d7c71974d8993f452b4527814601e"}, + {file = "rpds_py-0.12.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a54917b7e9cd3a67e429a630e237a90b096e0ba18897bfb99ee8bd1068a5fea0"}, + {file = "rpds_py-0.12.0-cp39-none-win32.whl", hash = "sha256:b92aafcfab3d41580d54aca35a8057341f1cfc7c9af9e8bdfc652f83a20ced31"}, + {file = "rpds_py-0.12.0-cp39-none-win_amd64.whl", hash = "sha256:cd316dbcc74c76266ba94eb021b0cc090b97cca122f50bd7a845f587ff4bf03f"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0853da3d5e9bc6a07b2486054a410b7b03f34046c123c6561b535bb48cc509e1"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:cb41ad20064e18a900dd427d7cf41cfaec83bcd1184001f3d91a1f76b3fcea4e"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bf7e7ae61957d5c4026b486be593ed3ec3dca3e5be15e0f6d8cf5d0a4990"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a952ae3eb460c6712388ac2ec706d24b0e651b9396d90c9a9e0a69eb27737fdc"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0bedd91ae1dd142a4dc15970ed2c729ff6c73f33a40fa84ed0cdbf55de87c777"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:761531076df51309075133a6bc1db02d98ec7f66e22b064b1d513bc909f29743"}, + {file = 
"rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2baa6be130e8a00b6cbb9f18a33611ec150b4537f8563bddadb54c1b74b8193"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f05450fa1cd7c525c0b9d1a7916e595d3041ac0afbed2ff6926e5afb6a781b7f"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:81c4d1a3a564775c44732b94135d06e33417e829ff25226c164664f4a1046213"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e888be685fa42d8b8a3d3911d5604d14db87538aa7d0b29b1a7ea80d354c732d"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6f8d7fe73d1816eeb5378409adc658f9525ecbfaf9e1ede1e2d67a338b0c7348"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0831d3ecdea22e4559cc1793f22e77067c9d8c451d55ae6a75bf1d116a8e7f42"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:513ccbf7420c30e283c25c82d5a8f439d625a838d3ba69e79a110c260c46813f"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:301bd744a1adaa2f6a5e06c98f1ac2b6f8dc31a5c23b838f862d65e32fca0d4b"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f8832a4f83d4782a8f5a7b831c47e8ffe164e43c2c148c8160ed9a6d630bc02a"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2416ed743ec5debcf61e1242e012652a4348de14ecc7df3512da072b074440"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35585a8cb5917161f42c2104567bb83a1d96194095fc54a543113ed5df9fa436"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d389ff1e95b6e46ebedccf7fd1fadd10559add595ac6a7c2ea730268325f832c"}, + {file = 
"rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9b007c2444705a2dc4a525964fd4dd28c3320b19b3410da6517cab28716f27d3"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:188912b22b6c8225f4c4ffa020a2baa6ad8fabb3c141a12dbe6edbb34e7f1425"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:1b4cf9ab9a0ae0cb122685209806d3f1dcb63b9fccdf1424fb42a129dc8c2faa"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2d34a5450a402b00d20aeb7632489ffa2556ca7b26f4a63c35f6fccae1977427"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:466030a42724780794dea71eb32db83cc51214d66ab3fb3156edd88b9c8f0d78"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:68172622a5a57deb079a2c78511c40f91193548e8ab342c31e8cb0764d362459"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54cdfcda59251b9c2f87a05d038c2ae02121219a04d4a1e6fc345794295bdc07"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b75b912a0baa033350367a8a07a8b2d44fd5b90c890bfbd063a8a5f945f644b"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47aeceb4363851d17f63069318ba5721ae695d9da55d599b4d6fb31508595278"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0525847f83f506aa1e28eb2057b696fe38217e12931c8b1b02198cfe6975e142"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efbe0b5e0fd078ed7b005faa0170da4f72666360f66f0bb2d7f73526ecfd99f9"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0fadfdda275c838cba5102c7f90a20f2abd7727bf8f4a2b654a5b617529c5c18"}, + {file = 
"rpds_py-0.12.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:56dd500411d03c5e9927a1eb55621e906837a83b02350a9dc401247d0353717c"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:6915fc9fa6b3ec3569566832e1bb03bd801c12cea030200e68663b9a87974e76"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5f1519b080d8ce0a814f17ad9fb49fb3a1d4d7ce5891f5c85fc38631ca3a8dc4"}, + {file = "rpds_py-0.12.0.tar.gz", hash = "sha256:7036316cc26b93e401cedd781a579be606dad174829e6ad9e9c5a0da6e036f80"}, ] [[package]] name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" +category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -3469,6 +3638,7 @@ pyasn1 = ">=0.1.3" name = "ruamel-yaml" version = "0.17.17" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +category = "main" optional = false python-versions = ">=3" files = [ @@ -3487,6 +3657,7 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] name = "ruamel-yaml-clib" version = "0.2.8" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3494,49 +3665,35 @@ files = [ {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, - {file = 
"ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, {file = 
"ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = 
"sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, @@ -3546,6 +3703,7 @@ files = [ name = "schematic-db" version = "0.0.dev33" description = "" +category = "main" optional = false python-versions = ">=3.9,<4.0" files = [ @@ -3576,6 +3734,7 @@ synapse = ["synapseclient (>=3.0.0,<4.0.0)"] name = "scipy" version = "1.11.3" description = "Fundamental algorithms for scientific computing in Python" +category = "main" optional = false python-versions = "<3.13,>=3.9" files = [ @@ -3618,6 +3777,7 @@ test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeo name = "secretstorage" version = "3.3.3" description = "Python bindings to FreeDesktop.org Secret Service API" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3633,6 +3793,7 @@ jeepney = ">=0.6" name = "send2trash" version = "1.8.2" description = "Send file to trash natively under Mac OS X, Windows and Linux" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -3649,6 +3810,7 @@ win32 = ["pywin32"] name = "setuptools" 
version = "66.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3665,6 +3827,7 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3676,6 +3839,7 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3687,6 +3851,7 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +category = "main" optional = false python-versions = "*" files = [ @@ -3698,6 +3863,7 @@ files = [ name = "soupsieve" version = "2.5" description = "A modern CSS selector implementation for Beautiful Soup." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3709,6 +3875,7 @@ files = [ name = "sphinx" version = "7.2.6" description = "Python documentation generator" +category = "main" optional = false python-versions = ">=3.9" files = [ @@ -3744,6 +3911,7 @@ test = ["cython (>=3.0)", "filelock", "html5lib", "pytest (>=4.6)", "setuptools name = "sphinx-click" version = "4.4.0" description = "Sphinx extension that automatically documents click applications" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3760,6 +3928,7 @@ sphinx = ">=2.0" name = "sphinxcontrib-applehelp" version = "1.0.7" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +category = "main" optional = false python-versions = ">=3.9" files = [ @@ -3778,6 +3947,7 @@ test = ["pytest"] name = "sphinxcontrib-devhelp" version = "1.0.5" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" +category = "main" optional = false python-versions = ">=3.9" files = [ @@ -3796,6 +3966,7 @@ test = ["pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.4" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "main" optional = false python-versions = ">=3.9" files = [ @@ -3814,6 +3985,7 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -3828,6 +4000,7 @@ test = ["flake8", "mypy", "pytest"] name = "sphinxcontrib-qthelp" version = "1.0.6" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" +category = "main" optional = false python-versions = ">=3.9" files = [ @@ -3846,6 +4019,7 @@ test = ["pytest"] name = "sphinxcontrib-serializinghtml" version = "1.1.9" description = "sphinxcontrib-serializinghtml is a sphinx 
extension which outputs \"serialized\" HTML files (json and pickle)" +category = "main" optional = false python-versions = ">=3.9" files = [ @@ -3862,60 +4036,61 @@ test = ["pytest"] [[package]] name = "sqlalchemy" -version = "2.0.22" +version = "2.0.23" description = "Database Abstraction Library" +category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f146c61ae128ab43ea3a0955de1af7e1633942c2b2b4985ac51cc292daf33222"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:875de9414393e778b655a3d97d60465eb3fae7c919e88b70cc10b40b9f56042d"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13790cb42f917c45c9c850b39b9941539ca8ee7917dacf099cc0b569f3d40da7"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e04ab55cf49daf1aeb8c622c54d23fa4bec91cb051a43cc24351ba97e1dd09f5"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a42c9fa3abcda0dcfad053e49c4f752eef71ecd8c155221e18b99d4224621176"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:14cd3bcbb853379fef2cd01e7c64a5d6f1d005406d877ed9509afb7a05ff40a5"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-win32.whl", hash = "sha256:d143c5a9dada696bcfdb96ba2de4a47d5a89168e71d05a076e88a01386872f97"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-win_amd64.whl", hash = "sha256:ccd87c25e4c8559e1b918d46b4fa90b37f459c9b4566f1dfbce0eb8122571547"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f6ff392b27a743c1ad346d215655503cec64405d3b694228b3454878bf21590"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f776c2c30f0e5f4db45c3ee11a5f2a8d9de68e81eb73ec4237de1e32e04ae81c"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c8f1792d20d2f4e875ce7a113f43c3561ad12b34ff796b84002a256f37ce9437"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80eeb5189d7d4b1af519fc3f148fe7521b9dfce8f4d6a0820e8f5769b005051"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69fd9e41cf9368afa034e1c81f3570afb96f30fcd2eb1ef29cb4d9371c6eece2"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54bcceaf4eebef07dadfde424f5c26b491e4a64e61761dea9459103ecd6ccc95"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-win32.whl", hash = "sha256:7ee7ccf47aa503033b6afd57efbac6b9e05180f492aeed9fcf70752556f95624"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-win_amd64.whl", hash = "sha256:b560f075c151900587ade06706b0c51d04b3277c111151997ea0813455378ae0"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2c9bac865ee06d27a1533471405ad240a6f5d83195eca481f9fc4a71d8b87df8"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:625b72d77ac8ac23da3b1622e2da88c4aedaee14df47c8432bf8f6495e655de2"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b39a6e21110204a8c08d40ff56a73ba542ec60bab701c36ce721e7990df49fb9"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53a766cb0b468223cafdf63e2d37f14a4757476157927b09300c8c5832d88560"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0e1ce8ebd2e040357dde01a3fb7d30d9b5736b3e54a94002641dfd0aa12ae6ce"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:505f503763a767556fa4deae5194b2be056b64ecca72ac65224381a0acab7ebe"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-win32.whl", hash = "sha256:154a32f3c7b00de3d090bc60ec8006a78149e221f1182e3edcf0376016be9396"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-win_amd64.whl", hash = 
"sha256:129415f89744b05741c6f0b04a84525f37fbabe5dc3774f7edf100e7458c48cd"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3940677d341f2b685a999bffe7078697b5848a40b5f6952794ffcf3af150c301"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55914d45a631b81a8a2cb1a54f03eea265cf1783241ac55396ec6d735be14883"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2096d6b018d242a2bcc9e451618166f860bb0304f590d205173d317b69986c95"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:19c6986cf2fb4bc8e0e846f97f4135a8e753b57d2aaaa87c50f9acbe606bd1db"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ac28bd6888fe3c81fbe97584eb0b96804bd7032d6100b9701255d9441373ec1"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-win32.whl", hash = "sha256:cb9a758ad973e795267da334a92dd82bb7555cb36a0960dcabcf724d26299db8"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-win_amd64.whl", hash = "sha256:40b1206a0d923e73aa54f0a6bd61419a96b914f1cd19900b6c8226899d9742ad"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3aa1472bf44f61dd27987cd051f1c893b7d3b17238bff8c23fceaef4f1133868"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:56a7e2bb639df9263bf6418231bc2a92a773f57886d371ddb7a869a24919face"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccca778c0737a773a1ad86b68bda52a71ad5950b25e120b6eb1330f0df54c3d0"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6c3e9350f9fb16de5b5e5fbf17b578811a52d71bb784cc5ff71acb7de2a7f9"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:564e9f9e4e6466273dbfab0e0a2e5fe819eec480c57b53a2cdee8e4fdae3ad5f"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_x86_64.whl", 
hash = "sha256:af66001d7b76a3fab0d5e4c1ec9339ac45748bc4a399cbc2baa48c1980d3c1f4"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-win32.whl", hash = "sha256:9e55dff5ec115316dd7a083cdc1a52de63693695aecf72bc53a8e1468ce429e5"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-win_amd64.whl", hash = "sha256:4e869a8ff7ee7a833b74868a0887e8462445ec462432d8cbeff5e85f475186da"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9886a72c8e6371280cb247c5d32c9c8fa141dc560124348762db8a8b236f8692"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a571bc8ac092a3175a1d994794a8e7a1f2f651e7c744de24a19b4f740fe95034"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8db5ba8b7da759b727faebc4289a9e6a51edadc7fc32207a30f7c6203a181592"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b0b3f2686c3f162123adba3cb8b626ed7e9b8433ab528e36ed270b4f70d1cdb"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c1fea8c0abcb070ffe15311853abfda4e55bf7dc1d4889497b3403629f3bf00"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4bb062784f37b2d75fd9b074c8ec360ad5df71f933f927e9e95c50eb8e05323c"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-win32.whl", hash = "sha256:58a3aba1bfb32ae7af68da3f277ed91d9f57620cf7ce651db96636790a78b736"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-win_amd64.whl", hash = "sha256:92e512a6af769e4725fa5b25981ba790335d42c5977e94ded07db7d641490a85"}, - {file = "SQLAlchemy-2.0.22-py3-none-any.whl", hash = "sha256:3076740335e4aaadd7deb3fe6dcb96b3015f1613bd190a4e1634e1b99b02ec86"}, - {file = "SQLAlchemy-2.0.22.tar.gz", hash = "sha256:5434cc601aa17570d79e5377f5fd45ff92f9379e2abed0be5e8c2fba8d353d2b"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:638c2c0b6b4661a4fd264f6fb804eccd392745c5887f9317feb64bb7cb03b3ea"}, + {file = 
"SQLAlchemy-2.0.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3b5036aa326dc2df50cba3c958e29b291a80f604b1afa4c8ce73e78e1c9f01d"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:787af80107fb691934a01889ca8f82a44adedbf5ef3d6ad7d0f0b9ac557e0c34"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c14eba45983d2f48f7546bb32b47937ee2cafae353646295f0e99f35b14286ab"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0666031df46b9badba9bed00092a1ffa3aa063a5e68fa244acd9f08070e936d3"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89a01238fcb9a8af118eaad3ffcc5dedaacbd429dc6fdc43fe430d3a941ff965"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-win32.whl", hash = "sha256:cabafc7837b6cec61c0e1e5c6d14ef250b675fa9c3060ed8a7e38653bd732ff8"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-win_amd64.whl", hash = "sha256:87a3d6b53c39cd173990de2f5f4b83431d534a74f0e2f88bd16eabb5667e65c6"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d5578e6863eeb998980c212a39106ea139bdc0b3f73291b96e27c929c90cd8e1"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62d9e964870ea5ade4bc870ac4004c456efe75fb50404c03c5fd61f8bc669a72"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c80c38bd2ea35b97cbf7c21aeb129dcbebbf344ee01a7141016ab7b851464f8e"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75eefe09e98043cff2fb8af9796e20747ae870c903dc61d41b0c2e55128f958d"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd45a5b6c68357578263d74daab6ff9439517f87da63442d244f9f23df56138d"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:a86cb7063e2c9fb8e774f77fbf8475516d270a3e989da55fa05d08089d77f8c4"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-win32.whl", hash = "sha256:b41f5d65b54cdf4934ecede2f41b9c60c9f785620416e8e6c48349ab18643855"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-win_amd64.whl", hash = "sha256:9ca922f305d67605668e93991aaf2c12239c78207bca3b891cd51a4515c72e22"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0f7fb0c7527c41fa6fcae2be537ac137f636a41b4c5a4c58914541e2f436b45"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c424983ab447dab126c39d3ce3be5bee95700783204a72549c3dceffe0fc8f4"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f508ba8f89e0a5ecdfd3761f82dda2a3d7b678a626967608f4273e0dba8f07ac"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6463aa765cf02b9247e38b35853923edbf2f6fd1963df88706bc1d02410a5577"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e599a51acf3cc4d31d1a0cf248d8f8d863b6386d2b6782c5074427ebb7803bda"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd54601ef9cc455a0c61e5245f690c8a3ad67ddb03d3b91c361d076def0b4c60"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-win32.whl", hash = "sha256:42d0b0290a8fb0165ea2c2781ae66e95cca6e27a2fbe1016ff8db3112ac1e846"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-win_amd64.whl", hash = "sha256:227135ef1e48165f37590b8bfc44ed7ff4c074bf04dc8d6f8e7f1c14a94aa6ca"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:14aebfe28b99f24f8a4c1346c48bc3d63705b1f919a24c27471136d2f219f02d"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e983fa42164577d073778d06d2cc5d020322425a509a08119bdcee70ad856bf"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7e0dc9031baa46ad0dd5a269cb7a92a73284d1309228be1d5935dac8fb3cae24"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5f94aeb99f43729960638e7468d4688f6efccb837a858b34574e01143cf11f89"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:63bfc3acc970776036f6d1d0e65faa7473be9f3135d37a463c5eba5efcdb24c8"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-win32.whl", hash = "sha256:f48ed89dd11c3c586f45e9eec1e437b355b3b6f6884ea4a4c3111a3358fd0c18"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-win_amd64.whl", hash = "sha256:1e018aba8363adb0599e745af245306cb8c46b9ad0a6fc0a86745b6ff7d940fc"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:64ac935a90bc479fee77f9463f298943b0e60005fe5de2aa654d9cdef46c54df"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c4722f3bc3c1c2fcc3702dbe0016ba31148dd6efcd2a2fd33c1b4897c6a19693"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4af79c06825e2836de21439cb2a6ce22b2ca129bad74f359bddd173f39582bf5"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:683ef58ca8eea4747737a1c35c11372ffeb84578d3aab8f3e10b1d13d66f2bc4"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d4041ad05b35f1f4da481f6b811b4af2f29e83af253bf37c3c4582b2c68934ab"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aeb397de65a0a62f14c257f36a726945a7f7bb60253462e8602d9b97b5cbe204"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-win32.whl", hash = "sha256:42ede90148b73fe4ab4a089f3126b2cfae8cfefc955c8174d697bb46210c8306"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-win_amd64.whl", hash = "sha256:964971b52daab357d2c0875825e36584d58f536e920f2968df8d581054eada4b"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:616fe7bcff0a05098f64b4478b78ec2dfa03225c23734d83d6c169eb41a93e55"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e680527245895aba86afbd5bef6c316831c02aa988d1aad83c47ffe92655e74"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9585b646ffb048c0250acc7dad92536591ffe35dba624bb8fd9b471e25212a35"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4895a63e2c271ffc7a81ea424b94060f7b3b03b4ea0cd58ab5bb676ed02f4221"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cc1d21576f958c42d9aec68eba5c1a7d715e5fc07825a629015fe8e3b0657fb0"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:967c0b71156f793e6662dd839da54f884631755275ed71f1539c95bbada9aaab"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-win32.whl", hash = "sha256:0a8c6aa506893e25a04233bc721c6b6cf844bafd7250535abb56cb6cc1368884"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-win_amd64.whl", hash = "sha256:f3420d00d2cb42432c1d0e44540ae83185ccbbc67a6054dcc8ab5387add6620b"}, + {file = "SQLAlchemy-2.0.23-py3-none-any.whl", hash = "sha256:31952bbc527d633b9479f5f81e8b9dfada00b91d6baba021a869095f1a97006d"}, + {file = "SQLAlchemy-2.0.23.tar.gz", hash = "sha256:c1bda93cbbe4aa2aa0aa8655c5aeda505cd219ff3e8da91d1d329e143e4aff69"}, ] [package.dependencies] @@ -3924,6 +4099,7 @@ typing-extensions = ">=4.2.0" [package.extras] aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] @@ -3934,7 +4110,7 @@ mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)"] mysql = ["mysqlclient (>=1.4.0)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=7)"] +oracle = ["cx-oracle (>=8)"] 
oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] @@ -3950,6 +4126,7 @@ sqlcipher = ["sqlcipher3-binary"] name = "sqlalchemy-utils" version = "0.41.1" description = "Various utility functions for SQLAlchemy." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3978,6 +4155,7 @@ url = ["furl (>=0.4.1)"] name = "stack-data" version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" +category = "main" optional = false python-versions = "*" files = [ @@ -3997,6 +4175,7 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "swagger-ui-bundle" version = "0.0.9" description = "swagger_ui_bundle - swagger-ui files in a pip package" +category = "main" optional = false python-versions = "*" files = [ @@ -4011,6 +4190,7 @@ Jinja2 = ">=2.0" name = "synapseclient" version = "3.1.1" description = "A client for Synapse, a collaborative, open-source research platform that allows teams to share data, track analyses, and collaborate." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4037,6 +4217,7 @@ tests = ["flake8 (>=3.7.0,<4.0)", "func-timeout (>=4.3,<5.0)", "pytest (>=6.0.0, name = "tabulate" version = "0.9.0" description = "Pretty-print tabular data" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4051,6 +4232,7 @@ widechars = ["wcwidth"] name = "tenacity" version = "8.2.3" description = "Retry code until it succeeds" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4065,6 +4247,7 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"] name = "terminado" version = "0.17.1" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4085,6 +4268,7 @@ test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] name = "tinycss2" version = "1.2.1" description = "A tiny CSS parser" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4103,6 +4287,7 @@ test = ["flake8", "isort", "pytest"] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -4114,6 +4299,7 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4123,19 +4309,21 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.1" +version = "0.12.2" description = "Style preserving TOML library" +category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, - {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, + {file = "tomlkit-0.12.2-py3-none-any.whl", hash = "sha256:eeea7ac7563faeab0a1ed8fe12c2e5a51c61f933f2502f7e9db0241a65163ad0"}, + {file = "tomlkit-0.12.2.tar.gz", hash = "sha256:df32fab589a81f0d7dc525a4267b6d7a64ee99619cbd1eeb0fae32c1dd426977"}, ] [[package]] name = "toolz" version = "0.12.0" description = "List processing tools and functional utilities" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -4147,6 +4335,7 @@ files = [ name = "tornado" version = "6.3.3" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
+category = "main" optional = false python-versions = ">= 3.8" files = [ @@ -4167,6 +4356,7 @@ files = [ name = "tqdm" version = "4.66.1" description = "Fast, Extensible Progress Meter" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4187,6 +4377,7 @@ telegram = ["requests"] name = "traitlets" version = "5.13.0" description = "Traitlets Python configuration system" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4202,6 +4393,7 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.6.0)", "pre-commit", "pytest (>=7.0, name = "types-python-dateutil" version = "2.8.19.14" description = "Typing stubs for python-dateutil" +category = "main" optional = false python-versions = "*" files = [ @@ -4213,6 +4405,7 @@ files = [ name = "typing-extensions" version = "4.5.0" description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4220,10 +4413,27 @@ files = [ {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, ] +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." 
+category = "main" +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + [[package]] name = "tzdata" version = "2023.3" description = "Provider of IANA time zone data" +category = "main" optional = false python-versions = ">=2" files = [ @@ -4235,6 +4445,7 @@ files = [ name = "tzlocal" version = "5.2" description = "tzinfo object for the local timezone" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4252,6 +4463,7 @@ devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3) name = "uri-template" version = "1.3.0" description = "RFC 6570 URI Template Processor" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4266,6 +4478,7 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake name = "uritemplate" version = "4.1.1" description = "Implementation of RFC 6570 URI Templates" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4277,6 +4490,7 @@ files = [ name = "urllib3" version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -4293,6 +4507,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "uwsgi" version = "2.0.23" description = "The uWSGI server" +category = "dev" optional = false python-versions = "*" files = [ @@ -4303,6 +4518,7 @@ files = [ name = "validators" version = "0.20.0" description = "Python Data Validation for Humans™." 
+category = "main" optional = false python-versions = ">=3.4" files = [ @@ -4319,6 +4535,7 @@ test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] name = "wcwidth" version = "0.2.9" description = "Measures the displayed width of unicode strings in a terminal" +category = "main" optional = false python-versions = "*" files = [ @@ -4330,6 +4547,7 @@ files = [ name = "webcolors" version = "1.13" description = "A library for working with the color formats defined by HTML and CSS." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4345,6 +4563,7 @@ tests = ["pytest", "pytest-cov"] name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" +category = "main" optional = false python-versions = "*" files = [ @@ -4356,6 +4575,7 @@ files = [ name = "websocket-client" version = "1.6.4" description = "WebSocket client for Python with low level API options" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4372,6 +4592,7 @@ test = ["websockets"] name = "werkzeug" version = "2.1.2" description = "The comprehensive WSGI web application library." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4386,6 +4607,7 @@ watchdog = ["watchdog"] name = "widgetsnbextension" version = "4.0.9" description = "Jupyter interactive widgets for Jupyter Notebook" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4397,6 +4619,7 @@ files = [ name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." 
+category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -4481,6 +4704,7 @@ files = [ name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4495,4 +4719,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.9.0,<3.11" -content-hash = "9b6667bb6094ab24849d6b47c8923dd8140a29ba14ffa45a0f4be52609b49f9c" \ No newline at end of file +content-hash = "5ff6062062ccfe8b7c7a64723bdde8cdf5805e97cf574f718d8d3f855b947abe" From 7e1c168a5362ff9d73d57839217f5712e3cf876f Mon Sep 17 00:00:00 2001 From: linglp Date: Wed, 15 Nov 2023 13:47:40 -0500 Subject: [PATCH 215/239] modify workflow to trigger a release to pypi --- .github/workflows/publish.yml | 39 ++++++++++++++++++----------------- override_version.py | 4 +++- 2 files changed, 23 insertions(+), 20 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 804ef19e2..752785a8c 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -1,9 +1,10 @@ name: Publish to PyPI -on: - push: - tags: - - 'v[1-9][0-9].[0-9]+.[0-9]+' - branches: [main] +on: [pull_request] +# on: +# push: +# tags: +# - 'v[1-9][0-9].[0-9]+.[0-9]+' +# branches: [main] jobs: pypi_release: @@ -107,17 +108,17 @@ jobs: # post a message to slack #---------------------------------------------- - - name: Post to a Slack channel - if: steps.publish-to-pypi.outcome == 'success' - id: slack - uses: slackapi/slack-github-action@v1.23.0 - with: - # Slack channel id, channel name, or user id to post message. - # See also: https://api.slack.com/methods/chat.postMessage#channels - # You can pass in multiple channels to post to by providing a comma-delimited list of channel IDs. 
- # ibc-fair-data channel and data-curator-schematic channel - channel-id: 'C050YD75QRL,C01ANC02U59' - # For posting a simple plain text message - slack-message: "Schematic has just been released. Check out new version: ${{ github.ref_name }}" - env: - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + # - name: Post to a Slack channel + # if: steps.publish-to-pypi.outcome == 'success' + # id: slack + # uses: slackapi/slack-github-action@v1.23.0 + # with: + # # Slack channel id, channel name, or user id to post message. + # # See also: https://api.slack.com/methods/chat.postMessage#channels + # # You can pass in multiple channels to post to by providing a comma-delimited list of channel IDs. + # # ibc-fair-data channel and data-curator-schematic channel + # channel-id: 'C050YD75QRL,C01ANC02U59' + # # For posting a simple plain text message + # slack-message: "Schematic has just been released. Check out new version: ${{ github.ref_name }}" + # env: + # SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} diff --git a/override_version.py b/override_version.py index c6f61c9d6..874197ab6 100644 --- a/override_version.py +++ b/override_version.py @@ -3,7 +3,9 @@ data = toml.load("pyproject.toml") #get release version -RELEASE_VERSION = os.getenv('RELEASE_VERSION') +# RELEASE_VERSION = os.getenv('RELEASE_VERSION') +# temporarily hard coded release version +RELEASE_VERSION = 'v23.11.2bN' # Modify field data['tool']['poetry']['version']=RELEASE_VERSION print('the version number of this release is: ', RELEASE_VERSION) From 93a112191c12c4c202c68c21e4e5decebe53dee8 Mon Sep 17 00:00:00 2001 From: linglp Date: Wed, 15 Nov 2023 13:51:05 -0500 Subject: [PATCH 216/239] test changing the requirements when publishing to pypi --- .github/workflows/publish.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 752785a8c..e4a78a123 100644 --- a/.github/workflows/publish.yml +++ 
b/.github/workflows/publish.yml @@ -1,5 +1,7 @@ name: Publish to PyPI -on: [pull_request] +on: + push: + branches: [develop-refactor-schemas] # on: # push: # tags: From b6f6ccc1a08b8b283287511aa99836c32c26485c Mon Sep 17 00:00:00 2001 From: linglp Date: Wed, 15 Nov 2023 13:52:23 -0500 Subject: [PATCH 217/239] commented out the if statement --- .github/workflows/publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index e4a78a123..02037ab76 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest env: POETRY_VERSION: 1.3.0 - if: github.event_name == 'push' && contains(github.ref, 'refs/tags') + # if: github.event_name == 'push' && contains(github.ref, 'refs/tags') steps: #---------------------------------------------- # check-out repo and set-up python From eed68b102c9d8008682dfbb7183db3f64700b3ce Mon Sep 17 00:00:00 2001 From: linglp Date: Wed, 15 Nov 2023 13:57:56 -0500 Subject: [PATCH 218/239] comment out another if in workflow --- .github/workflows/publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 02037ab76..768ff5ce1 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -99,7 +99,7 @@ jobs: #---------------------------------------------- - name: Publish package to Pypi id: publish-to-pypi - if: steps.check-tag.outputs.match == 'true' + # if: steps.check-tag.outputs.match == 'true' env: PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} PYPI_USERNAME: __token__ From d19bf0390f7fc8288e08e05c4878b89bede7adf5 Mon Sep 17 00:00:00 2001 From: linglp Date: Wed, 15 Nov 2023 14:13:45 -0500 Subject: [PATCH 219/239] release version --- override_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/override_version.py b/override_version.py index 874197ab6..199e2e210 100644 --- 
a/override_version.py +++ b/override_version.py @@ -5,7 +5,7 @@ #get release version # RELEASE_VERSION = os.getenv('RELEASE_VERSION') # temporarily hard coded release version -RELEASE_VERSION = 'v23.11.2bN' +RELEASE_VERSION = 'v23.11.dev2' # Modify field data['tool']['poetry']['version']=RELEASE_VERSION print('the version number of this release is: ', RELEASE_VERSION) From 6f55626851bd0f37383f6846a9d30e4c1721a0a8 Mon Sep 17 00:00:00 2001 From: linglp Date: Thu, 16 Nov 2023 11:22:45 -0500 Subject: [PATCH 220/239] add import --- schematic/store/synapse.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index b24663128..f8d02623b 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -50,6 +50,8 @@ get_dir_size, convert_gb_to_bytes, create_temp_folder, + check_synapse_cache_size, + clear_synapse_cache, profile, calculate_datetime) from schematic.utils.schema_utils import get_class_label_from_display_name From b1fb47dc40630f5258616623c306d3dcb436ee80 Mon Sep 17 00:00:00 2001 From: linglp Date: Thu, 16 Nov 2023 12:52:04 -0500 Subject: [PATCH 221/239] add comments --- schematic/store/synapse.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index f8d02623b..bbc1bd295 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -46,6 +46,8 @@ from schematic.utils.df_utils import update_df, load_df, col_in_dataframe from schematic.utils.validate_utils import comma_separated_list_regex, rule_in_rule_list +# entity_type_mapping, get_dir_size, create_temp_folder, check_synapse_cache_size, and clear_synapse_cache functions are used for AWS deployment +# Please do not remove these import statements from schematic.utils.general import (entity_type_mapping, get_dir_size, convert_gb_to_bytes, From b0788c7b1b38474bd2f6e540c5ad2e0256467d86 Mon Sep 17 00:00:00 2001 From: linglp Date: Thu, 16 Nov 2023 12:57:52 -0500 Subject: 
[PATCH 222/239] revert changes related to workflow --- .github/workflows/publish.yml | 41 ++++++++++++++++------------------- override_version.py | 4 +--- 2 files changed, 20 insertions(+), 25 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 768ff5ce1..804ef19e2 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -1,19 +1,16 @@ name: Publish to PyPI on: push: - branches: [develop-refactor-schemas] -# on: -# push: -# tags: -# - 'v[1-9][0-9].[0-9]+.[0-9]+' -# branches: [main] + tags: + - 'v[1-9][0-9].[0-9]+.[0-9]+' + branches: [main] jobs: pypi_release: runs-on: ubuntu-latest env: POETRY_VERSION: 1.3.0 - # if: github.event_name == 'push' && contains(github.ref, 'refs/tags') + if: github.event_name == 'push' && contains(github.ref, 'refs/tags') steps: #---------------------------------------------- # check-out repo and set-up python @@ -99,7 +96,7 @@ jobs: #---------------------------------------------- - name: Publish package to Pypi id: publish-to-pypi - # if: steps.check-tag.outputs.match == 'true' + if: steps.check-tag.outputs.match == 'true' env: PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} PYPI_USERNAME: __token__ @@ -110,17 +107,17 @@ jobs: # post a message to slack #---------------------------------------------- - # - name: Post to a Slack channel - # if: steps.publish-to-pypi.outcome == 'success' - # id: slack - # uses: slackapi/slack-github-action@v1.23.0 - # with: - # # Slack channel id, channel name, or user id to post message. - # # See also: https://api.slack.com/methods/chat.postMessage#channels - # # You can pass in multiple channels to post to by providing a comma-delimited list of channel IDs. - # # ibc-fair-data channel and data-curator-schematic channel - # channel-id: 'C050YD75QRL,C01ANC02U59' - # # For posting a simple plain text message - # slack-message: "Schematic has just been released. 
Check out new version: ${{ github.ref_name }}" - # env: - # SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + - name: Post to a Slack channel + if: steps.publish-to-pypi.outcome == 'success' + id: slack + uses: slackapi/slack-github-action@v1.23.0 + with: + # Slack channel id, channel name, or user id to post message. + # See also: https://api.slack.com/methods/chat.postMessage#channels + # You can pass in multiple channels to post to by providing a comma-delimited list of channel IDs. + # ibc-fair-data channel and data-curator-schematic channel + channel-id: 'C050YD75QRL,C01ANC02U59' + # For posting a simple plain text message + slack-message: "Schematic has just been released. Check out new version: ${{ github.ref_name }}" + env: + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} diff --git a/override_version.py b/override_version.py index 199e2e210..c6f61c9d6 100644 --- a/override_version.py +++ b/override_version.py @@ -3,9 +3,7 @@ data = toml.load("pyproject.toml") #get release version -# RELEASE_VERSION = os.getenv('RELEASE_VERSION') -# temporarily hard coded release version -RELEASE_VERSION = 'v23.11.dev2' +RELEASE_VERSION = os.getenv('RELEASE_VERSION') # Modify field data['tool']['poetry']['version']=RELEASE_VERSION print('the version number of this release is: ', RELEASE_VERSION) From 0c00e580e7f58d7b6f64544fa13d9e21302b6b53 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 17 Nov 2023 14:02:30 -0800 Subject: [PATCH 223/239] only add values to parsed entry list, if it has a value --- schematic/schemas/data_model_parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index f1684da1c..c154bba1e 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -163,10 +163,10 @@ def parse_entry(self, attr: dict, relationship: str) -> Any: # If the entry should be preserved as a bool dont convert to str. 
if rel_val_type == bool and type(attr[relationship]) == bool: parsed_rel_entry = attr[relationship] - # Move strings to list if they are comma separated. Schema order is preserved. + # Move strings to list if they are comma separated. Schema order is preserved, remove any empty strings added by trailing commas elif rel_val_type == list: parsed_rel_entry = attr[relationship].strip().split(",") - parsed_rel_entry = [r.strip() for r in parsed_rel_entry] + parsed_rel_entry = [r.strip() for r in parsed_rel_entry if r] # Convert value string if dictated by rel_val_type, strip whitespace. elif rel_val_type == str: parsed_rel_entry = str(attr[relationship]).strip() From 78ba12cf154fa7cba5b80b530029356bf458c013 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 11 Dec 2023 14:29:01 -0800 Subject: [PATCH 224/239] change parentOf to subclassOf to reference relationship properly and fix bug where attribute name is recorded twice --- schematic/schemas/data_model_parser.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index f1684da1c..b15454513 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -327,6 +327,7 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di # If not we wil use the get the label. attr_key = entry[label_jsonld_key] + # If the entry has not already been added to the dictionary, add it. 
if attr_key not in attr_rel_dictionary.keys(): attr_rel_dictionary.update(attr_dict_template(attr_key)) @@ -348,7 +349,7 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di rel_entry=rel_entry, id_jsonld_key=id_jsonld_key ) rel_csv_header = self.rel_dict[rel_key]["csv_header"] - if rel_key == 'domainIncludes' or rel_key == 'parentOf': + if rel_key == 'domainIncludes' or rel_key == 'subClassOf': # In the JSONLD the domain includes field contains the ids of attributes that the current attribute is the property/parent of. # Because of this we need to handle these values differently. # We will get the values in the field (parsed_val), then add the current attribute as to the property key in the attr_rel_dictionary[p_attr_key]. From 93274faff2e01eb0769fadde1e802fe18ea1d1da Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 11 Dec 2023 14:31:43 -0800 Subject: [PATCH 225/239] Revert "change parentOf to subclassOf to reference relationship properly and fix bug where attribute name is recorded twice" This reverts commit 78ba12cf154fa7cba5b80b530029356bf458c013. --- schematic/schemas/data_model_parser.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index b15454513..f1684da1c 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -327,7 +327,6 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di # If not we wil use the get the label. attr_key = entry[label_jsonld_key] - # If the entry has not already been added to the dictionary, add it. 
if attr_key not in attr_rel_dictionary.keys(): attr_rel_dictionary.update(attr_dict_template(attr_key)) @@ -349,7 +348,7 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di rel_entry=rel_entry, id_jsonld_key=id_jsonld_key ) rel_csv_header = self.rel_dict[rel_key]["csv_header"] - if rel_key == 'domainIncludes' or rel_key == 'subClassOf': + if rel_key == 'domainIncludes' or rel_key == 'parentOf': # In the JSONLD the domain includes field contains the ids of attributes that the current attribute is the property/parent of. # Because of this we need to handle these values differently. # We will get the values in the field (parsed_val), then add the current attribute as to the property key in the attr_rel_dictionary[p_attr_key]. From 3944133ad63d5164b696a9068ed98caac8462d7a Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 14 Dec 2023 12:46:57 -0800 Subject: [PATCH 226/239] Revert "Refactor Schemas: Initial Release BugFix, incorrect name causing issues with Manifest generation -- FDS-1442" From 0ecda5ad955c7f440e6a42ddfd8754d13cb46904 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 14 Dec 2023 12:49:43 -0800 Subject: [PATCH 227/239] re-add change to change parentOf to subClassOf --- schematic/schemas/data_model_parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index f1684da1c..e898c296d 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -348,7 +348,7 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di rel_entry=rel_entry, id_jsonld_key=id_jsonld_key ) rel_csv_header = self.rel_dict[rel_key]["csv_header"] - if rel_key == 'domainIncludes' or rel_key == 'parentOf': + if rel_key == 'domainIncludes' or rel_key == 'subClassOf': # In the JSONLD the domain includes field contains the ids of 
attributes that the current attribute is the property/parent of. # Because of this we need to handle these values differently. # We will get the values in the field (parsed_val), then add the current attribute as to the property key in the attr_rel_dictionary[p_attr_key]. From 0784d4363e41ae05f75f1624816dd7ac46f813ac Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 8 Jan 2024 18:20:15 -0800 Subject: [PATCH 228/239] add new data model dictionary to map between label and display name --- schematic/schemas/data_model_parser.py | 52 ++++++++++++++++++++++++-- 1 file changed, 49 insertions(+), 3 deletions(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 5169c6f49..da9498d44 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -1,7 +1,7 @@ import logging import pandas as pd import pathlib -from typing import Any, Dict, Optional, Text, List +from typing import Any, Dict, Optional, Text, List, Union from schematic.utils.df_utils import load_df from schematic.utils.io_utils import load_json @@ -274,11 +274,13 @@ def parse_entry(self, rel_entry: any, id_jsonld_key: str) -> Any: parsed_rel_entry = rel_entry return parsed_rel_entry + ''' def get_display_name_from_label(self, label, model_jsonld): jsonld_keys_to_extract = ["label", "displayName"] label_jsonld_key, dn_jsonld_key = [ self.rel_dict[key]["jsonld_key"] for key in jsonld_keys_to_extract ] + breakpoint() for entry in model_jsonld: # Get the attr key for the dictionary if dn_jsonld_key in entry: @@ -288,6 +290,42 @@ def get_display_name_from_label(self, label, model_jsonld): # If not we wil use the get the label. attr_key = entry[label_jsonld_key] return attr_key + ''' + def label_to_dn_dict(self, model_jsonld: list[dict]): + """ Generate a dictionary of labels to display name, so can easily look up display names using the label. 
+ Args: + model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model + Returns: + dn_label_dict: dict of model labels to display names + """ + jsonld_keys_to_extract = ["label", "displayName"] + label_jsonld_key, dn_jsonld_key = [ + self.rel_dict[key]["jsonld_key"] for key in jsonld_keys_to_extract + ] + dn_label_dict = {} + for entry in model_jsonld: + dn_label_dict[entry[label_jsonld_key]]=entry[dn_jsonld_key] + return dn_label_dict + + def convert_entry_to_dn_label(self, parsed_rel_entry:Union[str,list], model_jsonld:list[dict]) -> Union[str,list]: + """Convert a parsed entry to display name, taking into account the entry type + Args: + parsed_rel_entry: an entry that has been parsed base on its input type and characteristics. + model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model + Returns: + parsed_rel_entry: an entry that has been parsed based on its input type and characteristics, and converted to display names. + """ + # Get a dictionary of display_names mapped to labels + dn_label_dict = self.label_to_dn_dict(model_jsonld=model_jsonld) + + # Handle if using the display name as the label + if type(parsed_rel_entry) == list: + parsed_rel_entry = [dn_label_dict.get(entry) if dn_label_dict.get(entry) else entry for entry in parsed_rel_entry ] + elif type(parsed_rel_entry) == str: + converted_label = dn_label_dict.get(parsed_rel_entry) + if converted_label: + parsed_rel_entry = dn_label_dict.get(parsed_rel_entry) + return parsed_rel_entry def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Dict: """ @@ -315,6 +353,10 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di label_jsonld_key, subclassof_jsonld_key, id_jsonld_key, dn_jsonld_key = [ self.rel_dict[key]["jsonld_key"] for key in jsonld_keys_to_extract ] + + # Get a dictionary of display names to labels to identify values explicitly recorded + dn_label_dict = 
self.label_to_dn_dict(model_jsonld=model_jsonld) + # Build the attr_rel_dictionary attr_rel_dictionary = {} # Move through each entry in the jsonld model @@ -348,7 +390,9 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di rel_entry=rel_entry, id_jsonld_key=id_jsonld_key ) rel_csv_header = self.rel_dict[rel_key]["csv_header"] - if rel_key == 'domainIncludes' or rel_key == 'subClassOf': + + #if rel_key == 'domainIncludes' or rel_key == 'subClassOf': + if rel_key == 'domainIncludes': # In the JSONLD the domain includes field contains the ids of attributes that the current attribute is the property/parent of. # Because of this we need to handle these values differently. # We will get the values in the field (parsed_val), then add the current attribute as to the property key in the attr_rel_dictionary[p_attr_key]. @@ -363,6 +407,7 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di attr_in_dict = True # If it is part of the dictionary update add current attribute as a property of the parsed value if attr_in_dict == True: + if not rel_csv_header in attr_rel_dictionary[p_attr_key]["Relationships"]: attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[label_jsonld_key]]}) else: @@ -370,7 +415,7 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di # If the parsed_val is not already recorded in the dictionary, add it elif attr_in_dict == False: # Get the display name for the parsed value - p_attr_key = self.get_display_name_from_label(parsed_val, model_jsonld) + p_attr_key = self.convert_entry_to_dn_label(parsed_val, model_jsonld) attr_rel_dictionary.update(attr_dict_template(p_attr_key)) attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[label_jsonld_key]]}) @@ -378,6 +423,7 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di attr_rel_dictionary[attr_key]["Relationships"].update( 
{rel_csv_header: parsed_rel_entry} ) + elif ( rel_vals["jsonld_key"] in entry.keys() and not rel_vals["csv_header"] From 052d172f0037402a7bfeb312c8e59f23737330c8 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 8 Jan 2024 18:45:57 -0800 Subject: [PATCH 229/239] remove breaks --- schematic/schemas/data_model_parser.py | 1 - tests/test_schemas.py | 1 - 2 files changed, 2 deletions(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 5d6f08f5e..da9498d44 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -439,7 +439,6 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di attr_rel_dictionary[attr_key]["Relationships"].update( {rel_key: parsed_rel_entry} ) - breakpoint() return attr_rel_dictionary def parse_jsonld_model( diff --git a/tests/test_schemas.py b/tests/test_schemas.py index f107590e4..7436712d1 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -417,7 +417,6 @@ def test_generate_data_model_graph(self, helpers, data_model): ) # Check Edge directions - breakpoint() assert 4 == (len(graph.out_edges("TissueStatus"))) assert 2 == (len(graph.in_edges("TissueStatus"))) From 722e4cab696533b45d9a27c847dc44458397f89e Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 8 Jan 2024 18:58:02 -0800 Subject: [PATCH 230/239] update data model jsonld --- tests/data/example.model.jsonld | 34 ++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/tests/data/example.model.jsonld b/tests/data/example.model.jsonld index f49346f0e..a58d36323 100644 --- a/tests/data/example.model.jsonld +++ b/tests/data/example.model.jsonld @@ -7,6 +7,23 @@ "xsd": "http://www.w3.org/2001/XMLSchema#" }, "@graph": [ + { + "@id": "bts:Component", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "Component", + "rdfs:subClassOf": [ + { + "@id": "bts:Thing" + } + ], + "schema:isPartOf": { + 
"@id": "http://schema.biothings.io" + }, + "sms:displayName": "Component", + "sms:required": "sms:true", + "sms:validationRules": [] + }, { "@id": "bts:Patient", "@type": "rdfs:Class", @@ -128,23 +145,6 @@ "sms:required": "sms:true", "sms:validationRules": [] }, - { - "@id": "bts:Component", - "@type": "rdfs:Class", - "rdfs:comment": "TBD", - "rdfs:label": "Component", - "rdfs:subClassOf": [ - { - "@id": "bts:Thing" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "sms:displayName": "Component", - "sms:required": "sms:false", - "sms:validationRules": [] - }, { "@id": "bts:DataType", "@type": "rdfs:Class", From 4918ef7cb2566863b5480eae302f18cb124ea1a4 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 16 Jan 2024 14:27:50 -0800 Subject: [PATCH 231/239] WIP: fix JSONLD parsing so display names are used instead of labels --- schematic/schemas/data_model_nodes.py | 26 +++++++++-- schematic/schemas/data_model_parser.py | 60 +++++++++++++------------- 2 files changed, 53 insertions(+), 33 deletions(-) diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index a1681c469..e11fdbffd 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -3,6 +3,7 @@ from rdflib import Namespace from typing import Any, Dict, Optional, Text, List, Callable +from schematic.schemas.data_model_parser import DataModelJSONLDParser from schematic.schemas.data_model_relationships import DataModelRelationships from schematic.utils.schema_utils import ( @@ -32,7 +33,13 @@ def __init__(self, attribute_relationships_dict): ) # retrieve a list of relationship types that will produce nodes. 
self.node_relationships = list(self.edge_relationships_dictionary.values()) - + ''' + def labels_to_display_names(self, attr_rel_dict): + labels_to_display_names={} + for attr, rels in attr_rel_dict.items(): + labels_to_display_names.update({rels['Relationships']['label']:attr}) + return labels_to_display_names + ''' def gather_nodes(self, attr_info: tuple) -> list: """Take in a tuple containing attriute name and relationship dictionary, and find all nodes defined in attribute information. Args: @@ -52,7 +59,7 @@ def gather_nodes(self, attr_info: tuple) -> list: nodes.append(attribute) for rel in self.node_relationships: if rel in relationships.keys(): - nodes.extend([node.strip() for node in relationships[rel]]) + nodes.extend([node for node in relationships[rel] if node is not None]) return nodes def gather_all_nodes_in_model(self, attr_rel_dict: dict) -> list: @@ -194,7 +201,15 @@ def generate_node_dict(self, node_display_name: str, attr_rel_dict: dict) -> dic """ # Strip whitespace from node display name node_display_name = node_display_name.strip() - + ''' + label_to_dns_dict = self.labels_to_display_names(attr_rel_dict=attr_rel_dict) + true_dn = '' + true_dn = label_to_dns_dict.get(node_display_name) + if true_dn: + node_display_name=true_dn + #if true_dn == 'individualID': + #breakpoint() + ''' # Determine if property or class entry_type = self.get_entry_type(node_display_name=node_display_name) @@ -231,6 +246,8 @@ def generate_node_dict(self, node_display_name: str, attr_rel_dict: dict) -> dic } ) else: + #if node_display_name == "IndividualID": + #breakpoint() # For standard entries, get information from attr_relationship dictionary node_dict.update({rel_key: attr_relationships[csv_header]}) # else, add default values @@ -254,7 +271,8 @@ def generate_node_dict(self, node_display_name: str, attr_rel_dict: dict) -> dic else: # Set value to defaults. 
node_dict.update({rel_key: rel_node_dict["default"]}) - + #if true_dn == 'individualID': + #breakpoint() return node_dict def generate_node(self, G: nx.MultiDiGraph, node_dict: dict) -> nx.MultiDiGraph: diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index da9498d44..df9b367b3 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -243,7 +243,7 @@ def __init__( # Load relationships dictionary. self.rel_dict = self.dmr.define_data_model_relationships() - def parse_entry(self, rel_entry: any, id_jsonld_key: str) -> Any: + def parse_entry(self, rel_entry: any, id_jsonld_key: str, dn_label_dict:dict[str:str], model_jsonld:dict) -> Any: """Parse an input entry based on certain attributes Args: rel_entry: Given a single entry and relationship in a JSONLD data model, the recorded value @@ -256,7 +256,11 @@ def parse_entry(self, rel_entry: any, id_jsonld_key: str) -> Any: parsed_rel_entry = rel_entry["@id"] # Parse list of dictionaries to make a list of entries with context stripped (will update this section when contexts added.) elif type(rel_entry) == list and type(rel_entry[0]) == dict: - parsed_rel_entry = [r[id_jsonld_key].split(":")[1] for r in rel_entry] + #parsed_rel_entry = [r[id_jsonld_key].split(":")[1] for r in rel_entry] + #parsed_rel_entry = [dn_label_dict.get(r[id_jsonld_key].split(":")[1]) for r in rel_entry] + parsed_rel_entry = self.convert_entry_to_dn_label([r[id_jsonld_key].split(":")[1] for r in rel_entry], model_jsonld) + #if len([r[id_jsonld_key].split(":")[1] for r in rel_entry]) != len(parsed_rel_entry): + #breakpoint() # Strip context from string and convert true/false to bool elif type(rel_entry) == str: # Remove contexts and treat strings as appropriate. 
@@ -268,29 +272,24 @@ def parse_entry(self, rel_entry: any, id_jsonld_key: str) -> Any: elif parsed_rel_entry.lower == "false": parsed_rel_entry = False else: - parsed_rel_entry = rel_entry + parsed_rel_entry=self.convert_entry_to_dn_label(rel_entry, model_jsonld) + ''' + if dn_label_dict.get(rel_entry): + parsed_rel_entry = dn_label_dict[rel_entry] + else: + parsed_rel_entry = rel_entry + ''' # For anything else get that else: - parsed_rel_entry = rel_entry + parsed_rel_entry=self.convert_entry_to_dn_label(rel_entry, model_jsonld) + ''' + if dn_label_dict.get(rel_entry): + parsed_rel_entry = dn_label_dict[rel_entry] + else: + parsed_rel_entry = rel_entry + ''' return parsed_rel_entry - ''' - def get_display_name_from_label(self, label, model_jsonld): - jsonld_keys_to_extract = ["label", "displayName"] - label_jsonld_key, dn_jsonld_key = [ - self.rel_dict[key]["jsonld_key"] for key in jsonld_keys_to_extract - ] - breakpoint() - for entry in model_jsonld: - # Get the attr key for the dictionary - if dn_jsonld_key in entry: - # The attr_key is the entry display name if one was recorded - attr_key = entry[dn_jsonld_key] - else: - # If not we wil use the get the label. - attr_key = entry[label_jsonld_key] - return attr_key - ''' def label_to_dn_dict(self, model_jsonld: list[dict]): """ Generate a dictionary of labels to display name, so can easily look up display names using the label. 
Args: @@ -317,7 +316,6 @@ def convert_entry_to_dn_label(self, parsed_rel_entry:Union[str,list], model_json """ # Get a dictionary of display_names mapped to labels dn_label_dict = self.label_to_dn_dict(model_jsonld=model_jsonld) - # Handle if using the display name as the label if type(parsed_rel_entry) == list: parsed_rel_entry = [dn_label_dict.get(entry) if dn_label_dict.get(entry) else entry for entry in parsed_rel_entry ] @@ -387,10 +385,9 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di # If there is an entry parse it by type and add to the attr:relationships dictionary. if rel_entry: parsed_rel_entry = self.parse_entry( - rel_entry=rel_entry, id_jsonld_key=id_jsonld_key + rel_entry=rel_entry, id_jsonld_key=id_jsonld_key, dn_label_dict=dn_label_dict, model_jsonld=model_jsonld, ) rel_csv_header = self.rel_dict[rel_key]["csv_header"] - #if rel_key == 'domainIncludes' or rel_key == 'subClassOf': if rel_key == 'domainIncludes': # In the JSONLD the domain includes field contains the ids of attributes that the current attribute is the property/parent of. 
@@ -402,16 +399,18 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di p_attr_key='' # Check if the parsed value is already a part of the attr_rel_dictionary for attr_dn, rels in attr_rel_dictionary.items(): - if parsed_val == rels["Relationships"].get('label'): + #if parsed_val == rels["Relationships"].get('label'): + if parsed_val == attr_dn: p_attr_key = attr_dn attr_in_dict = True # If it is part of the dictionary update add current attribute as a property of the parsed value if attr_in_dict == True: - if not rel_csv_header in attr_rel_dictionary[p_attr_key]["Relationships"]: - attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[label_jsonld_key]]}) + #attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[label_jsonld_key]]}) + attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[dn_jsonld_key]]}) else: - attr_rel_dictionary[p_attr_key]["Relationships"][rel_csv_header].append(entry[label_jsonld_key]) + #attr_rel_dictionary[p_attr_key]["Relationships"][rel_csv_header].append(entry[label_jsonld_key]) + attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[dn_jsonld_key]]}) # If the parsed_val is not already recorded in the dictionary, add it elif attr_in_dict == False: # Get the display name for the parsed value @@ -419,7 +418,10 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di attr_rel_dictionary.update(attr_dict_template(p_attr_key)) attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[label_jsonld_key]]}) + else: + #if attr_key == 'assay_autorad_metadata_template': + #breakpoint() attr_rel_dictionary[attr_key]["Relationships"].update( {rel_csv_header: parsed_rel_entry} ) @@ -433,7 +435,7 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di # If there is an entry parset it by type and add to the attr:relationships dictionary. 
if rel_entry: parsed_rel_entry = self.parse_entry( - rel_entry=rel_entry, id_jsonld_key=id_jsonld_key + rel_entry=rel_entry, id_jsonld_key=id_jsonld_key, dn_label_dict=dn_label_dict, model_jsonld=model_jsonld, ) # Add relationships for each attribute and relationship to the dictionary attr_rel_dictionary[attr_key]["Relationships"].update( From 0f99f51e419a8d9b901b4adfc09d8afeac90998b Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 16 Jan 2024 14:51:28 -0800 Subject: [PATCH 232/239] update tests/test_schemas.py to conform to JSONLD parsing using display names like csv --- tests/test_schemas.py | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 7436712d1..cccdb0208 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -543,8 +543,7 @@ def test_gather_nodes(self, helpers, data_model): # Make sure the nodes returned conform to expectations (values and order) ## The parsing records display names for relationships for CSV and labels for JSONLD, so the expectations are different between the two. 
- if DATA_MODEL_DICT[data_model] == "CSV": - expected_nodes = [ + expected_nodes = [ "Patient", "Patient ID", "Sex", @@ -553,16 +552,6 @@ def test_gather_nodes(self, helpers, data_model): "Component", "DataType", ] - elif DATA_MODEL_DICT[data_model] == "JSONLD": - expected_nodes = [ - "Patient", - "PatientID", - "Sex", - "YearofBirth", - "Diagnosis", - "Component", - "DataType", - ] assert nodes == expected_nodes From 3d662fa1c0df9e4c822e3371483e5ff895b938c1 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 16 Jan 2024 15:07:23 -0800 Subject: [PATCH 233/239] update CLI tests for new path_to_data_model argument name --- tests/test_cli.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_cli.py b/tests/test_cli.py index f3cd19a59..5498a5900 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -64,7 +64,7 @@ def test_get_example_manifest_default(self, runner, helpers, config: Configurati config.load_config("config_example.yml") result = runner.invoke( - manifest, ["--config", config.config_path, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld] + manifest, ["--config", config.config_path, "get", "--data_type", "Patient", "--path_to_data_model", data_model_jsonld] ) @@ -79,7 +79,7 @@ def test_get_example_manifest_csv(self, runner, helpers, config: Configuration, config.load_config("config_example.yml") result = runner.invoke( - manifest, ["--config", config.config_path, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld, "--output_csv", output_path] + manifest, ["--config", config.config_path, "get", "--data_type", "Patient", "--path_to_data_model", data_model_jsonld, "--output_csv", output_path] ) assert result.exit_code == 0 self.assert_expected_file(result, output_path) @@ -91,7 +91,7 @@ def test_get_example_manifest_excel(self, runner, helpers, config: Configuration config.load_config("config_example.yml") result = runner.invoke( - manifest, ["--config", config.config_path, "get", "--data_type", 
"Patient", "--jsonld", data_model_jsonld, "--output_xlsx", output_path] + manifest, ["--config", config.config_path, "get", "--data_type", "Patient", "--path_to_data_model", data_model_jsonld, "--output_xlsx", output_path] ) assert result.exit_code == 0 From dd5e5358736ce2ba3a3db60364bf1e9d67ee2209 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 16 Jan 2024 15:08:53 -0800 Subject: [PATCH 234/239] clean up earlier WIP commit --- schematic/schemas/data_model_nodes.py | 23 +++-------------------- schematic/schemas/data_model_parser.py | 24 ++---------------------- 2 files changed, 5 insertions(+), 42 deletions(-) diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index e11fdbffd..e82369789 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -33,13 +33,7 @@ def __init__(self, attribute_relationships_dict): ) # retrieve a list of relationship types that will produce nodes. self.node_relationships = list(self.edge_relationships_dictionary.values()) - ''' - def labels_to_display_names(self, attr_rel_dict): - labels_to_display_names={} - for attr, rels in attr_rel_dict.items(): - labels_to_display_names.update({rels['Relationships']['label']:attr}) - return labels_to_display_names - ''' + def gather_nodes(self, attr_info: tuple) -> list: """Take in a tuple containing attriute name and relationship dictionary, and find all nodes defined in attribute information. 
Args: @@ -201,15 +195,7 @@ def generate_node_dict(self, node_display_name: str, attr_rel_dict: dict) -> dic """ # Strip whitespace from node display name node_display_name = node_display_name.strip() - ''' - label_to_dns_dict = self.labels_to_display_names(attr_rel_dict=attr_rel_dict) - true_dn = '' - true_dn = label_to_dns_dict.get(node_display_name) - if true_dn: - node_display_name=true_dn - #if true_dn == 'individualID': - #breakpoint() - ''' + # Determine if property or class entry_type = self.get_entry_type(node_display_name=node_display_name) @@ -246,8 +232,6 @@ def generate_node_dict(self, node_display_name: str, attr_rel_dict: dict) -> dic } ) else: - #if node_display_name == "IndividualID": - #breakpoint() # For standard entries, get information from attr_relationship dictionary node_dict.update({rel_key: attr_relationships[csv_header]}) # else, add default values @@ -271,8 +255,7 @@ def generate_node_dict(self, node_display_name: str, attr_rel_dict: dict) -> dic else: # Set value to defaults. node_dict.update({rel_key: rel_node_dict["default"]}) - #if true_dn == 'individualID': - #breakpoint() + return node_dict def generate_node(self, G: nx.MultiDiGraph, node_dict: dict) -> nx.MultiDiGraph: diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index df9b367b3..a541eb3ed 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -256,11 +256,7 @@ def parse_entry(self, rel_entry: any, id_jsonld_key: str, dn_label_dict:dict[str parsed_rel_entry = rel_entry["@id"] # Parse list of dictionaries to make a list of entries with context stripped (will update this section when contexts added.) 
elif type(rel_entry) == list and type(rel_entry[0]) == dict: - #parsed_rel_entry = [r[id_jsonld_key].split(":")[1] for r in rel_entry] - #parsed_rel_entry = [dn_label_dict.get(r[id_jsonld_key].split(":")[1]) for r in rel_entry] parsed_rel_entry = self.convert_entry_to_dn_label([r[id_jsonld_key].split(":")[1] for r in rel_entry], model_jsonld) - #if len([r[id_jsonld_key].split(":")[1] for r in rel_entry]) != len(parsed_rel_entry): - #breakpoint() # Strip context from string and convert true/false to bool elif type(rel_entry) == str: # Remove contexts and treat strings as appropriate. @@ -273,21 +269,11 @@ def parse_entry(self, rel_entry: any, id_jsonld_key: str, dn_label_dict:dict[str parsed_rel_entry = False else: parsed_rel_entry=self.convert_entry_to_dn_label(rel_entry, model_jsonld) - ''' - if dn_label_dict.get(rel_entry): - parsed_rel_entry = dn_label_dict[rel_entry] - else: - parsed_rel_entry = rel_entry - ''' + # For anything else get that else: parsed_rel_entry=self.convert_entry_to_dn_label(rel_entry, model_jsonld) - ''' - if dn_label_dict.get(rel_entry): - parsed_rel_entry = dn_label_dict[rel_entry] - else: - parsed_rel_entry = rel_entry - ''' + return parsed_rel_entry def label_to_dn_dict(self, model_jsonld: list[dict]): @@ -388,7 +374,6 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di rel_entry=rel_entry, id_jsonld_key=id_jsonld_key, dn_label_dict=dn_label_dict, model_jsonld=model_jsonld, ) rel_csv_header = self.rel_dict[rel_key]["csv_header"] - #if rel_key == 'domainIncludes' or rel_key == 'subClassOf': if rel_key == 'domainIncludes': # In the JSONLD the domain includes field contains the ids of attributes that the current attribute is the property/parent of. # Because of this we need to handle these values differently. 
@@ -399,17 +384,14 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di p_attr_key='' # Check if the parsed value is already a part of the attr_rel_dictionary for attr_dn, rels in attr_rel_dictionary.items(): - #if parsed_val == rels["Relationships"].get('label'): if parsed_val == attr_dn: p_attr_key = attr_dn attr_in_dict = True # If it is part of the dictionary update add current attribute as a property of the parsed value if attr_in_dict == True: if not rel_csv_header in attr_rel_dictionary[p_attr_key]["Relationships"]: - #attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[label_jsonld_key]]}) attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[dn_jsonld_key]]}) else: - #attr_rel_dictionary[p_attr_key]["Relationships"][rel_csv_header].append(entry[label_jsonld_key]) attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[dn_jsonld_key]]}) # If the parsed_val is not already recorded in the dictionary, add it elif attr_in_dict == False: @@ -420,8 +402,6 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[label_jsonld_key]]}) else: - #if attr_key == 'assay_autorad_metadata_template': - #breakpoint() attr_rel_dictionary[attr_key]["Relationships"].update( {rel_csv_header: parsed_rel_entry} ) From 64b16e0978106b900d00aa967e4676f7e9c24a23 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 16 Jan 2024 15:18:29 -0800 Subject: [PATCH 235/239] fix issue with poetry.lock merge --- poetry.lock | 4 ---- 1 file changed, 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3de284062..7978aee1e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,8 +1,4 @@ -<<<<<<< HEAD -# This file is automatically @generated by Poetry and should not be changed by hand. -======= # This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
->>>>>>> bddcc390ed3190d2b9a34fa0771fa4cacf023230 [[package]] name = "alabaster" From c4cb3994b3f0aac496d1251e8b2b779e6f7d3f76 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 16 Jan 2024 15:20:53 -0800 Subject: [PATCH 236/239] fix issue with poetry.lock merge --- poetry.lock | 1 - 1 file changed, 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 7978aee1e..746ce0027 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1813,7 +1813,6 @@ jupyter-server = ">=1.1.2" [[package]] name = "jupyter-server" version = "2.11.1" ->>>>>>> bddcc390ed3190d2b9a34fa0771fa4cacf023230 description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." category = "main" optional = false From c61c182e453fbfee5457690a6627fca6a519476a Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 16 Jan 2024 16:25:38 -0800 Subject: [PATCH 237/239] change from app.logger to logger --- schematic/manifest/generator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index b08d445f1..b9fb85987 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1587,7 +1587,7 @@ def create_manifests(path_to_data_model:str, data_types:list, access_token:Optio result = ManifestGenerator.create_single_manifest(path_to_data_model=path_to_data_model, data_type=component, graph_data_model=graph_data_model, output_format=output_format, title=t, access_token=access_token) all_results.append(result) else: - app.logger.error('Currently we do not support returning multiple files as Excel format at once. Please choose a different output format. ') + logger.error('Currently we do not support returning multiple files as Excel format at once. Please choose a different output format. 
') else: for i, dt in enumerate(data_types): if not title: @@ -1608,7 +1608,7 @@ def create_manifests(path_to_data_model:str, data_types:list, access_token:Optio all_results.append(result) else: if len(data_types) > 1: - app.logger.warning(f'Currently we do not support returning multiple files as Excel format at once. Only {t} would get returned. ') + logger.warning(f'Currently we do not support returning multiple files as Excel format at once. Only {t} would get returned. ') return result return all_results From 6563f6395e1afa51c1f71e83f6ecf39ee0d1cb7e Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 16 Jan 2024 17:06:53 -0800 Subject: [PATCH 238/239] WIP: add temp models --- tests/test_api.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/test_api.py b/tests/test_api.py index 71525327d..ccfb6ae7f 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -63,12 +63,14 @@ def test_manifest_json(helpers): @pytest.fixture(scope="class") def data_model_jsonld(): - data_model_jsonld ="https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld" + #data_model_jsonld ="https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld" + data_model_jsonld = "https://raw.githubusercontent.com/mialy-defelice/data_models/main/example.model.jsonld" yield data_model_jsonld @pytest.fixture(scope="class") def benchmark_data_model_jsonld(): - benchmark_data_model_jsonld = "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld" + #benchmark_data_model_jsonld = "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld" + benchmark_data_model_jsonld = "https://raw.githubusercontent.com/mialy-defelice/data_models/main/example.single_rule.model.jsonld" yield benchmark_data_model_jsonld def get_MockComponent_attribute(): @@ -76,7 +78,8 @@ def 
get_MockComponent_attribute(): Yield all of the mock conponent attributes one at a time TODO: pull in jsonld from fixture """ - schema_url = "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld" + #schema_url = "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld" + schema_url = "https://raw.githubusercontent.com/mialy-defelice/data_models/main/example.single_rule.model.jsonld" data_model_parser = DataModelParser(path_to_data_model = schema_url) #Parse Model parsed_data_model = data_model_parser.parse_model() @@ -559,6 +562,7 @@ def test_generate_manifest_file_based_annotations(self, client, use_annotations, # and also make sure that entityId column appears in the end assert google_sheet_df.columns.to_list()[-1] == "entityId" + assert sorted(google_sheet_df.columns.to_list()) == sorted(expected) # make sure Filename, entityId, and component get filled with correct value From 97ecc1572c3cb81f6fd58c4fb2fb8e3965e122ac Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Tue, 16 Jan 2024 17:22:59 -0800 Subject: [PATCH 239/239] fix dev merge issue, use_annotations added to create_single manifest --- schematic/manifest/generator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index b9fb85987..049941ff2 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1599,9 +1599,9 @@ def create_manifests(path_to_data_model:str, data_types:list, access_token:Optio t = title if dataset_ids: # if a dataset_id is provided add this to the function call. 
- result = ManifestGenerator.create_single_manifest(path_to_data_model=path_to_data_model, data_type=dt, graph_data_model=graph_data_model, dataset_id=dataset_ids[i], output_format=output_format, title=t, access_token=access_token) + result = ManifestGenerator.create_single_manifest(path_to_data_model=path_to_data_model, data_type=dt, graph_data_model=graph_data_model, dataset_id=dataset_ids[i], output_format=output_format, title=t, access_token=access_token, use_annotations=use_annotations) else: - result = ManifestGenerator.create_single_manifest(path_to_data_model=path_to_data_model, data_type=dt, graph_data_model=graph_data_model, output_format=output_format, title=t, access_token=access_token) + result = ManifestGenerator.create_single_manifest(path_to_data_model=path_to_data_model, data_type=dt, graph_data_model=graph_data_model, output_format=output_format, title=t, access_token=access_token, use_annotations=use_annotations) # if output is pandas dataframe or google sheet url if isinstance(result, str) or isinstance(result, pd.DataFrame):