From def18c81708ea5a819a672c336b548cad5d21334 Mon Sep 17 00:00:00 2001 From: Kyle Burton Date: Thu, 24 Aug 2023 10:58:32 -0500 Subject: [PATCH 01/47] Skeleton code for Single Table Indexd --- deployment/Secrets/indexd_settings.py | 4 +- indexd/default_settings.py | 3 +- indexd/index/drivers/single_table_alchemy.py | 134 +++++++++++++++++++ 3 files changed, 138 insertions(+), 3 deletions(-) create mode 100644 indexd/index/drivers/single_table_alchemy.py diff --git a/deployment/Secrets/indexd_settings.py b/deployment/Secrets/indexd_settings.py index e7572d4f..ca09f10a 100644 --- a/deployment/Secrets/indexd_settings.py +++ b/deployment/Secrets/indexd_settings.py @@ -4,7 +4,7 @@ from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver from indexd.alias.drivers.alchemy import SQLAlchemyAliasDriver from indexd.auth.drivers.alchemy import SQLAlchemyAuthDriver - +from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver APP_NAME = "indexd" @@ -34,7 +34,7 @@ def load_json(file_name): CONFIG["DRS_SERVICE_INFO"] = json.loads(drs_service_info) CONFIG["INDEX"] = { - "driver": SQLAlchemyIndexDriver( + "driver": SingleTableSQLAlchemyIndexDriver( "postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}".format( usr=usr, psw=psw, diff --git a/indexd/default_settings.py b/indexd/default_settings.py index f57f1054..4a68dc94 100644 --- a/indexd/default_settings.py +++ b/indexd/default_settings.py @@ -1,6 +1,7 @@ from .index.drivers.alchemy import SQLAlchemyIndexDriver from .alias.drivers.alchemy import SQLAlchemyAliasDriver from .auth.drivers.alchemy import SQLAlchemyAuthDriver +from .index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver CONFIG = {} @@ -16,7 +17,7 @@ # Do NOT set both ADD_PREFIX_ALIAS and PREPEND_PREFIX to True, or aliases # will be created as "". CONFIG["INDEX"] = { - "driver": SQLAlchemyIndexDriver( + "driver": SingleTableSQLAlchemyIndexDriver( "sqlite:///index.sq3", echo=True, index_config={ diff --git a/indexd/index/drivers/single_table_alchemy.py b/indexd/index/drivers/single_table_alchemy.py new file mode 100644 index 00000000..83a7eed7 --- /dev/null +++ b/indexd/index/drivers/single_table_alchemy.py @@ -0,0 +1,134 @@ +import datetime + +from sqlalchemy import Column, String, ForeignKey, BigInteger, DateTime, ARRAY +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.ext.declarative import declarative_base + +from indexd.index.driver import IndexDriverABC + +Base = declarative_base() + + +class IndexRecord(Base): + """ + Base index record representation. 
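+    All composite fields (urls, acl, authz, hashes, metadata) are stored as
+    ARRAY/JSONB columns on this one table instead of separate child tables.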
+ """ + + __tablename__ = "index_record" + + did = Column(String, primary_key=True) + + baseid = Column(String, ForeignKey("base_version.baseid"), index=True) + rev = Column(String) + form = Column(String) + size = Column(BigInteger, index=True) + created_date = Column(DateTime, default=datetime.datetime.utcnow) + updated_date = Column(DateTime, default=datetime.datetime.utcnow) + file_name = Column(String, index=True) + version = Column(String, index=True) + uploader = Column(String, index=True) + description = Column(String) + content_created_date = Column(DateTime) + content_updated_date = Column(DateTime) + hashes = Column(JSONB) + acl = Column(ARRAY(String)) + authz = Column(ARRAY(String)) + urls = Column(ARRAY(String)) + metadata = Column(JSONB) + url_metadata = Column(JSONB) + alias = Column(ARRAY(String)) + + +class SingleTableSQLAlchemyIndexDriver(IndexDriverABC): + def ids( + self, + limit=100, + start=None, + size=None, + urls=None, + acl=None, + authz=None, + hashes=None, + file_name=None, + version=None, + uploader=None, + metadata=None, + ids=None, + urls_metadata=None, + negate_params=None, + ): + pass + + def get_urls(self, size=None, hashes=None, ids=None, start=0, limit=100): + pass + + def add( + self, + form, + did=None, + size=None, + file_name=None, + metadata=None, + urls_metadata=None, + version=None, + urls=None, + acl=None, + authz=None, + hashes=None, + baseid=None, + uploader=None, + description=None, + content_created_date=None, + content_updated_date=None, + ): + pass + + def get(self, did): + pass + + def update(self, did, rev, changing_fields): + pass + + def delete(self, did, rev): + pass + + def add_version( + self, + current_did, + form, + new_did=None, + size=None, + file_name=None, + metadata=None, + urls_metadata=None, + version=None, + urls=None, + acl=None, + authz=None, + hashes=None, + description=None, + content_created_date=None, + content_updated_date=None, + ): + pass + + def get_all_versions(self, did): + pass + + def get_latest_version(self, did, has_version=None): + pass + + def health_check(self): + pass + + def __contains__(self, did): + pass + + def __iter__(self): + pass + + def totalbytes(self): + pass + + def len(self): + pass From d522abed0ce82d6180ad8e777fc140338111abfc Mon Sep 17 00:00:00 2001 From: BinamB Date: Mon, 18 Sep 2023 12:51:52 -0500 Subject: [PATCH 02/47] single table alchemy --- indexd/index/drivers/single_table_alchemy.py | 449 ++++++++++++++++++- 1 file changed, 431 insertions(+), 18 deletions(-) diff --git a/indexd/index/drivers/single_table_alchemy.py b/indexd/index/drivers/single_table_alchemy.py index 83a7eed7..47cda3a4 100644 --- a/indexd/index/drivers/single_table_alchemy.py +++ b/indexd/index/drivers/single_table_alchemy.py @@ -9,14 +9,14 @@ Base = declarative_base() -class IndexRecord(Base): +class Record(Base): """ Base index record representation. 
""" - __tablename__ = "index_record" + __tablename__ = "record" - did = Column(String, primary_key=True) + guid = Column(String, primary_key=True) baseid = Column(String, ForeignKey("base_version.baseid"), index=True) rev = Column(String) @@ -57,15 +57,123 @@ def ids( urls_metadata=None, negate_params=None, ): - pass + with self.session as session: + query = session.query(Record) + + if start is not None: + query = query.filter(Record.guid > start) + + if size is not None: + query = query.filter(Record.size == size) + + if file_name is not None: + query = query.filter(Record.file_name == file_name) + + if version is not None: + query = query.filter(Record.version == version) + + if uploader is not None: + query = query.filter(Record.uploader == uploader) + + if urls: + for u in urls: + query = query.filter(Record.urls.contains(u)).all() + + if acl: + for u in acl: + query = query.filter(Record.acl.contains(u).all()) + elif acl == []: + query = query.filter(Record.acl == None) + + if authz: + for u in authz: + query = query.filter(Record.authz.contains(u)).all() + elif authz == []: + query = query.filter(Record.authz == None) + + if hashes: + for h, v in hashes.items(): + query = query.filter(Record.hashes.contains({h: v})) + + if metadata: + for k, v in metadata.items(): + query = query.filter(Record.metadata.contains({k: v})) + + if urls_metadata: + for url_key, url_dict in urls_metadata.items(): + query = query.filter(Record.urls_metadata.contains(url_key)) + for k, v in url_dict.items(): + query = query.filter(Record.urls_metadata.any({k: v})) + + if negate_params: + query = self._negate_filter(session, query, **negate_params) + + if page is not None: + query = query.order_by(Record.updated_date) + else: + query = query.order_by(Record.guid) + + if ids: + DEFAULT_PREFIX = self.config.get("DEFAULT_PREFIX") + found_ids = [] + new_ids = [] + + if not DEFAULT_PREFIX: + self.logger.info("NO DEFAULT_PREFIX") + else: + subquery = query.filter(Record.guid.in_(ids)) + found_ids = [i.guid for i in subquery] + + for i in ids: + if i not in found_ids: + if not i.startswith(DEFAULT_PREFIX): + new_ids.append(DEFAULT_PREFIX + i) + else: + stripped = i.split(DEFAULT_PREFIX, 1)[1] + new_ids.append(stripped) + + query = query.filter(Record.guid.in_(found_ids + new_ids)) + else: + query = query.limit(limit) + + if page is not None: + query = query.offset(limit * page) + + return [i.to_document_dict() for i in query] def get_urls(self, size=None, hashes=None, ids=None, start=0, limit=100): - pass + """ + Returns a list of urls matching supplied size/hashes/guids. + """ + if size is None and hashes is None and ids is None: + raise UserError("Please provide size/hashes/ids to filter") + + with self.session as session: + query = session.query(Record) + + if size: + query = query.filter(Record.size == size) + if hashes: + for h, v in hashes.items(): + query = query.filter(Record.hashes.contains({h: v})) + if ids: + query = query.filter(Record.guid.in_(ids)) + # Remove duplicates. + query = query.distinct() + + # Return only specified window. 
+ query = query.offset(start) + query = query.limit(limit) + + return [ + {"url": r.urls, "metadata": {m.key: m.value for m in r.url_metadata}} + for r in query + ] def add( self, form, - did=None, + guid=None, size=None, file_name=None, metadata=None, @@ -81,22 +189,258 @@ def add( content_created_date=None, content_updated_date=None, ): - pass + """ + Creates a new record given size, urls, acl, authz, hashes, metadata, + urls_metadata file name and version + if guid is provided, update the new record with the guid otherwise create it + """ - def get(self, did): - pass + urls = urls or [] + acl = acl or [] + authz = authz or [] + hashes = hashes or {} + metadata = metadata or {} + urls_metadata = urls_metadata or {} - def update(self, did, rev, changing_fields): - pass + with self.session as session: + record = Record() - def delete(self, did, rev): - pass + if not baseid: + baseid = str(uuid.uuid4()) + + record.baseid = baseid + record.file_name = file_name + record.version = version + + if guid: + record.guid = guid + else: + new_guid = str(uuid.uuid4()) + if self.config.get("PREPEND_PREFIX"): + new_guid = self.config["DEFAULT_PREFIX"] + new_guid + record.guid = new_guid + + record.rev = str(uuid.uuid4())[:8] + + record.form, record.size = form, size + + record.uploader = uploader + + record.urls = urls + + record.acl = acl + + record.authz = authz + + record.hashes = hashes + + record.metadata = metadata + + record.description = description + + if content_created_date is not None: + record.content_created_date = datetime.datetime.fromisoformat( + content_created_date + ) + # Users cannot set content_updated_date without a content_created_date + record.content_updated_date = ( + datetime.datetime.fromisoformat(content_updated_date) + if content_updated_date is not None + else record.content_created_date # Set updated to created if no updated is provided + ) + + try: + checked_urls_metadata = check_urls_metadata( + urls_metadata, record, session + ) + record.url_metadata = checked_urls_metadata + + if self.config.get("Add_PREFIX_ALIAS"): + self.add_prefix_alias(record, session) + session.add(record) + session.commit() + except IntegrityError: + raise MultipleRecordsFound( + 'guid "{guid}" already exists'.format(guid=record.guid) + ) + + return record.guid, record.rev, record.baseid + + def get(self, guid): + """ + Gets a record given the record id or baseid. + If the given id is a baseid, it will return the latest version + """ + with self.session as session: + query = session.query(Record) + query = query.filter( + or_(Record.guid == guid, Record.baseid == guid) + ).order_by(Record.created_date.desc()) + + record = query.first() + if record is None: + try: + record = self.get_bundle(bundle_id=guid, expand=expand) + return record + except NoRecordFound: + raise NoRecordFound("no record found") + + return record.to_document_dict() + + def get_with_nonstrict_prefix(self, guid, expand=True): + """ + Attempt to retrieve a record both with and without a prefix. + Proxies 'get' with provided id. + If not found but prefix matches default, attempt with prefix stripped. + If not found and id has no prefix, attempt with default prefix prepended. 
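+        For example, with DEFAULT_PREFIX "dg.1234/" (illustrative), "dg.1234/abc"
+        and "abc" resolve to the same record.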
+ """ + try: + record = self.get(guid, expand=expand) + except NoRecordFound as e: + DEFAULT_PREFIX = self.config.get("DEFAULT_PREFIX") + if not DEFAULT_PREFIX: + raise e + + if not guid.startswith(DEFAULT_PREFIX): + record = self.get(DEFAULT_PREFIX + guid, expand=expand) + else: + stripped = guid.split(DEFAULT_PREFIX, 1)[1] + record = self.get(stripped, expand=expand) + + return record + + def update(self, guid, rev, changing_fields): + """ + Updates an existing record with new values. + """ + authz_err_msg = "Auth error when attempting to update a record. User must have '{}' access on '{}' for service 'indexd'." + + composite_fields = [ + "urls", + "acl", + "authz", + "metadata", + "urls_metadata", + "content_created_date", + "content_updated_date", + ] + + with self.session as session: + query = session.query(Record).filter(Record.guid == guid) + + try: + record = query.one() + except NoResultFound: + raise NoRecordFound("no record found") + except MultipleResultsFound: + raise MultipleRecordsFound("multiple records found") + + if rev != record.rev: + raise RevisionMismatch("Revision mismatch") + + # Some operations are dependant on other operations. For example + # urls has to be updated before urls_metadata because of schema + # constraints. + if "urls" in changing_fields: + session.delete(record.urls) + + record.urls = Record(guid=record.guid, urls=changing_fields["urls"]) + + if "acl" in changing_fields: + session.delete(record.acl) + + record.acl = Record(guid=record.guid, acl=changing_fields["acl"]) + + all_authz = list(set(record.authz)) + if "authz" in changing_fields: + new_authz = list(set(changing_fields["authz"])) + all_authz += new_authz + + session.delete(record.authz) + + record.authz = Record(guid=record.guid, authz=new_authz) + + # authorization check: `update` access on old AND new resources + try: + auth.authorize("update", all_authz) + except AuthError: + self.logger.error(authz_err_msg.format("update", all_authz)) + raise + + if "metadata" in changing_fields: + session.delete(record.metadata) + + record.metadata = changing_fields["metadata"].items() + + if "urls_metadata" in changing_fields: + session.delete(record.url_metadata) + + checked_urls_metadata = check_urls_metadata( + changing_fields["urls_metadata"], record + ) + record.url_metadata = checked_urls_metadata + + if changing_fields.get("content_created_date") is not None: + record.content_created_date = datetime.datetime.fromisoformat( + changing_fields["content_created_date"] + ) + if changing_fields.get("content_updated_date") is not None: + if record.content_created_date is None: + raise UserError( + "Cannot set content_updated_date on record that does not have a content_created_date" + ) + if record.content_created_date > datetime.datetime.fromisoformat( + changing_fields["content_updated_date"] + ): + raise UserError( + "Cannot set content_updated_date before the content_created_date" + ) + + record.content_updated_date = datetime.datetime.fromisoformat( + changing_fields["content_updated_date"] + ) + + for key, value in changing_fields.items(): + if key not in composite_fields: + # No special logic needed for other updates. + # ie file_name, version, etc + setattr(record, key, value) + + record.rev = str(uuid.uuid4())[:8] + + record.updated_date = datetime.datetime.utcnow() + + session.add(record) + + return record.guid, record.baseid, record.rev + + def delete(self, guid, rev): + """ + Removes record if stored by backend. 
+ """ + with self.session as session: + query = session.query(Record) + query = query.filter(Record.guid == guid) + + try: + record = query.one() + except NoResultFound: + raise NoRecordFound("no record found") + except MultipleResultsFound: + raise MultipleRecordsFound("multiple records found") + + if rev != record.rev: + raise RevisionMismatch("revision mismatch") + + auth.authorize("delete", [u.resource for u in record.authz]) + + session.delete(record) def add_version( self, - current_did, + current_guid, form, - new_did=None, + new_guid=None, size=None, file_name=None, metadata=None, @@ -109,19 +453,77 @@ def add_version( description=None, content_created_date=None, content_updated_date=None, + ): + """ + Add a record version given guid + """ + urls = urls or [] + acl = acl or [] + authz = authz or [] + hashes = hashes or {} + metadata = metadata or {} + urls_metadata = urls_metadata or {} + + with self.session as session: + query = session.query(Record).filter_by(guid=current_guid) + + try: + record = query.one() + except NoResultFound: + raise NoRecordFound("no record found") + except MultipleResultsFound: + raise MultipleRecordsFound("multiple records found") + + auth.authorize("update", [u.resource for u in record.authz] + authz) + + baseid = record.baseid + record = IndexRecord() + guid = new_guid + if not guid: + guid = str(uuid.uuid4()) + if self.config.get("PREPEND_PREFIX"): + guid = self.config["DEFAULT_PREFIX"] + guid + + record.guid = guid + record.baseid = baseid + record.rev = str(uuid.uuid4())[:8] + record.form = form + record.size = size + record.file_name = file_name + record.version = version + record.description = description + record.content_created_date = content_created_date + record.content_updated_date = content_updated_date + record.urls = urls + record.acl = acl + record.authz = authz + record.hashes = hashes + record.metadata = metadata + record.url_metadata = check_urls_metadata(urls_metadata, record) + + try: + session.add(record) + session.commit() + except IntegrityError: + raise MultipleRecordsFound("{guid} already exists".format(guid=guid)) + + return record.guid, record.baseid, record.rev + + def add_blank_version( + self, current_guid, new_guid=None, file_name=None, uploader=None, authz=None ): pass - def get_all_versions(self, did): + def get_all_versions(self, guid): pass - def get_latest_version(self, did, has_version=None): + def get_latest_version(self, guid, has_version=None): pass def health_check(self): pass - def __contains__(self, did): + def __contains__(self, guid): pass def __iter__(self): @@ -132,3 +534,14 @@ def totalbytes(self): def len(self): pass + + +def check_urls_metadata(urls_metadata, record): + """ + create url metadata record in database + """ + urls = {u.url for u in record.urls} + for url, url_metadata in urls_metadata.items(): + if url not in urls: + raise UserError("url {} in urls_metadata does not exist".format(url)) + return url_metadata From 62561e0d8644c14ee6a10ee1069206c43fe48a3a Mon Sep 17 00:00:00 2001 From: Kyle Burton Date: Wed, 20 Sep 2023 17:43:21 -0500 Subject: [PATCH 03/47] Parameterized app fixture to run single table for all existing tests Implemented migrate_db for single_table_alchemy Implemented session for single_table_alchemy --- .secrets.baseline | 4 +- indexd/default_settings.py | 2 +- indexd/index/drivers/single_table_alchemy.py | 54 ++++++++++++++++++-- indexd/single_table_settings.py | 21 ++++++++ tests/conftest.py | 25 ++++++++- tests/default_test_settings.py | 1 - 6 files changed, 97 
insertions(+), 10 deletions(-) create mode 100644 indexd/single_table_settings.py diff --git a/.secrets.baseline b/.secrets.baseline index d2fc203d..9f1caad5 100644 --- a/.secrets.baseline +++ b/.secrets.baseline @@ -239,7 +239,7 @@ "filename": "tests/default_test_settings.py", "hashed_secret": "afc848c316af1a89d49826c5ae9d00ed769415f3", "is_verified": false, - "line_number": 40 + "line_number": 39 } ], "tests/postgres/migrations/test_15f2e9345ade_create_tables.py": [ @@ -413,5 +413,5 @@ } ] }, - "generated_at": "2023-04-20T22:58:41Z" + "generated_at": "2023-09-20T22:43:18Z" } diff --git a/indexd/default_settings.py b/indexd/default_settings.py index 4a68dc94..a0fe3156 100644 --- a/indexd/default_settings.py +++ b/indexd/default_settings.py @@ -17,7 +17,7 @@ # Do NOT set both ADD_PREFIX_ALIAS and PREPEND_PREFIX to True, or aliases # will be created as "". CONFIG["INDEX"] = { - "driver": SingleTableSQLAlchemyIndexDriver( + "driver": SQLAlchemyIndexDriver( "sqlite:///index.sq3", echo=True, index_config={ diff --git a/indexd/index/drivers/single_table_alchemy.py b/indexd/index/drivers/single_table_alchemy.py index 47cda3a4..05dfc1d7 100644 --- a/indexd/index/drivers/single_table_alchemy.py +++ b/indexd/index/drivers/single_table_alchemy.py @@ -1,10 +1,16 @@ import datetime +import uuid +from cdislogging import get_logger from sqlalchemy import Column, String, ForeignKey, BigInteger, DateTime, ARRAY from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker +from contextlib import contextmanager from indexd.index.driver import IndexDriverABC +from indexd.index.drivers.alchemy import IndexSchemaVersion +from indexd.utils import migrate_database Base = declarative_base() @@ -34,12 +40,50 @@ class Record(Base): acl = Column(ARRAY(String)) authz = Column(ARRAY(String)) urls = Column(ARRAY(String)) - metadata = Column(JSONB) + record_metadata = Column(JSONB) url_metadata = Column(JSONB) alias = Column(ARRAY(String)) class SingleTableSQLAlchemyIndexDriver(IndexDriverABC): + def __init__(self, conn, logger=None, index_config=None, **config): + super().__init__(conn, **config) + self.logger = logger or get_logger("SQLAlchemyIndexDriver") + self.config = index_config or {} + Base.metadata.bind = self.engine + self.Session = sessionmaker(bind=self.engine) + + def migrate_index_database(self): + """ + This migration logic is DEPRECATED. It is still supported for backwards compatibility, + but any new migration should be added using Alembic. + + migrate index database to match CURRENT_SCHEMA_VERSION + """ + migrate_database( + driver=self, + migrate_functions=SCHEMA_MIGRATION_FUNCTIONS, + current_schema_version=CURRENT_SCHEMA_VERSION, + model=IndexSchemaVersion, + ) + + @property + @contextmanager + def session(self): + """ + Provide a transactional scope around a series of operations. 
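+        Commits on success, rolls back on any exception, and always closes:
+
+            with self.session as session:
+                session.add(record)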
+ """ + session = self.Session() + + try: + yield session + session.commit() + except Exception: + session.rollback() + raise + finally: + session.close() + def ids( self, limit=100, @@ -250,9 +294,7 @@ def add( ) try: - checked_urls_metadata = check_urls_metadata( - urls_metadata, record, session - ) + checked_urls_metadata = check_urls_metadata(urls_metadata, record) record.url_metadata = checked_urls_metadata if self.config.get("Add_PREFIX_ALIAS"): @@ -545,3 +587,7 @@ def check_urls_metadata(urls_metadata, record): if url not in urls: raise UserError("url {} in urls_metadata does not exist".format(url)) return url_metadata + + +SCHEMA_MIGRATION_FUNCTIONS = [] +CURRENT_SCHEMA_VERSION = len(SCHEMA_MIGRATION_FUNCTIONS) diff --git a/indexd/single_table_settings.py b/indexd/single_table_settings.py new file mode 100644 index 00000000..501ac1ab --- /dev/null +++ b/indexd/single_table_settings.py @@ -0,0 +1,21 @@ +from indexd import default_settings +from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver + +# - DEFAULT_PREFIX: prefix to be prepended. +# - PREPEND_PREFIX: the prefix is preprended to the generated GUID when a +# new record is created WITHOUT a provided GUID. +# - ADD_PREFIX_ALIAS: aliases are created for new records - "". +# Do NOT set both ADD_PREFIX_ALIAS and PREPEND_PREFIX to True, or aliases +# will be created as "". +default_settings.settings["config"]["INDEX"] = { + "driver": SingleTableSQLAlchemyIndexDriver( + "sqlite:///index.sq3", + echo=True, + index_config={ + "DEFAULT_PREFIX": "testprefix:", + "PREPEND_PREFIX": True, + "ADD_PREFIX_ALIAS": False, + }, + ) +} +settings = default_settings.settings diff --git a/tests/conftest.py b/tests/conftest.py index afe08d99..e35de950 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -15,8 +15,21 @@ from tests import default_test_settings -@pytest.fixture(scope="function", autouse=True) -def app(): +def single_table_settings(): + from indexd import default_settings + from indexd import single_table_settings + + importlib.reload(single_table_settings) + default_settings.settings = { + **default_settings.settings, + **default_test_settings.settings, + **single_table_settings.settings, + } + print(f"=========SINGLE TABLE==================") + print(f"{default_settings.settings}") + + +def default_settings(): from indexd import default_settings importlib.reload(default_settings) @@ -24,7 +37,15 @@ def app(): **default_settings.settings, **default_test_settings.settings, } + print(f"===========================") + print(f"{default_settings.settings}") + +@pytest.fixture( + scope="function", autouse=True, params=[default_settings, single_table_settings] +) +def app(request): + request.param() yield get_app() try: diff --git a/tests/default_test_settings.py b/tests/default_test_settings.py index 8dee48ea..7c78acd2 100644 --- a/tests/default_test_settings.py +++ b/tests/default_test_settings.py @@ -1,7 +1,6 @@ import os from indexd.default_settings import * -from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver CONFIG["DIST"] = [ { From 67247e62ef5a35429fb5800b1fd59afa8bf50692 Mon Sep 17 00:00:00 2001 From: BinamB Date: Thu, 28 Sep 2023 11:04:46 -0500 Subject: [PATCH 04/47] add to alchemy --- indexd/index/drivers/single_table_alchemy.py | 168 ++++++++++++++++++- 1 file changed, 160 insertions(+), 8 deletions(-) diff --git a/indexd/index/drivers/single_table_alchemy.py b/indexd/index/drivers/single_table_alchemy.py index 05dfc1d7..9e9064db 100644 --- 
a/indexd/index/drivers/single_table_alchemy.py +++ b/indexd/index/drivers/single_table_alchemy.py @@ -554,28 +554,180 @@ def add_version( def add_blank_version( self, current_guid, new_guid=None, file_name=None, uploader=None, authz=None ): - pass + """ + Add a blank record version given did. + If authz is not specified, acl/authz fields carry over from previous version. + """ + # if an authz is provided, ensure that user can actually create for that resource + authz_err_msg = "Auth error when attempting to update a record. User must have '{}' access on '{}' for service 'indexd'." + if authz: + try: + auth.authorize("create", authz) + except AuthError as err: + self.logger.error(authz_err_msg.format("create", authz)) + raise + + with self.session as session: + query = session.query(Record).filter_by(guid=current_guid) + + try: + old_record = query.one() + except NoResultFound: + raise NoRecordFound("no record found") + except MultipleResultsFound: + raise MultipleRecordsFound("multiple records found") + + old_authz = old_record.authz + try: + auth.authorize("update", old_authz) + except AuthError as err: + self.logger.error(authz_err_msg.format("update", old_authz)) + raise + + # handle the edgecase where new_guid matches the original doc's guid to + # prevent sqlalchemy FlushError + if new_guid == old_record.guid: + raise MultipleRecordsFound( + "{guid} already exists".format(guid=new_guid) + ) + + new_record = Record() + guid = new_guid + if not guid: + guid = str(uuid.uuid()) + if self.config.get("PEPREND_PREFIX"): + guid = self.config["DEFAULT_PREFIX"] + guid + + new_record.guid = guid + new_record.baseid = old_record.baseid + new_record.rev = str(uuid.uuid4()) + new_record.file_name = old_record.file_name + new_record.uploader = old_record.uploader + + new_record.acl = [] + if not authz: + authz = old_authz + old_acl = old_record.acl + new_record.acl = old_acl + new_record.authz = authz + + try: + session.add(new_record) + session.commit() + except IntegrityError: + raise MultipleRecordsFound("{guid} already exists".format(guid=guid)) + + return new_record.guid, new_record.baseid, new_record.rev def get_all_versions(self, guid): - pass + """ + Get all record versions (in order of creation) given DID + """ + ret = dict() + with self.session as session: + query = session.query(Record) + query = query.filter(Record.guid == guid) + + try: + record = query.one() + baseid = record.baseid + except NoResultFound: + record = session.query(Record).filter_by(base_id=did).first() + if not record: + raise NoRecordFound("no record found") + else: + baseid = record.baseid + except MultipleResultsFound: + raise MultipleRecordsFound("multiple records found") + + query = session.query(Record) + records = ( + query.filter(Record.baseid == baseid) + .order_by(Record.created_date.asc()) + .all() + ) + + for idx, record in enumerate(records): + ret[idx] = record.to_document_dict() + + return ret def get_latest_version(self, guid, has_version=None): - pass + """ + Get the lattest record version given did + """ + with self.session as session: + query = session.query(Record) + query = query.filter(Record.guid == guid) + + try: + record = query.one() + baseid = record.baseid + except NoResultFound: + baseid = guid + except MultipleResultsFound: + raise MultipleRecordsFound("multiple records found") + + query = session.query(Record) + query = query.filter(Record.baseid == baseid).order_by( + Record.created_date.desc() + ) + + if has_version: + query = query.filter(Record.version.isnot(None)) + record = 
query.first() + if not record: + raise NoRecordFound("no record found") + + return record.to_document_dict() def health_check(self): - pass + """ + Does a health check of the backend. + """ + with self.session as session: + try: + query = session.execute("SELECT 1") # pylint: disable=unused-variable + except Exception: + raise UnhealthyCheck() + + return True def __contains__(self, guid): - pass + """ + Returns True if record is stored by backend. + Returns False otherwise. + """ + with self.session as session: + query = session.query(Record) + query = query.filter(Record.guid == record) + + return query.exists() def __iter__(self): - pass + """ + Iterator over unique records stored by backend. + """ + with self.session as session: + for i in session.query(Record): + yield i.did def totalbytes(self): - pass + """ + Total number of bytes of data represented in the index. + """ + with self.session as session: + result = session.execute(select([func.sum(Record.size)])).scalar() + if result is None: + return 0 + return int(result) def len(self): - pass + """ + Number of unique records stored by backend. + """ + with self.session as session: + return session.execute(select([func.count()]).select_from(Record)).scalar() def check_urls_metadata(urls_metadata, record): From d136b6ce8a406a667d7511405a9b4600bc101153 Mon Sep 17 00:00:00 2001 From: BinamB Date: Fri, 13 Oct 2023 10:06:55 -0500 Subject: [PATCH 05/47] fixing things --- indexd/index/drivers/single_table_alchemy.py | 11 +++-- .../76a334ef8472_create_single_table.py | 47 +++++++++++++++++++ .../test_76a334ef8472_create_single_table.py | 5 ++ 3 files changed, 59 insertions(+), 4 deletions(-) create mode 100644 migrations/versions/76a334ef8472_create_single_table.py create mode 100644 tests/postgres/migrations/test_76a334ef8472_create_single_table.py diff --git a/indexd/index/drivers/single_table_alchemy.py b/indexd/index/drivers/single_table_alchemy.py index 9e9064db..b55bdfe6 100644 --- a/indexd/index/drivers/single_table_alchemy.py +++ b/indexd/index/drivers/single_table_alchemy.py @@ -4,6 +4,7 @@ from cdislogging import get_logger from sqlalchemy import Column, String, ForeignKey, BigInteger, DateTime, ARRAY from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.exc import IntegrityError, ProgrammingError from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker from contextlib import contextmanager @@ -20,11 +21,11 @@ class Record(Base): Base index record representation. 
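+    Backed by one wide table; created by migration 76a334ef8472_create_single_table.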
""" - __tablename__ = "record" + __tablename__ = "Record" guid = Column(String, primary_key=True) - baseid = Column(String, ForeignKey("base_version.baseid"), index=True) + baseid = Column(String, index=True) rev = Column(String) form = Column(String) size = Column(BigInteger, index=True) @@ -734,11 +735,13 @@ def check_urls_metadata(urls_metadata, record): """ create url metadata record in database """ - urls = {u.url for u in record.urls} + print("--------------------------------") + urls = {u for u in record.urls} + print(urls) for url, url_metadata in urls_metadata.items(): if url not in urls: raise UserError("url {} in urls_metadata does not exist".format(url)) - return url_metadata + return urls_metadata SCHEMA_MIGRATION_FUNCTIONS = [] diff --git a/migrations/versions/76a334ef8472_create_single_table.py b/migrations/versions/76a334ef8472_create_single_table.py new file mode 100644 index 00000000..132263ea --- /dev/null +++ b/migrations/versions/76a334ef8472_create_single_table.py @@ -0,0 +1,47 @@ +"""create_single_table + + +Revision ID: 76a334ef8472 +Revises: a72f117515c5 +Create Date: 2023-10-11 16:01:13.463855 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "76a334ef8472" # pragma: allowlist secret +down_revision = "a72f117515c5" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_table( + "Record", + sa.Column("guid", String, primary_key=True), + sa.Column("baseid", String, index=True), + sa.Column("rev", String), + sa.Column("form", String), + sa.Column("size", BigInteger, index=True), + sa.Column("created_date", DateTime, default=datetime.datetime.utcnow), + sa.Column("updated_date", DateTime, default=datetime.datetime.utcnow), + sa.Column("file_name", String, index=True), + sa.Column("version", String, index=True), + sa.Column("uploader", String, index=True), + sa.Column("description", String), + sa.Column("content_created_date", DateTime), + sa.Column("content_updated_date", DateTime), + sa.Column("hashes", JSONB), + sa.Column("acl", ARRAY(String)), + sa.Column("authz", ARRAY(String)), + sa.Column("urls", ARRAY(String)), + sa.Column("record_metadata", JSONB), + sa.Column("url_metadata", JSONB), + sa.Column("alias", ARRAY(String)), + ) + + +def downgrade() -> None: + op.drop_table("Record") diff --git a/tests/postgres/migrations/test_76a334ef8472_create_single_table.py b/tests/postgres/migrations/test_76a334ef8472_create_single_table.py new file mode 100644 index 00000000..43bd3e7a --- /dev/null +++ b/tests/postgres/migrations/test_76a334ef8472_create_single_table.py @@ -0,0 +1,5 @@ +from alembic.config import main as alembic_main + + +def test_upgrade(postgres_driver): + conn = postgres_driver.engine.connect() From dfbca20c679f92f2174b0e5dc55f630875e55d03 Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 28 Nov 2023 10:19:33 -0600 Subject: [PATCH 06/47] pushing some stuff --- .secrets.baseline | 24 +- indexd/default_settings.py | 14 +- indexd/index/blueprint.py | 67 +- indexd/index/drivers/single_table_alchemy.py | 143 +++-- indexd/single_table_settings.py | 2 +- .../76a334ef8472_create_single_table.py | 47 -- .../bb3d7586a096_createsingletable.py | 47 ++ tests/conftest.py | 131 +++- .../test_76a334ef8472_create_single_table.py | 5 - .../test_bb3d7586a096_createsingletable.py | 52 ++ .../test_legacy_schema_migration.py | 28 +- tests/test_blueprint.py | 14 +- tests/test_client.py | 297 ++++++--- tests/test_driver_alchemy_auth.py | 68 +- tests/test_driver_alchemy_crud.py | 588 
+++++++++--------- 15 files changed, 946 insertions(+), 581 deletions(-) delete mode 100644 migrations/versions/76a334ef8472_create_single_table.py create mode 100644 migrations/versions/bb3d7586a096_createsingletable.py delete mode 100644 tests/postgres/migrations/test_76a334ef8472_create_single_table.py create mode 100644 tests/postgres/migrations/test_bb3d7586a096_createsingletable.py diff --git a/.secrets.baseline b/.secrets.baseline index 9f1caad5..b0bc170f 100644 --- a/.secrets.baseline +++ b/.secrets.baseline @@ -266,7 +266,7 @@ "filename": "tests/postgres/migrations/test_legacy_schema_migration.py", "hashed_secret": "5666c088b494f26cd8f63ace013992f5fc391ce0", "is_verified": false, - "line_number": 88 + "line_number": 91 } ], "tests/test_aliases_endpoints.py": [ @@ -328,70 +328,70 @@ "filename": "tests/test_client.py", "hashed_secret": "15a6d8daad1278efcaadc0d6e3d1dd2d9ebbc262", "is_verified": false, - "line_number": 1084 + "line_number": 1127 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "1b0d1a618b5c213dd792bbc3aa96ffa6bc370ef3", "is_verified": false, - "line_number": 1300 + "line_number": 1351 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "1170ace44158ff189902ff44597efef121623353", "is_verified": false, - "line_number": 1731 + "line_number": 1796 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "ff9c79b737b3ea7386618cc9437d3fb0a772182b", "is_verified": false, - "line_number": 2406 + "line_number": 2509 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "c8176f1e75e62e15dabaa4087fb7194451c8f6d2", "is_verified": false, - "line_number": 2409 + "line_number": 2512 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "d5198f8eddb1cbeb437899cd99e5ee97ab8531b4", "is_verified": false, - "line_number": 2409 + "line_number": 2512 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "02dc196562514eaa3e2feac1f441ccf6ad81e09d", "is_verified": false, - "line_number": 2413 + "line_number": 2516 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "f1cb2d91a95165a2ab909eadd9f7b65f312c7e2d", "is_verified": false, - "line_number": 2414 + "line_number": 2517 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "58db546de03270b55a4c889a5c5e6296b29fef25", "is_verified": false, - "line_number": 2415 + "line_number": 2518 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "b6c0bd08fde409c18760f32bef8705191840c402", "is_verified": false, - "line_number": 2416 + "line_number": 2519 } ], "tests/test_deprecated_aliases_endpoints.py": [ @@ -413,5 +413,5 @@ } ] }, - "generated_at": "2023-09-20T22:43:18Z" + "generated_at": "2023-11-28T16:14:34Z" } diff --git a/indexd/default_settings.py b/indexd/default_settings.py index a0fe3156..578d5ff4 100644 --- a/indexd/default_settings.py +++ b/indexd/default_settings.py @@ -1,7 +1,6 @@ from .index.drivers.alchemy import SQLAlchemyIndexDriver from .alias.drivers.alchemy import SQLAlchemyAliasDriver from .auth.drivers.alchemy import SQLAlchemyAuthDriver -from .index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver CONFIG = {} @@ -18,7 +17,7 @@ # will be created as "". 
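+# The single-table driver is configured separately in indexd/single_table_settings.py.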
CONFIG["INDEX"] = { "driver": SQLAlchemyIndexDriver( - "sqlite:///index.sq3", + "postgres://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret echo=True, index_config={ "DEFAULT_PREFIX": "testprefix:", @@ -28,7 +27,12 @@ ) } -CONFIG["ALIAS"] = {"driver": SQLAlchemyAliasDriver("sqlite:///alias.sq3", echo=True)} +CONFIG["ALIAS"] = { + "driver": SQLAlchemyAliasDriver( + "postgres://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret + echo=True, # pragma: allowlist secret + ) +} CONFIG["DIST"] = [ @@ -62,6 +66,8 @@ }, } -AUTH = SQLAlchemyAuthDriver("sqlite:///auth.sq3") +AUTH = SQLAlchemyAuthDriver( + "postgres://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret +) # pragma: allowlist secret settings = {"config": CONFIG, "auth": AUTH} diff --git a/indexd/index/blueprint.py b/indexd/index/blueprint.py index c7fb3c32..ee91f7c7 100644 --- a/indexd/index/blueprint.py +++ b/indexd/index/blueprint.py @@ -104,10 +104,8 @@ def get_index(form=None): validate_hashes(**hashes) hashes = hashes if hashes else None - metadata = flask.request.args.getlist("metadata") metadata = {k: v for k, v in (x.split(":", 1) for x in metadata)} - acl = flask.request.args.get("acl") if acl is not None: acl = [] if acl == "null" else acl.split(",") @@ -154,23 +152,27 @@ def get_index(form=None): negate_params=negate_params, ) else: - records = blueprint.index_driver.ids( - start=start, - limit=limit, - page=page, - size=size, - file_name=file_name, - version=version, - urls=urls, - acl=acl, - authz=authz, - hashes=hashes, - uploader=uploader, - ids=ids, - metadata=metadata, - urls_metadata=urls_metadata, - negate_params=negate_params, - ) + try: + records = blueprint.index_driver.ids( + start=start, + limit=limit, + page=page, + size=size, + file_name=file_name, + version=version, + urls=urls, + acl=acl, + authz=authz, + hashes=hashes, + uploader=uploader, + ids=ids, + metadata=metadata, + urls_metadata=urls_metadata, + negate_params=negate_params, + ) + except Exception as e: + print("--------id err--------------------------------") + print(e) base = { "ids": ids, @@ -186,9 +188,16 @@ def get_index(form=None): "authz": authz, "hashes": hashes, "metadata": metadata, + "urls_metadata": urls_metadata, } - - return flask.jsonify(base), 200 + try: + return flask.jsonify(base), 200 + except Exception as e: + print("------------jsonmiguous- ") + print(records) + for key, value in base.items(): + print(key, value, type(value)) + print(e) @blueprint.route("/urls/", methods=["GET"]) @@ -376,9 +385,13 @@ def get_index_record(record): Returns a record. 
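+    Resolves the id both with and without the default prefix
+    (via index_driver.get_with_nonstrict_prefix).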
""" - ret = blueprint.index_driver.get_with_nonstrict_prefix(record) + try: + ret = blueprint.index_driver.get_with_nonstrict_prefix(record) - return flask.jsonify(ret), 200 + return flask.jsonify(ret), 200 + except Exception as e: + print("-------------GET record----------------") + print(e) @blueprint.route("/index/", methods=["POST"]) @@ -537,8 +550,12 @@ def put_index_record(record): ) # authorize done in update - did, baseid, rev = blueprint.index_driver.update(record, rev, json) - + print("-----------pre driver-----------") + try: + did, baseid, rev = blueprint.index_driver.update(record, rev, json) + except Exception as e: + print(e) + print("-----------post driver-----------") ret = {"did": did, "baseid": baseid, "rev": rev} return flask.jsonify(ret), 200 diff --git a/indexd/index/drivers/single_table_alchemy.py b/indexd/index/drivers/single_table_alchemy.py index b55bdfe6..67a3db3f 100644 --- a/indexd/index/drivers/single_table_alchemy.py +++ b/indexd/index/drivers/single_table_alchemy.py @@ -9,6 +9,8 @@ from sqlalchemy.orm import sessionmaker from contextlib import contextmanager +from indexd import auth +from indexd.errors import UserError, AuthError from indexd.index.driver import IndexDriverABC from indexd.index.drivers.alchemy import IndexSchemaVersion from indexd.utils import migrate_database @@ -21,7 +23,7 @@ class Record(Base): Base index record representation. """ - __tablename__ = "Record" + __tablename__ = "record" guid = Column(String, primary_key=True) @@ -45,6 +47,60 @@ class Record(Base): url_metadata = Column(JSONB) alias = Column(ARRAY(String)) + def to_document_dict(self): + """ + Get the full index document + """ + try: + # TODO: some of these fields may not need to be a variable and could directly go to the return object -Binam + urls = self.urls + acl = self.acl + authz = self.authz + hashes = self.hashes + record_metadata = self.record_metadata + url_metadata = self.url_metadata + created_date = self.created_date.isoformat() + updated_date = self.updated_date.isoformat() + content_created_date = ( + self.content_created_date.isoformat() + if self.content_created_date is not None + else None + ) + content_updated_date = ( + self.content_updated_date.isoformat() + if self.content_updated_date is not None + else None + ) + + print("----------------------def to doc------------------------") + print(url_metadata) + print(self.guid) + + return { + "did": self.guid, + "baseid": self.baseid, + "rev": self.rev, + "size": self.size, + "file_name": self.file_name, + "version": self.version, + "uploader": self.uploader, + "urls": urls, + "url_metadata": url_metadata, + "acl": acl, + "authz": authz, + "hashes": hashes, + "metadata": record_metadata, + "form": self.form, + "created_date": created_date, + "updated_date": updated_date, + "description": self.description, + "content_created_date": content_created_date, + "content_updated_date": content_updated_date, + } + except Exception as e: + print("---------to doc dict---------") + print(e) + class SingleTableSQLAlchemyIndexDriver(IndexDriverABC): def __init__(self, conn, logger=None, index_config=None, **config): @@ -101,6 +157,7 @@ def ids( ids=None, urls_metadata=None, negate_params=None, + page=None, ): with self.session as session: query = session.query(Record) @@ -122,33 +179,44 @@ def ids( if urls: for u in urls: - query = query.filter(Record.urls.contains(u)).all() + query = query.filter(Record.urls == u).all() if acl: for u in acl: - query = query.filter(Record.acl.contains(u).all()) + query = 
query.filter(Record.acl == u).all() elif acl == []: query = query.filter(Record.acl == None) if authz: for u in authz: - query = query.filter(Record.authz.contains(u)).all() + query = query.filter(Record.authz == u).all() elif authz == []: query = query.filter(Record.authz == None) if hashes: for h, v in hashes.items(): - query = query.filter(Record.hashes.contains({h: v})) + query = query.filter(Record.hashes == {h: v}) if metadata: for k, v in metadata.items(): - query = query.filter(Record.metadata.contains({k: v})) + query = query.filter(Record.record_metadata[k].astext == v) if urls_metadata: + print("============if urlsmetadata============") + print(urls_metadata) for url_key, url_dict in urls_metadata.items(): - query = query.filter(Record.urls_metadata.contains(url_key)) + query = query.filter(Record.url_metadata.op("?")(url_key)) for k, v in url_dict.items(): - query = query.filter(Record.urls_metadata.any({k: v})) + print("----------kv ----------") + print(k, v) + # query = query.filter( + # Record.url_metadata.any( + # Record.url_metadata.op('->>')(k) == v + # ) + # ) + query = query.filter( + Record.url_metadata.op("->>")(k).astext == v + ) if negate_params: query = self._negate_filter(session, query, **negate_params) @@ -184,6 +252,9 @@ def ids( if page is not None: query = query.offset(limit * page) + print("---------ids----------") + print(query.statement) + return [i.to_document_dict() for i in query] def get_urls(self, size=None, hashes=None, ids=None, start=0, limit=100): @@ -236,7 +307,7 @@ def add( ): """ Creates a new record given size, urls, acl, authz, hashes, metadata, - urls_metadata file name and version + url_metadata file name and version if guid is provided, update the new record with the guid otherwise create it """ @@ -245,7 +316,7 @@ def add( authz = authz or [] hashes = hashes or {} metadata = metadata or {} - urls_metadata = urls_metadata or {} + url_metadata = urls_metadata or {} with self.session as session: record = Record() @@ -279,7 +350,7 @@ def add( record.hashes = hashes - record.metadata = metadata + record.record_metadata = metadata record.description = description @@ -295,8 +366,8 @@ def add( ) try: - checked_urls_metadata = check_urls_metadata(urls_metadata, record) - record.url_metadata = checked_urls_metadata + checked_url_metadata = check_url_metadata(url_metadata, record) + record.url_metadata = checked_url_metadata if self.config.get("Add_PREFIX_ALIAS"): self.add_prefix_alias(record, session) @@ -362,8 +433,8 @@ def update(self, guid, rev, changing_fields): "urls", "acl", "authz", - "metadata", - "urls_metadata", + "record_metadata", + "url_metadata", "content_created_date", "content_updated_date", ] @@ -374,7 +445,7 @@ def update(self, guid, rev, changing_fields): try: record = query.one() except NoResultFound: - raise NoRecordFound("no record found") + raise NoRecordFound("no Record found") except MultipleResultsFound: raise MultipleRecordsFound("multiple records found") @@ -382,7 +453,7 @@ def update(self, guid, rev, changing_fields): raise RevisionMismatch("Revision mismatch") # Some operations are dependant on other operations. For example - # urls has to be updated before urls_metadata because of schema + # urls has to be updated before url_metadata because of schema # constraints. 
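+            # check_url_metadata() validates against record.urls, so urls must
+            # already hold the incoming values when it runs.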
if "urls" in changing_fields: session.delete(record.urls) @@ -411,17 +482,17 @@ def update(self, guid, rev, changing_fields): raise if "metadata" in changing_fields: - session.delete(record.metadata) + session.delete(record.record_metadata) - record.metadata = changing_fields["metadata"].items() + record.record_metadata = changing_fields["record_metadata"].items() - if "urls_metadata" in changing_fields: + if "url_metadata" in changing_fields: session.delete(record.url_metadata) - checked_urls_metadata = check_urls_metadata( - changing_fields["urls_metadata"], record + checked_url_metadata = check_url_metadata( + changing_fields["url_metadata"], record ) - record.url_metadata = checked_urls_metadata + record.url_metadata = checked_url_metadata if changing_fields.get("content_created_date") is not None: record.content_created_date = datetime.datetime.fromisoformat( @@ -430,7 +501,7 @@ def update(self, guid, rev, changing_fields): if changing_fields.get("content_updated_date") is not None: if record.content_created_date is None: raise UserError( - "Cannot set content_updated_date on record that does not have a content_created_date" + "Cannot set content_updated_date on Record that does not have a content_created_date" ) if record.content_created_date > datetime.datetime.fromisoformat( changing_fields["content_updated_date"] @@ -487,7 +558,7 @@ def add_version( size=None, file_name=None, metadata=None, - urls_metadata=None, + url_metadata=None, version=None, urls=None, acl=None, @@ -505,7 +576,7 @@ def add_version( authz = authz or [] hashes = hashes or {} metadata = metadata or {} - urls_metadata = urls_metadata or {} + url_metadata = url_metadata or {} with self.session as session: query = session.query(Record).filter_by(guid=current_guid) @@ -520,7 +591,7 @@ def add_version( auth.authorize("update", [u.resource for u in record.authz] + authz) baseid = record.baseid - record = IndexRecord() + record = Record() guid = new_guid if not guid: guid = str(uuid.uuid4()) @@ -541,8 +612,8 @@ def add_version( record.acl = acl record.authz = authz record.hashes = hashes - record.metadata = metadata - record.url_metadata = check_urls_metadata(urls_metadata, record) + record.record_metadata = metadata + record.url_metadata = check_url_metadata(url_metadata, record) try: session.add(record) @@ -675,7 +746,7 @@ def get_latest_version(self, guid, has_version=None): ) if has_version: - query = query.filter(Record.version.isnot(None)) + query = query.filter(record.version.isnot(None)) record = query.first() if not record: raise NoRecordFound("no record found") @@ -694,7 +765,7 @@ def health_check(self): return True - def __contains__(self, guid): + def __contains__(self, record): """ Returns True if record is stored by backend. Returns False otherwise. 
@@ -731,17 +802,15 @@ def len(self): return session.execute(select([func.count()]).select_from(Record)).scalar() -def check_urls_metadata(urls_metadata, record): +def check_url_metadata(url_metadata, record): """ create url metadata record in database """ - print("--------------------------------") urls = {u for u in record.urls} - print(urls) - for url, url_metadata in urls_metadata.items(): + for url, metadata in url_metadata.items(): if url not in urls: - raise UserError("url {} in urls_metadata does not exist".format(url)) - return urls_metadata + raise UserError("url {} in url_metadata does not exist".format(url)) + return url_metadata SCHEMA_MIGRATION_FUNCTIONS = [] diff --git a/indexd/single_table_settings.py b/indexd/single_table_settings.py index 501ac1ab..461d9f99 100644 --- a/indexd/single_table_settings.py +++ b/indexd/single_table_settings.py @@ -9,7 +9,7 @@ # will be created as "". default_settings.settings["config"]["INDEX"] = { "driver": SingleTableSQLAlchemyIndexDriver( - "sqlite:///index.sq3", + "postgres://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret echo=True, index_config={ "DEFAULT_PREFIX": "testprefix:", diff --git a/migrations/versions/76a334ef8472_create_single_table.py b/migrations/versions/76a334ef8472_create_single_table.py deleted file mode 100644 index 132263ea..00000000 --- a/migrations/versions/76a334ef8472_create_single_table.py +++ /dev/null @@ -1,47 +0,0 @@ -"""create_single_table - - -Revision ID: 76a334ef8472 -Revises: a72f117515c5 -Create Date: 2023-10-11 16:01:13.463855 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = "76a334ef8472" # pragma: allowlist secret -down_revision = "a72f117515c5" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.create_table( - "Record", - sa.Column("guid", String, primary_key=True), - sa.Column("baseid", String, index=True), - sa.Column("rev", String), - sa.Column("form", String), - sa.Column("size", BigInteger, index=True), - sa.Column("created_date", DateTime, default=datetime.datetime.utcnow), - sa.Column("updated_date", DateTime, default=datetime.datetime.utcnow), - sa.Column("file_name", String, index=True), - sa.Column("version", String, index=True), - sa.Column("uploader", String, index=True), - sa.Column("description", String), - sa.Column("content_created_date", DateTime), - sa.Column("content_updated_date", DateTime), - sa.Column("hashes", JSONB), - sa.Column("acl", ARRAY(String)), - sa.Column("authz", ARRAY(String)), - sa.Column("urls", ARRAY(String)), - sa.Column("record_metadata", JSONB), - sa.Column("url_metadata", JSONB), - sa.Column("alias", ARRAY(String)), - ) - - -def downgrade() -> None: - op.drop_table("Record") diff --git a/migrations/versions/bb3d7586a096_createsingletable.py b/migrations/versions/bb3d7586a096_createsingletable.py new file mode 100644 index 00000000..d4f51f87 --- /dev/null +++ b/migrations/versions/bb3d7586a096_createsingletable.py @@ -0,0 +1,47 @@ +"""CreateSingleTable + +Revision ID: bb3d7586a096 +Revises: a72f117515c5 +Create Date: 2023-10-24 14:46:03.868952 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import JSONB + + +# revision identifiers, used by Alembic. 
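+# "pragma: allowlist secret" keeps detect-secrets from flagging the revision ids.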
+revision = "bb3d7586a096" # pragma: allowlist secret +down_revision = "a72f117515c5" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_table( + "record", + sa.Column("guid", sa.VARCHAR(), primary_key=True), + sa.Column("baseid", sa.VARCHAR(), index=True), + sa.Column("rev", sa.VARCHAR()), + sa.Column("form", sa.VARCHAR()), + sa.Column("size", sa.BIGINT(), index=True), + sa.Column("created_date", sa.DateTime, nullable=True), + sa.Column("updated_date", sa.DateTime, nullable=True), + sa.Column("file_name", sa.VARCHAR(), index=True), + sa.Column("version", sa.VARCHAR(), index=True), + sa.Column("uploader", sa.VARCHAR(), index=True), + sa.Column("description", sa.VARCHAR()), + sa.Column("content_created_date", sa.DateTime), + sa.Column("content_updated_date", sa.DateTime), + sa.Column("hashes", JSONB()), + sa.Column("acl", sa.ARRAY(sa.VARCHAR())), + sa.Column("authz", sa.ARRAY(sa.VARCHAR())), + sa.Column("urls", sa.ARRAY(sa.VARCHAR())), + sa.Column("record_metadata", JSONB()), + sa.Column("url_metadata", JSONB()), + sa.Column("alias", sa.ARRAY(sa.VARCHAR())), + ) + + +def downgrade() -> None: + op.drop_table("record") diff --git a/tests/conftest.py b/tests/conftest.py index e35de950..01e16cd8 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,66 +2,145 @@ import importlib import pytest import requests +from sqlalchemy import create_engine import mock from unittest.mock import patch # indexd_server and indexd_client is needed as fixtures -from cdisutilstest.code.indexd_fixture import clear_database +# from cdisutilstest.code.indexd_fixture import clear_database from gen3authz.client.arborist.client import ArboristClient from indexd import get_app from indexd import auth from indexd.auth.errors import AuthError -from tests import default_test_settings - - -def single_table_settings(): +from indexd.index.drivers.alchemy import Base as index_base +from indexd.auth.drivers.alchemy import Base as auth_base +from indexd.alias.drivers.alchemy import Base as alias_base +from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver +from indexd.alias.drivers.alchemy import SQLAlchemyAliasDriver +from indexd.auth.drivers.alchemy import SQLAlchemyAuthDriver + +POSTGRES_CONNECTION = "postgres://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret + + +def clear_database(): + engine = create_engine(POSTGRES_CONNECTION) + + with engine.connect() as conn: + # Clear the Index records + index_driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + table_delete_order = [ + "index_record_url_metadata", + "index_record_url", + "index_record_hash", + "index_record_authz", + "index_record_ace", + "index_record_alias", + "index_record_metadata", + "alias_record_hash", + "index_record", + "drs_bundle_record", + "base_version", + "record", + ] + + for table_name in table_delete_order: + delete_statement = f"DELETE FROM {table_name}" + conn.execute(delete_statement) + + # Clear the Alias records + alias_driver = SQLAlchemyAliasDriver(POSTGRES_CONNECTION) + for model in alias_base.__subclasses__(): + table = model.__table__ + delete_statement = table.delete() + conn.execute(delete_statement) + + # Clear the Auth records + auth_driver = SQLAlchemyAuthDriver(POSTGRES_CONNECTION) + for model in auth_base.__subclasses__(): + table = model.__table__ + delete_statement = table.delete() + conn.execute(delete_statement) + + +@pytest.fixture(scope="function", params=["default_settings", "single_table_settings"]) +def combined_default_and_single_table_settings(request): 
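+    """
+    Run every test once per index backend: the default multi-table driver
+    and the single-table driver (request.param selects the settings module).
+    """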
     from indexd import default_settings
-    from indexd import single_table_settings
+    from tests import default_test_settings
+
+    # Load the default settings
+    if request.param == "default_settings":
+        importlib.reload(default_settings)
+        default_settings.settings = {
+            **default_settings.settings,
+            **default_test_settings.settings,
+        }
+        yield get_app(default_settings.settings)
+
+    # Load the single-table settings
+    elif request.param == "single_table_settings":
+        from indexd import single_table_settings
+
+        importlib.reload(single_table_settings)
+        single_table_settings.settings = {
+            **default_test_settings.settings,
+            **single_table_settings.settings,
+        }
+        yield get_app(single_table_settings.settings)
 
-    importlib.reload(single_table_settings)
-    default_settings.settings = {
-        **default_settings.settings,
-        **default_test_settings.settings,
-        **single_table_settings.settings,
-    }
-    print(f"=========SINGLE TABLE==================")
-    print(f"{default_settings.settings}")
+    try:
+        clear_database()
+    except Exception:
+        pass
 
 
-def default_settings():
+@pytest.fixture(scope="function", autouse=True)
+def app():
     from indexd import default_settings
+    from tests import default_test_settings
 
     importlib.reload(default_settings)
     default_settings.settings = {
         **default_settings.settings,
         **default_test_settings.settings,
     }
-    print(f"===========================")
-    print(f"{default_settings.settings}")
-
-
-@pytest.fixture(
-    scope="function", autouse=True, params=[default_settings, single_table_settings]
-)
-def app(request):
-    request.param()
     yield get_app()
 
     try:
         clear_database()
     except Exception:
         pass
 
 
 @pytest.fixture
 def user(app):
-    app.auth.add("test", "test")
+    driver = SQLAlchemyAuthDriver(POSTGRES_CONNECTION)
+
+    try:
+        driver.add("test", "test")
+    except Exception:
+        # the "test" user may already exist if a previous teardown did not run
+        pass
+
     yield {
         "Authorization": ("Basic " + base64.b64encode(b"test:test").decode("ascii")),
         "Content-Type": "application/json",
     }
-    app.auth.delete("test")
+
+    driver.delete("test")
 
 
 @pytest.fixture
diff --git a/tests/postgres/migrations/test_76a334ef8472_create_single_table.py b/tests/postgres/migrations/test_76a334ef8472_create_single_table.py
deleted file mode 100644
index 43bd3e7a..00000000
--- a/tests/postgres/migrations/test_76a334ef8472_create_single_table.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from alembic.config import main as alembic_main
-
-
-def test_upgrade(postgres_driver):
-    conn = postgres_driver.engine.connect()
diff --git a/tests/postgres/migrations/test_bb3d7586a096_createsingletable.py b/tests/postgres/migrations/test_bb3d7586a096_createsingletable.py
new file mode 100644
index 00000000..52cd138c
--- /dev/null
+++ b/tests/postgres/migrations/test_bb3d7586a096_createsingletable.py
@@ -0,0 +1,52 @@
+from alembic.config import main as alembic_main
+
+
+def test_upgrade(postgres_driver):
+    conn = postgres_driver.engine.connect()
+
+    # state before migration
+    alembic_main(["--raiseerr", "downgrade", "a72f117515c5"])
+
+    # state after migration
+    alembic_main(["--raiseerr", "upgrade", "bb3d7586a096"])  # pragma: allowlist secret
+
+    get_columns = "SELECT column_name, data_type FROM 
information_schema.columns WHERE table_schema = 'public' AND table_name = 'record'" + + expected_schema = [ + ("guid", "character varying"), + ("baseid", "character varying"), + ("rev", "character varying"), + ("form", "character varying"), + ("size", "bigint"), + ("created_date", "timestamp without time zone"), + ("updated_date", "timestamp without time zone"), + ("file_name", "character varying"), + ("version", "character varying"), + ("uploader", "character varying"), + ("description", "character varying"), + ("content_created_date", "timestamp without time zone"), + ("content_updated_date", "timestamp without time zone"), + ("hashes", "jsonb"), + ("acl", "ARRAY"), + ("authz", "ARRAY"), + ("urls", "ARRAY"), + ("record_metadata", "jsonb"), + ("url_metadata", "jsonb"), + ("alias", "ARRAY"), + ] + + table_res = conn.execute(get_columns) + actual_schema = sorted([i for i in table_res]) + assert sorted(expected_schema) == actual_schema + + +def test_downgrade(postgres_driver): + conn = postgres_driver.engine.connect() + alembic_main(["--raiseerr", "downgrade", "a72f117515c5"]) + + # the database should not contain the 'record' table + tables_res = conn.execute( + "SELECT * FROM pg_catalog.pg_tables WHERE schemaname != 'pg_catalog' AND schemaname != 'information_schema';" + ) + tables = [i[1] for i in tables_res] + assert "record" not in tables diff --git a/tests/postgres/migrations/test_legacy_schema_migration.py b/tests/postgres/migrations/test_legacy_schema_migration.py index a4460d7c..fbbd2aaa 100644 --- a/tests/postgres/migrations/test_legacy_schema_migration.py +++ b/tests/postgres/migrations/test_legacy_schema_migration.py @@ -5,6 +5,7 @@ from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy import create_engine import sqlite3 import tests.util as util from indexd.index.drivers.alchemy import ( @@ -52,9 +53,12 @@ ], } +POSTGRES_CONNECTION = "postgres://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret -def update_version_table_for_testing(db, tb_name, val): - with sqlite3.connect(db) as conn: + +def update_version_table_for_testing(tb_name, val): + engine = create_engine(POSTGRES_CONNECTION) + with engine.connect() as conn: conn.execute( """\ CREATE TABLE IF NOT EXISTS {} (version INT)\ @@ -76,7 +80,6 @@ def update_version_table_for_testing(db, tb_name, val): tb_name, val ) ) - conn.commit() def test_migrate_acls(client, user, postgres_driver): @@ -105,7 +108,6 @@ def test_migrate_acls(client, user, postgres_driver): assert rec["metadata"] == {} -@util.removes("index.sq3") def test_migrate_index(): def test_migrate_index_internal(monkeypatch): called = [] @@ -121,8 +123,8 @@ def mock_migrate(**kwargs): [mock_migrate, mock_migrate], ) - update_version_table_for_testing("index.sq3", "index_schema_version", 0) - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + update_version_table_for_testing("index_schema_version", 0) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) assert len(called) == 2 with driver.session as s: @@ -133,7 +135,6 @@ def mock_migrate(**kwargs): return test_migrate_index_internal -@util.removes("index.sq3") def test_migrate_index_only_diff(): def test_migrate_index_only_diff_internal(monkeypatch): called = [] @@ -153,9 +154,9 @@ def mock_migrate_2(**kwargs): [mock_migrate, mock_migrate_2], ) - update_version_table_for_testing("index.sq3", "index_schema_version", 0) + update_version_table_for_testing("index_schema_version", 0) - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + driver = 
SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
 
     assert len(called) == 1
     assert len(called_2) == 0
@@ -163,8 +164,8 @@ def mock_migrate_2(**kwargs):
     called_2 = []
 
     monkeypatch.setattr("indexd.index.drivers.alchemy.CURRENT_SCHEMA_VERSION", 2)
-    update_version_table_for_testing("index.sq3", "index_schema_version", 1)
-    driver = SQLAlchemyIndexDriver("sqlite:///index.sq3")
+    update_version_table_for_testing("index_schema_version", 1)
+    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
 
     assert len(called) == 0
     assert len(called_2) == 1
@@ -175,7 +176,6 @@ def mock_migrate_2(**kwargs):
     return test_migrate_index_only_diff_internal
 
 
-@util.removes("alias.sq3")
 def test_migrate_alias():
     def test_migrate_alias_internal(monkeypatch):
         called = []
@@ -190,9 +190,9 @@ def mock_migrate(**kwargs):
 
         monkeypatch.setattr("indexd.utils.check_engine_for_migrate", lambda _: True)
 
-        update_version_table_for_testing("alias.sq3", "alias_schema_version", 0)
+        update_version_table_for_testing("alias_schema_version", 0)
 
-        driver = SQLAlchemyAliasDriver("sqlite:///alias.sq3")
+        driver = SQLAlchemyAliasDriver(POSTGRES_CONNECTION)
         assert len(called) == 1
         with driver.session as s:
             v = s.query(AliasSchemaVersion).first()
diff --git a/tests/test_blueprint.py b/tests/test_blueprint.py
index cfb85662..e9617fa3 100644
--- a/tests/test_blueprint.py
+++ b/tests/test_blueprint.py
@@ -13,9 +13,17 @@
 
 DIST_CONFIG = []
 
-INDEX_CONFIG = {"driver": SQLAlchemyIndexDriver("sqlite:///index.sq3")}
-
-ALIAS_CONFIG = {"driver": SQLAlchemyAliasDriver("sqlite:///alias.sq3")}
+INDEX_CONFIG = {
+    "driver": SQLAlchemyIndexDriver(
+        "postgres://postgres:postgres@localhost:5432/indexd_tests"  # pragma: allowlist secret
+    )
+}
+
+ALIAS_CONFIG = {
+    "driver": SQLAlchemyAliasDriver(
+        "postgres://postgres:postgres@localhost:5432/indexd_tests"  # pragma: allowlist secret
+    )
+}
 
 
 @util.removes("index.sq3")
diff --git a/tests/test_client.py b/tests/test_client.py
index eadc38f4..062d830c 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -29,14 +29,16 @@ def get_doc(
     return doc
 
 
-def test_index_list(client):
+def test_index_list(client, combined_default_and_single_table_settings):
     res = client.get("/index/")
     assert res.status_code == 200
     rec = res.json
     assert rec["records"] == []
 
 
-def test_index_list_with_params(client, user):
+def test_index_list_with_params(
+    client, user, combined_default_and_single_table_settings
+):
     data1 = get_doc()
     data1["urls"] = [
         "s3://endpointurl/bucket_2/key_2",
@@ -111,15 +113,15 @@ def test_index_list_with_params(client, user):
 
     param = {"bucket": {"state": "error", "other": "xxx"}}
 
     data_by_url_md = client.get("/index/?urls_metadata=" + json.dumps(param))
     assert data_by_url_md.status_code == 200
     data_list = data_by_url_md.json
     assert len(data_list["records"]) == 1
     assert data_list["records"][0]["did"] == rec_1["did"]
     assert data_list["records"][0]["urls_metadata"] == data1["urls_metadata"]
 
 
-def test_get_list_form_param(client, user):
+def test_get_list_form_param(client, user, combined_default_and_single_table_settings):
     """
     bundle1
       +-object1
@@ -155,7 +160,9 @@ def test_get_list_form_param(client, user):
     assert len(rec3["records"]) == 2 * n_records
 
 
-def test_get_list_form_with_params(client, user):
+def test_get_list_form_with_params(
+    client, user, combined_default_and_single_table_settings
+):
     n_records = 6
     for _ in range(n_records):
         did_list, _ = create_index(client, user)
@@ -232,7 
+239,7 @@ def test_get_list_form_with_params(client, user): assert rec_1["did"] in ids -def test_index_list_by_size(client, user): +def test_index_list_by_size(client, user, combined_default_and_single_table_settings): # post two records of different size data = get_doc() res = client.post("/index/", json=data, headers=user) @@ -248,7 +255,9 @@ def test_index_list_by_size(client, user): assert rec["records"][0]["size"] == 100 -def test_index_list_by_filename(client, user): +def test_index_list_by_filename( + client, user, combined_default_and_single_table_settings +): # post three records of different name data = get_doc() res = client.post("/index/", json=data, headers=user) @@ -267,7 +276,7 @@ def test_index_list_by_filename(client, user): assert rec["records"][0]["file_name"] == data["file_name"] -def test_index_list_by_authz(client, user): +def test_index_list_by_authz(client, user, combined_default_and_single_table_settings): # post three records of different authz data = get_doc() res = client.post("/index/", json=data, headers=user) @@ -286,7 +295,9 @@ def test_index_list_by_authz(client, user): assert sorted(rec["records"][0]["authz"]) == sorted(data["authz"]) -def test_index_list_by_multiple_authz(client, user): +def test_index_list_by_multiple_authz( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["authz"] = ["abc"] @@ -307,7 +318,9 @@ def test_index_list_by_multiple_authz(client, user): assert sorted(rec["records"][0]["authz"]) == sorted(data["authz"]) -def test_index_list_by_multiple_acl(client, user): +def test_index_list_by_multiple_acl( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["acl"] = ["abc"] @@ -328,7 +341,7 @@ def test_index_list_by_multiple_acl(client, user): assert sorted(rec["records"][0]["acl"]) == sorted(data["acl"]) -def test_index_list_by_urls(client, user): +def test_index_list_by_urls(client, user, combined_default_and_single_table_settings): data = get_doc() data["urls"] = ["s3://bucket1"] @@ -361,7 +374,9 @@ def test_index_list_by_urls(client, user): assert sorted(rec["records"][0]["urls"]) == sorted(data["urls"]) -def test_index_list_by_version(client, user): +def test_index_list_by_version( + client, user, combined_default_and_single_table_settings +): # post three records of different version data = get_doc() res = client.post("/index/", json=data, headers=user) @@ -380,7 +395,9 @@ def test_index_list_by_version(client, user): assert rec["records"][0]["version"] == data["version"] -def test_index_list_with_params_negate(client, user): +def test_index_list_with_params_negate( + client, user, combined_default_and_single_table_settings +): data = get_doc() res_1 = client.post("/index/", json=data, headers=user) assert res_1.status_code == 200 @@ -481,7 +498,7 @@ def test_index_list_with_params_negate(client, user): assert rec_5["did"] in ids -def test_index_list_invalid_param(client): +def test_index_list_invalid_param(client, combined_default_and_single_table_settings): # test 400 when limit > 1024 res = client.get("/index/?limit=1025") assert res.status_code == 400 @@ -503,7 +520,9 @@ def test_index_list_invalid_param(client): assert res.status_code == 400 -def test_negate_filter_file_name(client, user): +def test_negate_filter_file_name( + client, user, combined_default_and_single_table_settings +): # post two records of different file name data1 = get_doc() data1["file_name"] = "test_file_name_1" @@ -529,7 +548,7 @@ def test_negate_filter_file_name(client, user): assert 
rec["records"][0]["file_name"] == data1["file_name"] -def test_negate_filter_acl(client, user): +def test_negate_filter_acl(client, user, combined_default_and_single_table_settings): # post two records of different acl data1 = get_doc() data1["acl"] = ["read"] @@ -554,7 +573,7 @@ def test_negate_filter_acl(client, user): assert sorted(rec["records"][0]["acl"]) == sorted(data1["acl"]) -def test_negate_filter_authz(client, user): +def test_negate_filter_authz(client, user, combined_default_and_single_table_settings): # post two records of different authz data1 = get_doc() data1["authz"] = ["admin"] @@ -579,7 +598,9 @@ def test_negate_filter_authz(client, user): assert sorted(rec["records"][0]["authz"]) == sorted(data1["authz"]) -def test_negate_filter_version(client, user): +def test_negate_filter_version( + client, user, combined_default_and_single_table_settings +): # post two records of different version data1 = get_doc() data1["version"] = "3" @@ -604,7 +625,9 @@ def test_negate_filter_version(client, user): assert rec["records"][0]["version"] == data1["version"] -def test_list_entries_with_uploader(client, user): +def test_list_entries_with_uploader( + client, user, combined_default_and_single_table_settings +): """ Test that return a list of record given uploader """ @@ -637,7 +660,9 @@ def test_list_entries_with_uploader(client, user): assert data_list["records"][1]["uploader"] == "uploader_123" -def test_list_entries_with_uploader_wrong_uploader(client, user): +def test_list_entries_with_uploader_wrong_uploader( + client, user, combined_default_and_single_table_settings +): """ Test that returns no record due to wrong uploader id """ @@ -661,7 +686,7 @@ def test_list_entries_with_uploader_wrong_uploader(client, user): assert len(data_list["records"]) == 0 -def test_create_blank_record(client, user): +def test_create_blank_record(client, user, combined_default_and_single_table_settings): """ Test that new blank records only contain the uploader and optionally file_name fields: test without file name @@ -685,7 +710,9 @@ def test_create_blank_record(client, user): assert_blank(rec) -def test_create_blank_record_with_file_name(client, user): +def test_create_blank_record_with_file_name( + client, user, combined_default_and_single_table_settings +): """ Test that new blank records only contain the uploader and optionally file_name fields: test with file name @@ -709,7 +736,9 @@ def test_create_blank_record_with_file_name(client, user): assert_blank(rec) -def test_create_blank_record_with_authz(client, use_mock_authz): +def test_create_blank_record_with_authz( + client, use_mock_authz, combined_default_and_single_table_settings +): """ Test that a new blank record can be created with a specified authz when the user has the expected access @@ -760,7 +789,7 @@ def test_create_blank_record_with_authz(client, use_mock_authz): assert res.status_code == 403, res.json -def test_create_blank_version(client, user): +def test_create_blank_version(client, user, combined_default_and_single_table_settings): """ Test that we can create a new, blank version of a record with POST /index/blank/{GUID}. The new blank version should @@ -783,7 +812,9 @@ def test_create_blank_version(client, user): ) original_doc_guid = res.json["did"] - def assert_acl_authz_and_baseid(acl, authz, baseid, guid): + def assert_acl_authz_and_baseid( + acl, authz, baseid, guid, combined_default_and_single_table_settings + ): """ Helper to GET record with specified guid and assert acl, authz, and baseid. 
@@ -833,7 +864,9 @@ def assert_acl_authz_and_baseid(acl, authz, baseid, guid): assert not new_blank_doc[field] -def test_create_blank_version_with_authz(client, user, use_mock_authz): +def test_create_blank_version_with_authz( + client, user, use_mock_authz, combined_default_and_single_table_settings +): """ Test that a new version of a blank record can be created with a different authz when the user has the expected access @@ -891,7 +924,9 @@ def test_create_blank_version_with_authz(client, user, use_mock_authz): assert not new_version[field] -def test_create_blank_version_specify_did(client, user): +def test_create_blank_version_specify_did( + client, user, combined_default_and_single_table_settings +): """ Test that we can specify the new GUID of a new, blank version of a record with POST /index/blank/{GUID}. @@ -940,7 +975,9 @@ def test_create_blank_version_specify_did(client, user): assert blank_doc_guid == specified_guid -def test_create_blank_version_specify_guid_already_exists(client, user): +def test_create_blank_version_specify_guid_already_exists( + client, user, combined_default_and_single_table_settings +): """ Test that if we try to specify the GUID of a new blank version, but the new GUID we specified already exists in the index, the operation fails with 409. @@ -999,7 +1036,9 @@ def test_create_blank_version_specify_guid_already_exists(client, user): ), "Request should have failed with 409 user error: {}".format(res.json) -def test_create_blank_version_no_existing_record(client, user): +def test_create_blank_version_no_existing_record( + client, user, combined_default_and_single_table_settings +): """ Test that attempts to create a blank version of a nonexisting GUID should fail with 404. @@ -1015,7 +1054,9 @@ def test_create_blank_version_no_existing_record(client, user): ), "Expected to fail to create new blank version, instead got {}".format(res.json) -def test_create_blank_version_blank_record(client, user): +def test_create_blank_version_blank_record( + client, user, combined_default_and_single_table_settings +): """ Test that attempts to create a blank version of a blank record should succeed @@ -1068,7 +1109,9 @@ def test_create_blank_version_blank_record(client, user): assert not blank_doc[field] -def test_fill_size_n_hash_for_blank_record(client, user): +def test_fill_size_n_hash_for_blank_record( + client, user, combined_default_and_single_table_settings +): """ Test that can fill size and hashes for empty record """ @@ -1098,7 +1141,9 @@ def test_fill_size_n_hash_for_blank_record(client, user): assert rec["hashes"]["md5"] == "8b9942cf415384b27cadf1f4d2d981f5" -def test_update_blank_record_with_authz(client, user, use_mock_authz): +def test_update_blank_record_with_authz( + client, user, use_mock_authz, combined_default_and_single_table_settings +): """ Test that a blank record (WITHOUT AUTHZ) can be updated with an authz when the user has the expected access @@ -1158,7 +1203,9 @@ def test_update_blank_record_with_authz(client, user, use_mock_authz): assert rec["authz"] == [new_authz2] # authz as provided -def test_update_blank_record_with_authz_new(client, user, use_mock_authz): +def test_update_blank_record_with_authz_new( + client, user, use_mock_authz, combined_default_and_single_table_settings +): """ Test that a blank record (WITH AUTHZ) can be updated with a different authz when the user has the expected access @@ -1222,7 +1269,9 @@ def test_update_blank_record_with_authz_new(client, user, use_mock_authz): assert rec["authz"] == [new_authz2] # authz as 
provided -def test_get_empty_acl_authz_record(client, user): +def test_get_empty_acl_authz_record( + client, user, combined_default_and_single_table_settings +): """ Test that can get a list of empty acl/authz given uploader """ @@ -1259,7 +1308,9 @@ def test_get_empty_acl_authz_record(client, user): assert data_list["records"][1]["authz"] == [] -def test_get_empty_acl_authz_record_after_fill_size_n_hash(client, user): +def test_get_empty_acl_authz_record_after_fill_size_n_hash( + client, user, combined_default_and_single_table_settings +): """ Test create blank record -> fill hash and size -> get record with empty or none acl/authz @@ -1353,7 +1404,9 @@ def test_get_empty_acl_authz_record_after_fill_size_n_hash(client, user): assert len(ids) == 2 -def test_cant_update_inexistent_blank_record(client, user): +def test_cant_update_inexistent_blank_record( + client, user, combined_default_and_single_table_settings +): # test that non-existent did throws 400 error data = {"size": 123, "hashes": {"md5": "8b9942cf415384b27cadf1f4d2d682e5"}} fake_did = "testprefix:455ffb35-1b0e-49bd-a4ab-3afe9f3aece9" @@ -1435,7 +1488,13 @@ def test_update_urls_metadata(client, user): ], ) def test_urls_metadata_partial_match( - client, doc_urls, urls_meta, params, expected, user + client, + doc_urls, + urls_meta, + params, + expected, + user, + combined_default_and_single_table_settings, ): url_doc_mapping = {} for url_group in doc_urls: @@ -1459,7 +1518,7 @@ def test_urls_metadata_partial_match( assert ids == {url_doc_mapping[url]["did"] for url in expected} -def test_get_urls(client, user): +def test_get_urls(client, user, combined_default_and_single_table_settings): data = get_doc(has_urls_metadata=True) response = client.post("/index/", json=data, headers=user) assert response.status_code == 200 @@ -1480,7 +1539,7 @@ def test_get_urls(client, user): assert record["urls"][0]["metadata"] == data["urls_metadata"][url] -def test_get_urls_size_0(client, user): +def test_get_urls_size_0(client, user, combined_default_and_single_table_settings): data = get_doc(has_urls_metadata=True) data["size"] = 0 response = client.post("/index/", json=data, headers=user) @@ -1495,7 +1554,7 @@ def test_get_urls_size_0(client, user): assert record["urls"][0]["metadata"] == data["urls_metadata"][url] -def test_index_create(client, user): +def test_index_create(client, user, combined_default_and_single_table_settings): data = get_doc(has_baseid=True) res = client.post("/index/", json=data, headers=user) @@ -1510,7 +1569,9 @@ def test_index_create(client, user): assert rec["authz"] == [] -def test_index_list_with_start(client, user): +def test_index_list_with_start( + client, user, combined_default_and_single_table_settings +): data = { "did": "testprefix:11111111-1111-1111-1111-111111111111", "form": "object", @@ -1542,7 +1603,7 @@ def test_index_list_with_start(client, user): assert rec3["did"] in dids -def test_index_list_with_page(client, user): +def test_index_list_with_page(client, user, combined_default_and_single_table_settings): data = { "did": "testprefix:11111111-1111-1111-1111-111111111111", "form": "object", @@ -1582,14 +1643,14 @@ def test_index_list_with_page(client, user): assert rec3["did"] in dids -def test_unauthorized_create(client): +def test_unauthorized_create(client, combined_default_and_single_table_settings): # test that unauthorized post throws 403 error data = get_doc() res = client.post("/index/", json=data) assert res.status_code == 403 -def test_index_get(client, user): +def test_index_get(client, 
user, combined_default_and_single_table_settings): data = get_doc(has_baseid=True) res = client.post("/index/", json=data, headers=user) @@ -1605,7 +1666,7 @@ def test_index_get(client, user): assert rec_2["did"] == rec["did"] -def test_get_id(client, user): +def test_get_id(client, user, combined_default_and_single_table_settings): # test getting an existing ID data = get_doc() res = client.post("/index/", json=data, headers=user) @@ -1625,7 +1686,7 @@ def test_get_id(client, user): assert res.status_code == 404 -def test_index_prepend_prefix(client, user): +def test_index_prepend_prefix(client, user, combined_default_and_single_table_settings): """ For index_config = { @@ -1658,7 +1719,9 @@ def test_index_prepend_prefix(client, user): assert rec_3["did"].startswith("testprefix:") -def test_index_get_with_baseid(client, user): +def test_index_get_with_baseid( + client, user, combined_default_and_single_table_settings +): data1 = get_doc(has_baseid=True) res = client.post("/index/", json=data1, headers=user) assert res.status_code == 200 @@ -1674,7 +1737,7 @@ def test_index_get_with_baseid(client, user): assert rec_2["did"] == rec_1["did"] -def test_delete_and_recreate(client, user): +def test_delete_and_recreate(client, user, combined_default_and_single_table_settings): """ Test that you can delete an IndexDocument and be able to recreate it with the same fields. @@ -1724,7 +1787,9 @@ def test_delete_and_recreate(client, user): assert new_data["hashes"]["md5"] == new_record["hashes"]["md5"] -def test_index_create_with_multiple_hashes(client, user): +def test_index_create_with_multiple_hashes( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["hashes"] = { "md5": "8b9942cf415384b27cadf1f4d2d682e5", @@ -1737,7 +1802,9 @@ def test_index_create_with_multiple_hashes(client, user): assert record["did"] -def test_index_create_with_valid_did(client, user): +def test_index_create_with_valid_did( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["did"] = "3d313755-cbb4-4b08-899d-7bbac1f6e67d" @@ -1747,7 +1814,9 @@ def test_index_create_with_valid_did(client, user): assert record["did"] == "3d313755-cbb4-4b08-899d-7bbac1f6e67d" -def test_index_create_with_acl_authz(client, user): +def test_index_create_with_acl_authz( + client, user, combined_default_and_single_table_settings +): data = { "acl": ["a", "b"], "authz": ["x", "y"], @@ -1767,7 +1836,9 @@ def test_index_create_with_acl_authz(client, user): assert sorted(record["authz"]) == ["x", "y"] -def test_index_create_with_duplicate_acl_authz(client, user): +def test_index_create_with_duplicate_acl_authz( + client, user, combined_default_and_single_table_settings +): data = { "acl": ["a", "b", "a"], "authz": ["x", "y", "x"], @@ -1787,7 +1858,9 @@ def test_index_create_with_duplicate_acl_authz(client, user): assert sorted(record["authz"]) == ["x", "y"] -def test_index_create_with_invalid_did(client, user): +def test_index_create_with_invalid_did( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["did"] = "3d313755-cbb4-4b0fdfdfd8-899d-7bbac1f6e67dfdd" @@ -1795,7 +1868,9 @@ def test_index_create_with_invalid_did(client, user): assert response.status_code == 400 -def test_index_create_with_prefix(client, user): +def test_index_create_with_prefix( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["did"] = "cdis:3d313755-cbb4-4b08-899d-7bbac1f6e67d" @@ -1805,7 +1880,9 @@ def test_index_create_with_prefix(client, 
user): assert record["did"] == "cdis:3d313755-cbb4-4b08-899d-7bbac1f6e67d" -def test_index_create_with_duplicate_did(client, user): +def test_index_create_with_duplicate_did( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["did"] = "3d313755-cbb4-4b08-899d-7bbac1f6e67d" @@ -1815,7 +1892,9 @@ def test_index_create_with_duplicate_did(client, user): assert response.status_code == 409 -def test_index_create_with_file_name(client, user): +def test_index_create_with_file_name( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["file_name"] = "abc" @@ -1828,7 +1907,9 @@ def test_index_create_with_file_name(client, user): assert rec["file_name"] == "abc" -def test_index_create_with_version(client, user): +def test_index_create_with_version( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["version"] = "ver_123" @@ -1841,7 +1922,9 @@ def test_index_create_with_version(client, user): assert rec["version"] == data["version"] -def test_create_blank_record_with_baseid(client, user): +def test_create_blank_record_with_baseid( + client, user, combined_default_and_single_table_settings +): doc = {"uploader": "uploader_123", "baseid": "baseid_123"} res = client.post("/index/blank/", json=doc, headers=user) @@ -1854,7 +1937,9 @@ def test_create_blank_record_with_baseid(client, user): assert_blank(rec) -def test_index_create_with_uploader(client, user): +def test_index_create_with_uploader( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["uploader"] = "uploader_123" res = client.post("/index/", json=data, headers=user) @@ -1866,7 +1951,9 @@ def test_index_create_with_uploader(client, user): assert rec["uploader"] == data["uploader"] -def test_index_get_global_endpoint(client, user): +def test_index_get_global_endpoint( + client, user, combined_default_and_single_table_settings +): data = get_doc() res = client.post("/index/", json=data, headers=user) @@ -1883,7 +1970,9 @@ def test_index_get_global_endpoint(client, user): assert rec["hashes"]["md5"] == data["hashes"]["md5"] -def test_index_add_prefix_alias(client, user): +def test_index_add_prefix_alias( + client, user, combined_default_and_single_table_settings +): """ For index_config = { @@ -1914,7 +2003,7 @@ def test_index_add_prefix_alias(client, user): ] = previous_add_alias_cfg -def test_index_update(client, user): +def test_index_update(client, user, combined_default_and_single_table_settings): # create record data = get_doc() res = client.post("/index/", json=data, headers=user) @@ -1971,7 +2060,9 @@ def test_index_update(client, user): assert rec_2["rev"] != rec["rev"] -def test_index_update_with_authz_check(client, user, use_mock_authz): +def test_index_update_with_authz_check( + client, user, use_mock_authz, combined_default_and_single_table_settings +): old_authz = "/programs/A" new_authz = "/programs/B" @@ -2006,7 +2097,9 @@ def test_index_update_with_authz_check(client, user, use_mock_authz): assert rec["authz"] == [new_authz] -def test_index_update_duplicate_acl_authz(client, user): +def test_index_update_duplicate_acl_authz( + client, user, combined_default_and_single_table_settings +): data = get_doc() res = client.post("/index/", json=data, headers=user) @@ -2037,7 +2130,9 @@ def test_index_update_duplicate_acl_authz(client, user): assert sorted(record["authz"]) == ["x", "y"] -def test_update_uploader_field(client, user): +def test_update_uploader_field( + client, user, 
combined_default_and_single_table_settings +): data = get_doc() data["uploader"] = "uploader_123" res = client.post("/index/", json=data, headers=user) @@ -2074,7 +2169,7 @@ def test_update_uploader_field(client, user): assert rec["uploader"] is None -def test_index_delete(client, user): +def test_index_delete(client, user, combined_default_and_single_table_settings): data = get_doc(has_metadata=False, has_baseid=False) res = client.post("/index/", json=data, headers=user) @@ -2098,7 +2193,7 @@ def test_index_delete(client, user): assert res.status_code == 404 -def test_create_index_version(client, user): +def test_create_index_version(client, user, combined_default_and_single_table_settings): data = get_doc(has_metadata=False, has_baseid=False) res = client.post("/index/", json=data, headers=user) @@ -2124,7 +2219,7 @@ def test_create_index_version(client, user): assert rec_2["did"] == dataNew["did"] -def test_get_latest_version(client, user): +def test_get_latest_version(client, user, combined_default_and_single_table_settings): data = get_doc(has_metadata=False, has_baseid=False, has_version=True) res_1 = client.post("/index/", json=data, headers=user) assert res_1.status_code == 200 @@ -2152,7 +2247,7 @@ def test_get_latest_version(client, user): assert rec_5["did"] == rec_1["did"] -def test_get_all_versions(client, user): +def test_get_all_versions(client, user, combined_default_and_single_table_settings): dids = [] # create 1st version @@ -2184,7 +2279,7 @@ def test_get_all_versions(client, user): assert record["did"] == dids[int(i)], "record id does not match" -def test_update_all_versions(client, user): +def test_update_all_versions(client, user, combined_default_and_single_table_settings): dids = [] mock_acl_A = ["mock_acl_A1", "mock_acl_A2"] mock_acl_B = ["mock_acl_B1", "mock_acl_B2"] @@ -2230,7 +2325,9 @@ def test_update_all_versions(client, user): assert sorted(version["authz"]) == sorted(mock_authz_B) -def test_update_all_versions_using_baseid(client, user): +def test_update_all_versions_using_baseid( + client, user, combined_default_and_single_table_settings +): mock_acl_A = ["mock_acl_A1", "mock_acl_A2"] mock_acl_B = ["mock_acl_B1", "mock_acl_B2"] mock_authz_A = ["mock_authz_A1", "mock_authz_A2"] @@ -2271,7 +2368,9 @@ def test_update_all_versions_using_baseid(client, user): assert sorted(version["authz"]) == sorted(mock_authz_B) -def test_update_all_versions_guid_not_found(client, user): +def test_update_all_versions_guid_not_found( + client, user, combined_default_and_single_table_settings +): bad_guid = "00000000-0000-0000-0000-000000000000" update_data = {"acl": ["mock_acl"], "authz": ["mock_authz"]} @@ -2284,7 +2383,9 @@ def test_update_all_versions_guid_not_found(client, user): ), "Expected update operation to fail with 404: {}".format(res.json) -def test_update_all_versions_fail_on_bad_metadata(client, user): +def test_update_all_versions_fail_on_bad_metadata( + client, user, combined_default_and_single_table_settings +): """ When making an update request, endpoint should return 400 (User error) if the metadata to update contains any fields that cannot be updated across all versions. 
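+
+    The failed request must leave every version untouched; the test re-reads
+    each version afterwards and checks that acl and authz are unchanged.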
@@ -2333,7 +2434,9 @@ def test_update_all_versions_fail_on_bad_metadata(client, user): assert sorted(version["authz"]) == sorted(mock_authz_A) -def test_update_all_versions_fail_on_missing_permissions(client, user, use_mock_authz): +def test_update_all_versions_fail_on_missing_permissions( + client, user, use_mock_authz, combined_default_and_single_table_settings +): """ If user does not have the 'update' permission on any record, request should fail with 403. @@ -2376,7 +2479,7 @@ def test_update_all_versions_fail_on_missing_permissions(client, user, use_mock_ ), "Expected operation to fail due to lack of user permissions: {}".format(res.json) -def test_index_stats(client, user): +def test_index_stats(client, user, combined_default_and_single_table_settings): # populate the index with three different size records data1 = get_doc() res = client.post("/index/", json=data1, headers=user) @@ -2418,7 +2521,7 @@ def test_index_stats(client, user): ("crc", "997a6f5c"), ], ) -def test_good_hashes(client, user, typ, h): +def test_good_hashes(client, user, typ, h, combined_default_and_single_table_settings): data = { "form": "object", "size": 123, @@ -2451,7 +2554,7 @@ def test_good_hashes(client, user, typ, h): ("sha512", "not valid"), ], ) -def test_bad_hashes(client, user, typ, h): +def test_bad_hashes(client, user, typ, h, combined_default_and_single_table_settings): data = { "form": "object", "size": 123, @@ -2472,7 +2575,7 @@ def test_bad_hashes(client, user, typ, h): assert "does not match" in json_resp["error"] -def test_dos_get(client, user): +def test_dos_get(client, user, combined_default_and_single_table_settings): data = get_doc(has_urls_metadata=True, has_metadata=True, has_baseid=True) res_1 = client.post("/index/", json=data, headers=user) @@ -2500,14 +2603,14 @@ def test_dos_get(client, user): assert rec_3["data_object"]["id"] == rec_1["did"] -def test_get_dos_record_error(client, user): +def test_get_dos_record_error(client, user, combined_default_and_single_table_settings): # test exception raised at nonexistent fake_did = "testprefix:d96bab16-c4e1-44ac-923a-04328b6fe78f" res = client.get("/ga4gh/dos/v1/dataobjects/" + fake_did) assert res.status_code == 404 -def test_dos_list(client, user): +def test_dos_list(client, user, combined_default_and_single_table_settings): data = get_doc(has_urls_metadata=True, has_metadata=True, has_baseid=True) res_1 = client.post("/index/", json=data, headers=user) @@ -2533,7 +2636,9 @@ def test_dos_list(client, user): ) -def test_update_without_changing_fields(client, user): +def test_update_without_changing_fields( + client, user, combined_default_and_single_table_settings +): # setup test data = get_doc(has_urls_metadata=True, has_metadata=True, has_baseid=True) @@ -2578,7 +2683,7 @@ def test_update_without_changing_fields(client, user): assert second_doc["version"] != third_doc["version"] -def test_bulk_get_documents(client, user): +def test_bulk_get_documents(client, user, combined_default_and_single_table_settings): # just make a bunch of entries in indexd dids = [ client.post("/index/", json=get_doc(has_baseid=True), headers=user).json["did"] @@ -2595,7 +2700,9 @@ def test_bulk_get_documents(client, user): @pytest.mark.parametrize("authz", [["/some/path"], []]) -def test_indexd_admin_authz(client, mock_arborist_requests, authz): +def test_indexd_admin_authz( + client, mock_arborist_requests, authz, combined_default_and_single_table_settings +): """ Test that admin users can perform an operation even if they don't have explicit access to do 
it. @@ -2626,17 +2733,17 @@ def test_indexd_admin_authz(client, mock_arborist_requests, authz): assert res.status_code == 200 # authorized -def test_status_check(client): +def test_status_check(client, combined_default_and_single_table_settings): res = client.get("/_status/") assert res.status_code == 200 -def test_version_check(client): +def test_version_check(client, combined_default_and_single_table_settings): res = client.get("/_version") assert res.status_code == 200 -def test_get_dist(client): +def test_get_dist(client, combined_default_and_single_table_settings): res = client.get("/_dist") assert res.status_code == 200 and res.json == [ { @@ -2648,7 +2755,9 @@ def test_get_dist(client): ] -def test_changing_timestamps_updated_not_before_created(client, user): +def test_changing_timestamps_updated_not_before_created( + client, user, combined_default_and_single_table_settings +): """ Checks that records cannot be updated to have a content_updated_date before the provided content_created_date """ @@ -2676,7 +2785,9 @@ def test_changing_timestamps_updated_not_before_created(client, user): assert update_obj_resp.status_code == 400 -def test_changing_none_timestamps(client, user): +def test_changing_none_timestamps( + client, user, combined_default_and_single_table_settings +): """ Checks that updates with null values are handled correctly """ @@ -2695,7 +2806,9 @@ def test_changing_none_timestamps(client, user): assert update_obj_resp.status_code == 200 -def test_changing_timestamps_no_updated_without_created(client, user): +def test_changing_timestamps_no_updated_without_created( + client, user, combined_default_and_single_table_settings +): """ Checks that records cannot be updated to have a content_updated_date when a content_created_date does not exist for the record and one is not provided in the update. @@ -2712,7 +2825,9 @@ def test_changing_timestamps_no_updated_without_created(client, user): assert update_obj_resp.status_code == 400 -def test_timestamps_updated_not_before_created(client, user): +def test_timestamps_updated_not_before_created( + client, user, combined_default_and_single_table_settings +): """ Checks that records cannot be created with a content_update_date that is before the content_created_date """ @@ -2723,7 +2838,9 @@ def test_timestamps_updated_not_before_created(client, user): assert create_obj_resp.status_code == 400 -def test_timestamps_no_updated_without_created(client, user): +def test_timestamps_no_updated_without_created( + client, user, combined_default_and_single_table_settings +): """ Checks that records cannot be created with a content_update_date without providing a content_created_date """ diff --git a/tests/test_driver_alchemy_auth.py b/tests/test_driver_alchemy_auth.py index ec81f819..1b391828 100644 --- a/tests/test_driver_alchemy_auth.py +++ b/tests/test_driver_alchemy_auth.py @@ -1,7 +1,7 @@ -import sqlite3 import hashlib import pytest +from sqlalchemy import create_engine import tests.util as util @@ -13,61 +13,53 @@ USERNAME = "abc" PASSWORD = "123" DIGESTED = SQLAlchemyAuthDriver.digest(PASSWORD) +POSTGRES_CONNECTION = "postgres://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret # TODO check if pytest has utilities for meta-programming of tests -@util.removes("auth.sq3") def test_driver_init_does_not_create_records(): """ Tests for creation of records after driver init. Tests driver init does not have unexpected side-effects. 
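+
+    The driver is constructed against the Postgres test database; construction
+    may create the schema, but it must not insert any rows into auth_record.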
""" - driver = SQLAlchemyAuthDriver( - "sqlite:///auth.sq3" - ) # pylint: disable=unused-variable - with sqlite3.connect("auth.sq3") as conn: - count = conn.execute( - """ - SELECT COUNT(*) FROM auth_record - """ - ).fetchone()[0] + engine = create_engine(POSTGRES_CONNECTION) + with engine.connect() as conn: + result = conn.execute("SELECT COUNT(*) FROM auth_record") + count = result.scalar() assert count == 0, "driver created records upon initilization" -@util.removes("auth.sq3") def test_driver_auth_accepts_good_creds(): """ Tests driver accepts good creds. """ - driver = SQLAlchemyAuthDriver("sqlite:///auth.sq3") - - with sqlite3.connect("auth.sq3") as conn: - conn.execute( - """ - INSERT INTO auth_record VALUES (?,?) - """, - (USERNAME, DIGESTED), + + driver = SQLAlchemyAuthDriver(POSTGRES_CONNECTION) + engine = create_engine(POSTGRES_CONNECTION) + with engine.connect() as conn: + result = conn.execute( + "INSERT INTO auth_record VALUES ('{}', '{}')".format(USERNAME, DIGESTED) ) driver.auth(USERNAME, PASSWORD) -@util.removes("auth.sq3") def test_driver_auth_rejects_bad_creds(): """ Test driver rejects bad creds. """ - driver = SQLAlchemyAuthDriver("sqlite:///auth.sq3") - - with sqlite3.connect("auth.sq3") as conn: - conn.execute( - """ - INSERT INTO auth_record VALUES (?, ?) - """, - (USERNAME, DIGESTED), + driver = SQLAlchemyAuthDriver( + "postgres://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret + ) + + engine = create_engine(POSTGRES_CONNECTION) + + with engine.connect() as conn: + result = conn.execute( + "INSERT INTO auth_record VALUES ('{}', '{}')".format(USERNAME, DIGESTED) ) with pytest.raises(AuthError): @@ -77,19 +69,19 @@ def test_driver_auth_rejects_bad_creds(): driver.auth("invalid_" + USERNAME, PASSWORD) -@util.removes("auth.sq3") def test_driver_auth_returns_user_context(): """ Tests driver accepts good creds. """ - driver = SQLAlchemyAuthDriver("sqlite:///auth.sq3") - - with sqlite3.connect("auth.sq3") as conn: - conn.execute( - """ - INSERT INTO auth_record VALUES (?,?) - """, - (USERNAME, DIGESTED), + driver = SQLAlchemyAuthDriver( + "postgres://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret + ) + + engine = create_engine(POSTGRES_CONNECTION) + + with engine.connect() as conn: + result = conn.execute( + "INSERT INTO auth_record VALUES ('{}', '{}')".format(USERNAME, DIGESTED) ) user = driver.auth(USERNAME, PASSWORD) diff --git a/tests/test_driver_alchemy_crud.py b/tests/test_driver_alchemy_crud.py index 4355ab15..ed13ee95 100644 --- a/tests/test_driver_alchemy_crud.py +++ b/tests/test_driver_alchemy_crud.py @@ -1,7 +1,7 @@ import uuid -import sqlite3 import pytest +from sqlalchemy import create_engine import tests.util as util @@ -17,78 +17,76 @@ # TODO check if pytest has utilities for meta-programming of tests +POSTGRES_CONNECTION = "postgres://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret -@util.removes("index.sq3") -def test_driver_init_does_not_create_records(): + +def test_driver_init_does_not_create_records( + combined_default_and_single_table_settings, +): """ Tests for creation of records after driver init. Tests driver init does not have unexpected side-effects. 
""" - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") - count = conn.execute( - """ - SELECT COUNT(*) FROM index_record - """ - ).fetchone()[0] + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) - assert count == 0, "driver created records upon initilization" + with engine.connect() as conn: + result = conn.execute("SELECT COUNT(*) FROM index_record") + count = result.scalar() + assert count == 0, "driver created records upon initialization" -@util.removes("index.sq3") -def test_driver_init_does_not_create_record_urls(): + +def test_driver_init_does_not_create_record_urls( + combined_default_and_single_table_settings, +): """ Tests for creation of urls after driver init. Tests driver init does not have unexpected side-effects. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") - count = conn.execute( - """ - SELECT COUNT(*) FROM index_record_url - """ - ).fetchone()[0] + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + + with engine.connect() as conn: + result = conn.execute("SELECT COUNT(*) FROM index_record_url") + count = result.scalar() assert count == 0, "driver created records urls upon initilization" -@util.removes("index.sq3") -def test_driver_init_does_not_create_record_hashes(): +def test_driver_init_does_not_create_record_hashes( + combined_default_and_single_table_settings, +): """ Tests for creation of hashes after driver init. Tests driver init does not have unexpected side-effects. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver( - "sqlite:///index.sq3" - ) # pylint: disable=unused-variable - count = conn.execute( - """ - SELECT COUNT(*) FROM index_record_hash - """ - ).fetchone()[0] + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + + with engine.connect() as conn: + result = conn.execute("SELECT COUNT(*) FROM index_record_hash") + count = result.scalar() assert count == 0, "driver created records hashes upon initilization" -@util.removes("index.sq3") -def test_driver_add_object_record(): +def test_driver_add_object_record(combined_default_and_single_table_settings): """ Tests creation of a record. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + + with engine.connect() as conn: driver.add("object") - count = conn.execute( - """ - SELECT COUNT(*) FROM index_record - """ - ).fetchone()[0] + result = conn.execute("SELECT COUNT(*) FROM index_record") + count = result.scalar() assert count == 1, "driver did not create record" @@ -105,42 +103,37 @@ def test_driver_add_object_record(): assert record[4] is None, "record size non-null" -@util.removes("index.sq3") -def test_driver_add_bundle_record(): +def test_driver_add_bundle_record(combined_default_and_single_table_settings): """ Tests creation of a record. 
""" - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + + with engine.connect() as conn: driver.add_blank_bundle() - count = conn.execute( - """ - SELECT COUNT(*) FROM drs_bundle_record - """ - ).fetchone()[0] + result = conn.execute("SELECT COUNT(*) FROM drs_bundle_record") + count = result.scalar() assert count == 1, "driver did not create record" - record = conn.execute( - """ - SELECT * FROM drs_bundle_record - """ - ).fetchone() + result = conn.execute("SELECT * FROM drs_bundle_record").fetchone() - assert record != None - assert len(record) == 10 + assert result != None + assert len(result) == 10 -@util.removes("index.sq3") -def test_driver_add_container_record(): +def test_driver_add_container_record(combined_default_and_single_table_settings): """ Tests creation of a record. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + + with engine.connect() as conn: driver.add("container") count = conn.execute( @@ -164,14 +157,14 @@ def test_driver_add_container_record(): assert record[4] == None, "record size non-null" -@util.removes("index.sq3") -def test_driver_add_bundles_record(): +def test_driver_add_bundles_record(combined_default_and_single_table_settings): """ Tests creation of a record. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + with engine.connect() as conn: driver.add_bundle(name="bundle") count = conn.execute( @@ -194,14 +187,15 @@ def test_driver_add_bundles_record(): assert record[3], "record updated date not populated" -@util.removes("index.sq3") -def test_driver_add_multipart_record(): +def test_driver_add_multipart_record(combined_default_and_single_table_settings): """ Tests creation of a record. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + + with engine.connect() as conn: driver.add("multipart") count = conn.execute( @@ -225,12 +219,11 @@ def test_driver_add_multipart_record(): assert record[4] == None, "record size non-null" -@util.removes("index.sq3") -def test_driver_add_with_valid_did(): +def test_driver_add_with_valid_did(combined_default_and_single_table_settings): """ Tests creation of a record with given valid did. """ - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) form = "object" did = "3d313755-cbb4-4b08-899d-7bbac1f6e67d" @@ -239,12 +232,11 @@ def test_driver_add_with_valid_did(): assert s.query(IndexRecord).first().did == did -@util.removes("index.sq3") -def test_driver_add_with_duplicate_did(): +def test_driver_add_with_duplicate_did(combined_default_and_single_table_settings): """ Tests creation of a record with duplicate did. 
""" - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) form = "object" did = "3d313755-cbb4-4b08-899d-7bbac1f6e67d" @@ -254,14 +246,14 @@ def test_driver_add_with_duplicate_did(): driver.add(form, did=did) -@util.removes("index.sq3") -def test_driver_add_multiple_records(): +def test_driver_add_multiple_records(combined_default_and_single_table_settings): """ Tests creation of a record. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + with engine.connect() as conn: driver.add("object") driver.add("object") driver.add("object") @@ -288,14 +280,14 @@ def test_driver_add_multiple_records(): assert record[4] == None, "record size non-null" -@util.removes("index.sq3") -def test_driver_add_with_size(): +def test_driver_add_with_size(combined_default_and_single_table_settings): """ Tests creation of a record with size. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + with engine.connect() as conn: form = "object" size = 512 @@ -319,14 +311,14 @@ def test_driver_add_with_size(): assert size == new_size, "record size mismatch" -@util.removes("index.sq3") -def test_driver_add_with_urls(): +def test_driver_add_with_urls(combined_default_and_single_table_settings): """ Tests creation of a record with urls. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + with engine.connect() as conn: form = "object" urls = ["a", "b", "c"] @@ -360,12 +352,11 @@ def test_driver_add_with_urls(): assert urls == new_urls, "record urls mismatch" -@util.removes("index.sq3") -def test_driver_add_with_filename(): +def test_driver_add_with_filename(combined_default_and_single_table_settings): """ Tests creation of a record with filename. """ - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) form = "object" file_name = "abc" @@ -374,12 +365,11 @@ def test_driver_add_with_filename(): assert s.query(IndexRecord).first().file_name == "abc" -@util.removes("index.sq3") -def test_driver_add_with_version(): +def test_driver_add_with_version(combined_default_and_single_table_settings): """ Tests creation of a record with version string. """ - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) form = "object" version = "ver_123" @@ -388,14 +378,15 @@ def test_driver_add_with_version(): assert s.query(IndexRecord).first().version == "ver_123" -@util.removes("index.sq3") -def test_driver_add_with_hashes(): +def test_driver_add_with_hashes(combined_default_and_single_table_settings): """ Tests creation of a record with hashes. 
""" - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + + with engine.connect() as conn: form = "object" hashes = {"a": "1", "b": "2", "c": "3"} @@ -429,31 +420,30 @@ def test_driver_add_with_hashes(): assert hashes == new_hashes, "record hashes mismatch" -@util.removes("index.sq3") -def test_driver_get_record(): +def test_driver_get_record(combined_default_and_single_table_settings): """ Tests retrieval of a record. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + + with engine.connect() as conn: did = str(uuid.uuid4()) baseid = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] size = 512 form = "object" - baseid = str(uuid.uuid4()) created_date = datetime.now() updated_date = datetime.now() description = "a description" content_created_date = datetime.now() content_updated_date = datetime.now() + conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)) + conn.execute( - """ - INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date, content_created_date, content_updated_date, description) VALUES (?,?,?,?,?,?,?,?,?,?) - """, - ( + "INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date, content_created_date, content_updated_date, description) VALUES ('{}','{}','{}','{}','{}','{}','{}','{}','{}','{}')".format( did, baseid, rev, @@ -464,11 +454,9 @@ def test_driver_get_record(): content_created_date, content_updated_date, description, - ), + ) ) - conn.commit() - record = driver.get(did) assert record["did"] == did, "record id does not match" @@ -484,48 +472,51 @@ def test_driver_get_record(): ), "updated date does not match" -@util.removes("index.sq3") -def test_driver_get_fails_with_no_records(): +def test_driver_get_fails_with_no_records(combined_default_and_single_table_settings): """ Tests retrieval of a record fails if there are no records. """ - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) with pytest.raises(NoRecordFound): driver.get("some_record_that_does_not_exist") -@util.removes("index.sq3") -def test_driver_nonstrict_get_without_prefix(): +def test_driver_nonstrict_get_without_prefix( + combined_default_and_single_table_settings, +): """ Tests retrieval of a record when a default prefix is set, but no prefix is supplied by the request. 
""" - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver( - "sqlite:///index.sq3", - index_config={ - "DEFAULT_PREFIX": "testprefix/", - "PREPEND_PREFIX": True, - "ADD_PREFIX_ALIAS": False, - }, - ) + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver( + POSTGRES_CONNECTION, + index_config={ + "DEFAULT_PREFIX": "testprefix/", + "PREPEND_PREFIX": True, + "ADD_PREFIX_ALIAS": False, + }, + ) + + with engine.connect() as conn: did = str(uuid.uuid4()) baseid = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] size = 512 form = "object" - baseid = str(uuid.uuid4()) created_date = datetime.now() updated_date = datetime.now() content_created_date = datetime.now() content_updated_date = datetime.now() description = "a description" + + conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)) + conn.execute( """ - INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date, content_created_date, content_updated_date, description) VALUES (?,?,?,?,?,?,?,?,?,?) - """, - ( + INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date, content_created_date, content_updated_date, description) VALUES ('{}','{}','{}','{}','{}','{}','{}','{}','{}','{}') + """.format( "testprefix/" + did, baseid, rev, @@ -539,8 +530,6 @@ def test_driver_nonstrict_get_without_prefix(): ), ) - conn.commit() - record = driver.get_with_nonstrict_prefix(did) assert record["did"] == "testprefix/" + did, "record id does not match" @@ -556,22 +545,22 @@ def test_driver_nonstrict_get_without_prefix(): ), "updated date does not match" -@util.removes("index.sq3") -def test_driver_nonstrict_get_with_prefix(): +def test_driver_nonstrict_get_with_prefix(combined_default_and_single_table_settings): """ Tests retrieval of a record when a default prefix is set and supplied by the request, but records are stored without prefixes. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver( - "sqlite:///index.sq3", - index_config={ - "DEFAULT_PREFIX": "testprefix/", - "PREPEND_PREFIX": False, - "ADD_PREFIX_ALIAS": True, - }, - ) + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver( + POSTGRES_CONNECTION, + index_config={ + "DEFAULT_PREFIX": "testprefix/", + "PREPEND_PREFIX": False, + "ADD_PREFIX_ALIAS": True, + }, + ) + with engine.connect() as conn: did = str(uuid.uuid4()) baseid = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] @@ -583,11 +572,13 @@ def test_driver_nonstrict_get_with_prefix(): description = "a description" content_created_date = datetime.now() content_updated_date = datetime.now() + + conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)) + conn.execute( """ - INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date, content_created_date, content_updated_date, description) VALUES (?,?,?,?,?,?,?,?,?,?) 
- """, - ( + INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date, content_created_date, content_updated_date, description) VALUES ('{}','{}','{}','{}','{}','{}','{}','{}','{}','{}') + """.format( did, baseid, rev, @@ -601,8 +592,6 @@ def test_driver_nonstrict_get_with_prefix(): ), ) - conn.commit() - record = driver.get_with_nonstrict_prefix("testprefix/" + did) assert record["did"] == did, "record id does not match" @@ -618,21 +607,22 @@ def test_driver_nonstrict_get_with_prefix(): ), "updated date does not match" -@util.removes("index.sq3") -def test_driver_nonstrict_get_with_incorrect_prefix(): +def test_driver_nonstrict_get_with_incorrect_prefix( + combined_default_and_single_table_settings, +): """ Tests retrieval of a record fails if default prefix is set and request uses a different prefix with same uuid """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver( - "sqlite:///index.sq3", - index_config={ - "DEFAULT_PREFIX": "testprefix/", - "PREPEND_PREFIX": True, - "ADD_PREFIX_ALIAS": False, - }, - ) - + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver( + POSTGRES_CONNECTION, + index_config={ + "DEFAULT_PREFIX": "testprefix/", + "PREPEND_PREFIX": True, + "ADD_PREFIX_ALIAS": False, + }, + ) + with engine.connect() as conn: did = str(uuid.uuid4()) baseid = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] @@ -642,26 +632,28 @@ def test_driver_nonstrict_get_with_incorrect_prefix(): created_date = datetime.now() updated_date = datetime.now() + conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)) + conn.execute( """ - INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date) VALUES (?,?,?,?,?,?,?) - """, - ("testprefix/" + did, baseid, rev, form, size, created_date, updated_date), + INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date) VALUES ('{}','{}','{}','{}','{}','{}','{}') + """.format( + "testprefix/" + did, baseid, rev, form, size, created_date, updated_date + ), ) - conn.commit() - with pytest.raises(NoRecordFound): driver.get_with_nonstrict_prefix("wrongprefix/" + did) -@util.removes("index.sq3") -def test_driver_nonstrict_get_with_no_default_prefix(): +def test_driver_nonstrict_get_with_no_default_prefix( + combined_default_and_single_table_settings, +): """ Tests retrieval of a record fails as expected if no default prefix is set """ driver = SQLAlchemyIndexDriver( - "sqlite:///index.sq3", + POSTGRES_CONNECTION, index_config={ "DEFAULT_PREFIX": None, "PREPEND_PREFIX": False, @@ -673,13 +665,14 @@ def test_driver_nonstrict_get_with_no_default_prefix(): driver.get_with_nonstrict_prefix("fake_id_without_prefix") -@util.removes("index.sq3") -def test_driver_get_latest_version(): +def test_driver_get_latest_version(combined_default_and_single_table_settings): """ Tests retrieval of the lattest record version """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + + with engine.connect() as conn: baseid = str(uuid.uuid4()) for _ in range(10): @@ -693,11 +686,15 @@ def test_driver_get_latest_version(): description = "a description" content_created_date = datetime.now() content_updated_date = datetime.now() + + conn.execute( + "INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid) + ) + conn.execute( """ - INSERT INTO index_record(did, baseid, rev, form, size, 
created_date, updated_date, content_created_date, content_updated_date, description) VALUES (?,?,?,?,?,?,?,?,?,?) - """, - ( + INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date, content_created_date, content_updated_date, description) VALUES ('{}','{}','{}','{}','{}','{}','{}','{}','{}','{}') + """.format( did, baseid, rev, @@ -711,8 +708,6 @@ def test_driver_get_latest_version(): ), ) - conn.commit() - record = driver.get_latest_version(did) assert record["did"] == did, "record id does not match" @@ -727,14 +722,16 @@ def test_driver_get_latest_version(): ), "updated date does not match" -@util.removes("index.sq3") -def test_driver_get_latest_version_with_no_record(): +def test_driver_get_latest_version_with_no_record( + combined_default_and_single_table_settings, +): """ Tests retrieval of the lattest record version """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + with engine.connect() as conn: for _ in range(10): did = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] @@ -744,25 +741,29 @@ def test_driver_get_latest_version_with_no_record(): dt = datetime.now() conn.execute( - """ - INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date) VALUES (?,?,?,?,?,?,?) - """, - (did, baseid, rev, form, size, dt, dt), + "INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid) ) - conn.commit() + conn.execute( + """ + INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date) VALUES ('{}','{}','{}','{}','{}','{}','{}') + """.format( + did, baseid, rev, form, size, dt, dt + ), + ) with pytest.raises(NoRecordFound): driver.get_latest_version("some base version") -@util.removes("index.sq3") -def test_driver_get_all_versions(): +def test_driver_get_all_versions(combined_default_and_single_table_settings): """ Tests retrieval of the lattest record version """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + + with engine.connect() as conn: baseid = str(uuid.uuid4()) NUMBER_OF_RECORD = 3 @@ -774,6 +775,9 @@ def test_driver_get_all_versions(): content_created_dates = [] content_updated_dates = [] descriptions = [] + + conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)) + for _ in range(NUMBER_OF_RECORD): did = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] @@ -790,12 +794,12 @@ def test_driver_get_all_versions(): updated_dates.append(updated_date) content_created_dates.append(content_created_date) descriptions.append(description) + conn.execute( """ INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date, content_created_date, content_updated_date, description) \ - VALUES (?,?,?,?,?,?,?,?,?,?) 
- """, - ( + VALUES ('{}','{}','{}','{}','{}','{}','{}','{}','{}','{}') + """.format( did, baseid, rev, @@ -809,8 +813,6 @@ def test_driver_get_all_versions(): ), ) - conn.commit() - records = driver.get_all_versions(did) assert len(records) == NUMBER_OF_RECORD, "the number of records does not match" @@ -828,15 +830,20 @@ def test_driver_get_all_versions(): ), "updated date does not match" -@util.removes("index.sq3") -def test_driver_get_all_versions_with_no_record(): +def test_driver_get_all_versions_with_no_record( + combined_default_and_single_table_settings, +): """ Tests retrieval of the lattest record version """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + + with engine.connect() as conn: baseid = str(uuid.uuid4()) + conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)) + for _ in range(3): did = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] @@ -845,69 +852,72 @@ def test_driver_get_all_versions_with_no_record(): conn.execute( """ - INSERT INTO index_record(did, baseid, rev, form, size) VALUES (?,?,?,?,?) - """, - (did, baseid, rev, form, size), + INSERT INTO index_record(did, baseid, rev, form, size) VALUES ('{}','{}','{}','{}','{}') + """.format( + did, baseid, rev, form, size + ), ) - conn.commit() - with pytest.raises(NoRecordFound): driver.get_all_versions("some baseid") -@util.removes("index.sq3") -def test_driver_get_fails_with_invalid_id(): +def test_driver_get_fails_with_invalid_id(combined_default_and_single_table_settings): """ Tests retrieval of a record fails if the record id is not found. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + with engine.connect() as conn: did = str(uuid.uuid4()) baseid = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] form = "object" + size = 512 + + conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)) conn.execute( """ - INSERT INTO index_record(did, baseid, rev, form, size) VALUES (?,?,?,?,?) - """, - (did, baseid, rev, form, None), + INSERT INTO index_record(did, baseid, rev, form, size) VALUES ('{}','{}','{}','{}','{}') + """.format( + did, baseid, rev, form, size + ), ) - conn.commit() - with pytest.raises(NoRecordFound): driver.get("some_record_that_does_not_exist") -def test_driver_update_record(skip_authz): +def test_driver_update_record(skip_authz, combined_default_and_single_table_settings): _test_driver_update_record() -@util.removes("index.sq3") def _test_driver_update_record(): """ Tests updating of a record. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + with engine.connect() as conn: did = str(uuid.uuid4()) baseid = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] form = "object" + size = 512 + + conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)) conn.execute( """ - INSERT INTO index_record(did, baseid, rev, form, size) VALUES (?,?,?,?,?) 
- """, - (did, baseid, rev, form, None), + INSERT INTO index_record(did, baseid, rev, form, size) VALUES ('{}','{}','{}','{}','{}') + """.format( + did, baseid, rev, form, size + ), ) - conn.commit() - # update_size = 256 update_urls = ["a", "b", "c"] # update_hashes = {"a": "1", "b": "2", "c": "3"} @@ -954,12 +964,13 @@ def _test_driver_update_record(): assert version == new_version, "version does not match" -@util.removes("index.sq3") -def test_driver_update_fails_with_no_records(): +def test_driver_update_fails_with_no_records( + combined_default_and_single_table_settings, +): """ Tests updating a record fails if there are no records. """ - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) with pytest.raises(NoRecordFound): driver.update( @@ -967,84 +978,94 @@ def test_driver_update_fails_with_no_records(): ) -@util.removes("index.sq3") -def test_driver_update_fails_with_invalid_id(): +def test_driver_update_fails_with_invalid_id( + combined_default_and_single_table_settings, +): """ Tests updating a record fails if the record id is not found. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + with engine.connect() as conn: did = str(uuid.uuid4()) baseid = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] form = "object" + size = 512 + + conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)) conn.execute( """ - INSERT INTO index_record(did, baseid, rev, form, size) VALUES (?,?,?,?,?) - """, - (did, baseid, rev, form, None), + INSERT INTO index_record(did, baseid, rev, form, size) VALUES ('{}','{}','{}','{}','{}') + """.format( + did, baseid, rev, form, size + ), ) - conn.commit() - with pytest.raises(NoRecordFound): driver.update("some_record_that_does_not_exist", "some_record_version", rev) -@util.removes("index.sq3") -def test_driver_update_fails_with_invalid_rev(): +def test_driver_update_fails_with_invalid_rev( + combined_default_and_single_table_settings, +): """ Tests updating a record fails if the record rev is not invalid. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + with engine.connect() as conn: did = str(uuid.uuid4()) baseid = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] form = "object" + size = 512 + + conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)) conn.execute( """ - INSERT INTO index_record(did, baseid, rev, form, size) VALUES (?,?,?,?,?) - """, - (did, baseid, rev, form, None), + INSERT INTO index_record(did, baseid, rev, form, size) VALUES ('{}','{}','{}','{}','{}') + """.format( + did, baseid, rev, form, size + ), ) - conn.commit() - with pytest.raises(RevisionMismatch): driver.update(did, baseid, "some_revision") -def test_driver_delete_record(skip_authz): +def test_driver_delete_record(skip_authz, combined_default_and_single_table_settings): _test_driver_delete_record() -@util.removes("index.sq3") def _test_driver_delete_record(): """ Tests deletion of a record. 
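     A row is seeded directly through SQL, removed via driver.delete, and
     the index_record table is then expected to be empty.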
""" - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + with engine.connect() as conn: did = str(uuid.uuid4()) baseid = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] form = "object" + size = 512 + + conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)) conn.execute( """ - INSERT INTO index_record(did, baseid, rev, form, size) VALUES (?,?,?,?,?) - """, - (did, baseid, rev, form, None), + INSERT INTO index_record(did, baseid, rev, form, size) VALUES ('{}','{}','{}','{}','{}') + """.format( + did, baseid, rev, form, size + ), ) - conn.commit() - driver.delete(did, rev) count = conn.execute( @@ -1056,92 +1077,101 @@ def _test_driver_delete_record(): assert count == 0, "records remain after deletion" -@util.removes("index.sq3") -def test_driver_delete_fails_with_no_records(): +def test_driver_delete_fails_with_no_records( + combined_default_and_single_table_settings, +): """ Tests deletion of a record fails if there are no records. """ - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) with pytest.raises(NoRecordFound): driver.delete("some_record_that_does_not_exist", "some_revision") -@util.removes("index.sq3") -def test_driver_delete_fails_with_invalid_id(): +def test_driver_delete_fails_with_invalid_id( + combined_default_and_single_table_settings, +): """ Tests deletion of a record fails if the record id is not found. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + with engine.connect() as conn: did = str(uuid.uuid4()) baseid = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] form = "object" + size = 512 + + conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)) conn.execute( """ - INSERT INTO index_record(did, baseid, rev, form, size) VALUES (?,?,?,?,?) - """, - (did, baseid, rev, form, None), + INSERT INTO index_record(did, baseid, rev, form, size) VALUES ('{}','{}','{}','{}','{}') + """.format( + did, baseid, rev, form, size + ), ) - conn.commit() - with pytest.raises(NoRecordFound): driver.delete("some_record_that_does_not_exist", rev) -@util.removes("index.sq3") -def test_driver_delete_fails_with_invalid_rev(): +def test_driver_delete_fails_with_invalid_rev( + combined_default_and_single_table_settings, +): """ Tests deletion of a record fails if the record rev is not invalid. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + with engine.connect() as conn: did = str(uuid.uuid4()) baseid = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] form = "object" + size = 512 + + conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)) conn.execute( """ - INSERT INTO index_record(did, baseid, rev, form, size) VALUES (?,?,?,?,?) 
- """, - (did, baseid, rev, form, None), + INSERT INTO index_record(did, baseid, rev, form, size) VALUES ('{}','{}','{}','{}','{}') + """.format( + did, baseid, rev, form, size + ), ) - conn.commit() - with pytest.raises(RevisionMismatch): driver.delete(did, "some_revision") -@util.removes("index.sq3") -def test_driver_get_bundle(): +def test_driver_get_bundle(combined_default_and_single_table_settings): """ Tests retrieval of a record. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + with engine.connect() as conn: bundle_id = str(uuid.uuid4()) checksum = "iuhd91h9ufh928jidsoajh9du328" size = 512 name = "object" created_time = updated_time = datetime.now() - bundle_data = "{'bundle_data': [{'access_methods': [{'access_id': 's3', 'access_url': {'url': 's3://endpointurl/bucket/key'}, 'region': '', 'type': 's3'}], 'aliases': [], 'checksums': [{'checksum': '8b9942cf415384b27cadf1f4d2d682e5', 'type': 'md5'}], 'contents': [], 'created_time': '2020-04-23T21:42:36.506404', 'description': '', 'id': 'testprefix:7e677693-9da3-455a-b51c-03467d5498b0', 'mime_type': 'application/json', 'name': None, 'self_uri': 'drs://fictitious-commons.io/testprefix:7e677693-9da3-455a-b51c-03467d5498b0', 'size': 123, 'updated_time': '2020-04-23T21:42:36.506410', 'version': '3c995667'}], 'bundle_id': '1ff381ef-55c7-42b9-b33f-81ac0689d131', 'checksum': '65b464c1aea98176ef2fa38e8b6b9fc7', 'created_time': '2020-04-23T21:42:36.564808', 'name': 'test_bundle', 'size': 123, 'updated_time': '2020-04-23T21:42:36.564819'}" + bundle_data = '{"bundle_data": [{"access_methods": [{"access_id": "s3", "access_url": {"url": "s3://endpointurl/bucket/key"}, "region": "", "type": "s3"}], "aliases": [], "checksums": [{"checksum": "8b9942cf415384b27cadf1f4d2d682e5", "type": "md5"}], "contents": [], "created_time": "2020-04-23T21:42:36.506404", "description": "", "id": "testprefix:7e677693-9da3-455a-b51c-03467d5498b0", "mime_type": "application/json", "name": None, "self_uri": "drs://fictitious-commons.io/testprefix:7e677693-9da3-455a-b51c-03467d5498b0", "size": 123, "updated_time": "2020-04-23T21:42:36.506410", "version": "3c995667"}], "bundle_id": "1ff381ef-55c7-42b9-b33f-81ac0689d131", "checksum": "65b464c1aea98176ef2fa38e8b6b9fc7", "created_time": "2020-04-23T21:42:36.564808", "name": "test_bundle", "size": 123, "updated_time": "2020-04-23T21:42:36.564819"}' + conn.execute( """ - INSERT INTO drs_bundle_record(bundle_id, name, checksum, size, bundle_data, created_time, updated_time) VALUES (?,?,?,?,?,?,?) 
- """, - (bundle_id, name, checksum, size, bundle_data, created_time, updated_time), + INSERT INTO drs_bundle_record(bundle_id, name, checksum, size, bundle_data, created_time, updated_time) VALUES ('{}','{}','{}','{}','{}','{}','{}') + """.format( + bundle_id, name, checksum, size, bundle_data, created_time, updated_time + ), ) - conn.commit() - record = driver.get_bundle(bundle_id) assert record["id"] == bundle_id, "record id does not match" From 2dc057eb3f3cf0bbcf9464feaa6b2f086707e3b0 Mon Sep 17 00:00:00 2001 From: BinamB Date: Wed, 24 Jan 2024 13:59:22 -0600 Subject: [PATCH 07/47] Fix tests (partial) --- .secrets.baseline | 22 +- indexd/index/blueprint.py | 65 +- indexd/index/drivers/single_table_alchemy.py | 830 ++++++++++++++++--- tests/conftest.py | 10 +- tests/test_client.py | 20 +- 5 files changed, 768 insertions(+), 179 deletions(-) diff --git a/.secrets.baseline b/.secrets.baseline index b0bc170f..d05c42f2 100644 --- a/.secrets.baseline +++ b/.secrets.baseline @@ -328,70 +328,70 @@ "filename": "tests/test_client.py", "hashed_secret": "15a6d8daad1278efcaadc0d6e3d1dd2d9ebbc262", "is_verified": false, - "line_number": 1127 + "line_number": 1122 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "1b0d1a618b5c213dd792bbc3aa96ffa6bc370ef3", "is_verified": false, - "line_number": 1351 + "line_number": 1346 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "1170ace44158ff189902ff44597efef121623353", "is_verified": false, - "line_number": 1796 + "line_number": 1802 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "ff9c79b737b3ea7386618cc9437d3fb0a772182b", "is_verified": false, - "line_number": 2509 + "line_number": 2515 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "c8176f1e75e62e15dabaa4087fb7194451c8f6d2", "is_verified": false, - "line_number": 2512 + "line_number": 2518 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "d5198f8eddb1cbeb437899cd99e5ee97ab8531b4", "is_verified": false, - "line_number": 2512 + "line_number": 2518 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "02dc196562514eaa3e2feac1f441ccf6ad81e09d", "is_verified": false, - "line_number": 2516 + "line_number": 2522 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "f1cb2d91a95165a2ab909eadd9f7b65f312c7e2d", "is_verified": false, - "line_number": 2517 + "line_number": 2523 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "58db546de03270b55a4c889a5c5e6296b29fef25", "is_verified": false, - "line_number": 2518 + "line_number": 2524 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "b6c0bd08fde409c18760f32bef8705191840c402", "is_verified": false, - "line_number": 2519 + "line_number": 2525 } ], "tests/test_deprecated_aliases_endpoints.py": [ @@ -413,5 +413,5 @@ } ] }, - "generated_at": "2023-11-28T16:14:34Z" + "generated_at": "2024-01-24T19:59:13Z" } diff --git a/indexd/index/blueprint.py b/indexd/index/blueprint.py index ee91f7c7..7c79513f 100644 --- a/indexd/index/blueprint.py +++ b/indexd/index/blueprint.py @@ -152,27 +152,23 @@ def get_index(form=None): negate_params=negate_params, ) else: - try: - records = blueprint.index_driver.ids( - start=start, - limit=limit, - page=page, - size=size, - file_name=file_name, - 
version=version, - urls=urls, - acl=acl, - authz=authz, - hashes=hashes, - uploader=uploader, - ids=ids, - metadata=metadata, - urls_metadata=urls_metadata, - negate_params=negate_params, - ) - except Exception as e: - print("--------id err--------------------------------") - print(e) + records = blueprint.index_driver.ids( + start=start, + limit=limit, + page=page, + size=size, + file_name=file_name, + version=version, + urls=urls, + acl=acl, + authz=authz, + hashes=hashes, + uploader=uploader, + ids=ids, + metadata=metadata, + urls_metadata=urls_metadata, + negate_params=negate_params, + ) base = { "ids": ids, @@ -190,14 +186,7 @@ def get_index(form=None): "metadata": metadata, "urls_metadata": urls_metadata, } - try: - return flask.jsonify(base), 200 - except Exception as e: - print("------------jsonmiguous- ") - print(records) - for key, value in base.items(): - print(key, value, type(value)) - print(e) + return flask.jsonify(base), 200 @blueprint.route("/urls/", methods=["GET"]) @@ -384,14 +373,9 @@ def get_index_record(record): """ Returns a record. """ + ret = blueprint.index_driver.get_with_nonstrict_prefix(record) - try: - ret = blueprint.index_driver.get_with_nonstrict_prefix(record) - - return flask.jsonify(ret), 200 - except Exception as e: - print("-------------GET record----------------") - print(e) + return flask.jsonify(ret), 200 @blueprint.route("/index/", methods=["POST"]) @@ -500,7 +484,6 @@ def add_index_blank_record_version(record): did, baseid, rev = blueprint.index_driver.add_blank_version( record, new_did=new_did, uploader=uploader, file_name=file_name, authz=authz ) - ret = {"did": did, "baseid": baseid, "rev": rev} return flask.jsonify(ret), 201 @@ -550,12 +533,7 @@ def put_index_record(record): ) # authorize done in update - print("-----------pre driver-----------") - try: - did, baseid, rev = blueprint.index_driver.update(record, rev, json) - except Exception as e: - print(e) - print("-----------post driver-----------") + did, baseid, rev = blueprint.index_driver.update(record, rev, json) ret = {"did": did, "baseid": baseid, "rev": rev} return flask.jsonify(ret), 200 @@ -754,7 +732,6 @@ def post_bundle(): for checksum in flask.request.json.get("checksums") } validate_hashes(**hashes) - # get bundles/records that already exists and add it to bundle_data for bundle in bundles: data = get_index_record(bundle)[0] diff --git a/indexd/index/drivers/single_table_alchemy.py b/indexd/index/drivers/single_table_alchemy.py index 67a3db3f..84399242 100644 --- a/indexd/index/drivers/single_table_alchemy.py +++ b/indexd/index/drivers/single_table_alchemy.py @@ -2,17 +2,39 @@ import uuid from cdislogging import get_logger -from sqlalchemy import Column, String, ForeignKey, BigInteger, DateTime, ARRAY +from sqlalchemy import ( + Column, + String, + ForeignKey, + BigInteger, + DateTime, + ARRAY, + func, + or_, + text, + not_, + and_, + cast, + TEXT, + select, +) from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.exc import IntegrityError, ProgrammingError from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker +from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound from contextlib import contextmanager from indexd import auth from indexd.errors import UserError, AuthError from indexd.index.driver import IndexDriverABC -from indexd.index.drivers.alchemy import IndexSchemaVersion +from indexd.index.drivers.alchemy import IndexSchemaVersion, DrsBundleRecord +from indexd.index.errors import ( + MultipleRecordsFound, + 
NoRecordFound, + RevisionMismatch, + UnhealthyCheck, +) from indexd.utils import migrate_database Base = declarative_base() @@ -51,55 +73,47 @@ def to_document_dict(self): """ Get the full index document """ - try: - # TODO: some of these fields may not need to be a variable and could directly go to the return object -Binam - urls = self.urls - acl = self.acl - authz = self.authz - hashes = self.hashes - record_metadata = self.record_metadata - url_metadata = self.url_metadata - created_date = self.created_date.isoformat() - updated_date = self.updated_date.isoformat() - content_created_date = ( - self.content_created_date.isoformat() - if self.content_created_date is not None - else None - ) - content_updated_date = ( - self.content_updated_date.isoformat() - if self.content_updated_date is not None - else None - ) + # TODO: some of these fields may not need to be a variable and could directly go to the return object -Binam + urls = self.urls + acl = self.acl or [] + authz = self.authz or [] + hashes = self.hashes + record_metadata = self.record_metadata + url_metadata = self.url_metadata + created_date = self.created_date.isoformat() + updated_date = self.updated_date.isoformat() + content_created_date = ( + self.content_created_date.isoformat() + if self.content_created_date is not None + else None + ) + content_updated_date = ( + self.content_updated_date.isoformat() + if self.content_updated_date is not None + else None + ) - print("----------------------def to doc------------------------") - print(url_metadata) - print(self.guid) - - return { - "did": self.guid, - "baseid": self.baseid, - "rev": self.rev, - "size": self.size, - "file_name": self.file_name, - "version": self.version, - "uploader": self.uploader, - "urls": urls, - "url_metadata": url_metadata, - "acl": acl, - "authz": authz, - "hashes": hashes, - "metadata": record_metadata, - "form": self.form, - "created_date": created_date, - "updated_date": updated_date, - "description": self.description, - "content_created_date": content_created_date, - "content_updated_date": content_updated_date, - } - except Exception as e: - print("---------to doc dict---------") - print(e) + return { + "did": self.guid, + "baseid": self.baseid, + "rev": self.rev, + "size": self.size, + "file_name": self.file_name, + "version": self.version, + "uploader": self.uploader, + "urls": urls, + "urls_metadata": url_metadata, + "acl": acl, + "authz": authz, + "hashes": hashes, + "metadata": record_metadata, + "form": self.form, + "created_date": created_date, + "updated_date": updated_date, + "description": self.description, + "content_created_date": content_created_date, + "content_updated_date": content_updated_date, + } class SingleTableSQLAlchemyIndexDriver(IndexDriverABC): @@ -179,17 +193,17 @@ def ids( if urls: for u in urls: - query = query.filter(Record.urls == u).all() + query = query.filter(Record.urls.any(u)) if acl: for u in acl: - query = query.filter(Record.acl == u).all() + query = query.filter(Record.acl.any(u)) elif acl == []: query = query.filter(Record.acl == None) if authz: for u in authz: - query = query.filter(Record.authz == u).all() + query = query.filter(Record.authz.any(u)) elif authz == []: query = query.filter(Record.authz == None) @@ -202,20 +216,12 @@ def ids( query = query.filter(Record.record_metadata[k].astext == v) if urls_metadata: - print("============if urlsmetadata============") - print(urls_metadata) for url_key, url_dict in urls_metadata.items(): - query = query.filter(Record.url_metadata.op("?")(url_key)) for 
k, v in url_dict.items(): - print("----------kv ----------") - print(k, v) - # query = query.filter( - # Record.url_metadata.any( - # Record.url_metadata.op('->>')(k) == v - # ) - # ) query = query.filter( - Record.url_metadata.op("->>")(k).astext == v + func.jsonb_path_match( + Record.url_metadata, '$.*.{} == "{}"'.format(k, v) + ) ) if negate_params: @@ -252,11 +258,109 @@ def ids( if page is not None: query = query.offset(limit * page) - print("---------ids----------") - print(query.statement) - return [i.to_document_dict() for i in query] + @staticmethod + def _negate_filter( + session, + query, + urls=None, + acl=None, + authz=None, + file_name=None, + version=None, + metadata=None, + urls_metadata=None, + ): + """ + param_values passed in here will be negated + + for string (version, file_name), filter with value != + for list (urls, acl), filter with doc that don't HAS + for dict (metadata, urls_metadata). In each (key,value) pair: + - if value is None or empty: then filter with key doesn't exist + - if value is provided, then filter with value != OR key doesn't exist + + Args: + session: db session + query: sqlalchemy query + urls (list): doc.urls don't have any in the urls list + acl (list): doc.acl don't have any in the acl list + authz (list): doc.authz don't have any in the authz list + file_name (str): doc.file_name != + version (str): doc.version != + metadata (dict): see above for dict + urls_metadata (dict): see above for dict + + Returns: + Database query + """ + if file_name is not None: + query = query.filter(Record.file_name != file_name) + + if version is not None: + query = query.filter(Record.version != version) + + if urls is not None and urls: + for u in urls: + query = query.filter(not_(Record.urls.any(u))) + + if acl is not None and acl: + for u in acl: + query = query.filter( + Record.acl.isnot(None), + func.array_length(Record.acl, 1) > 0, + not_(Record.acl.any(u)), + ) + + if authz is not None and authz: + for u in authz: + query = query.filter( + Record.authz.isnot(None), + func.array_length(Record.authz, 1) > 0, + not_(Record.authz.any(u)), + ) + + if metadata is not None and metadata: + for k, v in metadata.items(): + if not v: + query = query.filter(~text(f"record_metadata ? :key")).params(key=k) + else: + query = query.filter(Record.record_metadata[k].astext != v) + + if urls_metadata is not None and urls_metadata: + for url_key, url_dict in urls_metadata.items(): + if not url_dict: + query = query.filter( + ~text( + f"EXISTS (SELECT 1 FROM UNNEST(urls) AS element WHERE element LIKE '%{url_key}%')" + ) + ) + query = query.filter( + ~text( + f"EXISTS (SELECT 1 FROM jsonb_object_keys(url_metadata) AS key WHERE key LIKE '%{url_key}%')" + ) + ) + else: + for k, v in url_dict.items(): + if not v: + query = session.query(Record).filter( + text( + f"EXISTS (SELECT 1 FROM jsonb_each_text(url_metadata) AS x WHERE x.value LIKE '%{k}%')" + ) + ) + else: + query = query.filter( + text( + "url_metadata IS NOT NULL AND url_metadata != '{}'" + ), + ~func.jsonb_path_match( + Record.url_metadata, '$.*.{} == "{}"'.format(k, v) + ), + ) + + return query + def get_urls(self, size=None, hashes=None, ids=None, start=0, limit=100): """ Returns a list of urls matching supplied size/hashes/guids. @@ -280,11 +384,17 @@ def get_urls(self, size=None, hashes=None, ids=None, start=0, limit=100): # Return only specified window. 
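             # 'start' is a row offset into the matched set and 'limit'
             # caps the page size, letting callers page through the urls.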
query = query.offset(start) query = query.limit(limit) + return_urls = [] + for r in query: + for url, values in r.url_metadata.items(): + return_urls.append( + { + "url": url, + "metadata": values, + } + ) - return [ - {"url": r.urls, "metadata": {m.key: m.value for m in r.url_metadata}} - for r in query - ] + return return_urls def add( self, @@ -342,11 +452,11 @@ def add( record.uploader = uploader - record.urls = urls + record.urls = list(set(urls)) - record.acl = acl + record.acl = list(set(acl)) - record.authz = authz + record.authz = list(set(authz)) record.hashes = hashes @@ -380,7 +490,306 @@ def add( return record.guid, record.rev, record.baseid - def get(self, guid): + def add_blank_record(self, uploader, file_name=None, authz=None): + """ + Create a new blank record with only uploader and optionally + file_name and authz fields filled + """ + # if an authz is provided, ensure that user can actually create for that resource + authorized = False + authz_err_msg = "Auth error when attempting to update a blank record. User must have '{}' access on '{}' for service 'indexd'." + if authz: + try: + auth.authorize("create", authz) + authorized = True + except AuthError as err: + self.logger.error( + authz_err_msg.format("create", authz) + + " Falling back to 'file_upload' on '/data_file'." + ) + + if not authorized: + # either no 'authz' was provided, or user doesn't have + # the right CRUD access. Fall back on 'file_upload' logic + try: + auth.authorize("file_upload", ["/data_file"]) + except AuthError as err: + self.logger.error(authz_err_msg.format("file_upload", "/data_file")) + raise + + with self.session as session: + record = Record() + + did = str(uuid.uuid4()) + baseid = str(uuid.uuid4()) + if self.config.get("PREPEND_PREFIX"): + did = self.config["DEFAULT_PREFIX"] + did + + record.guid = did + record.baseid = baseid + + record.rev = str(uuid.uuid4())[:8] + record.baseid = baseid + record.uploader = uploader + record.file_name = file_name + + record.authz = authz + + session.add(record) + session.commit() + + return record.guid, record.rev, record.baseid + + def update_blank_record(self, did, rev, size, hashes, urls, authz=None): + """ + Update a blank record with size, hashes, urls, authz and raise + exception if the record is non-empty or the revision is not matched + """ + hashes = hashes or {} + urls = urls or [] + + with self.session as session: + query = session.query(Record).filter(Record.guid == did) + + try: + record = query.one() + except NoResultFound: + raise NoRecordFound("no record found") + except MultipleResultsFound: + raise MultipleRecordsFound("multiple records found") + + if record.size or record.hashes: + raise UserError("update api is not supported for non-empty record!") + + if rev != record.rev: + raise RevisionMismatch("revision mismatch") + + record.size = size + + record.hashes = hashes + + record.urls = list(set(urls)) + + authorized = False + authz_err_msg = "Auth error when attempting to update a blank record. User must have '{}' access on '{}' for service 'indexd'." + + if authz: + # if an authz is provided, ensure that user can actually + # create/update for that resource (old authz and new authz) + old_authz = [u for u in record.authz] if record.authz else [] + all_authz = old_authz + authz + try: + auth.authorize("update", all_authz) + authorized = True + except AuthError as err: + self.logger.error( + authz_err_msg.format("update", all_authz) + + " Falling back to 'file_uplaod' on '/data_file'." 
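+                    # NOTE: the AuthError is swallowed on purpose here;
+                    # the legacy 'file_upload' check on '/data_file'
+                    # below is the fallback when fine-grained authz fails.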
+ ) + + record.authz = set(authz) + + if not authorized: + # either no 'authz' was provided, or user doesn't have + # the right CRUD access. Fall back on 'file_upload' logic + try: + auth.authorize("file_upload", ["/data_file"]) + except AuthError as err: + self.logger.error(authz_err_msg.format("file_upload", "/data_file")) + raise + + record.rev = str(uuid.uuid4())[:8] + + record.updated_data = datetime.datetime.utcnow() + + session.add(record) + session.commit() + + return record.guid, record.rev, record.baseid + + def add_prefix_alias(self, record, session): + """ + Create a index alias with the alias as {prefix:did} + """ + prefix = self.config["DEFAULT_PREFIX"] + session.add(Record().alias.append(prefix + record.guid)) + + def get_by_alias(self, alias): + """ + Gets a record given a record alias + """ + with self.session as session: + try: + record = session.query(Record).filter(Record.alias.any(alias)).one() + except NoResultFound: + raise NoRecordFound("no record found") + except MultipleResultsFound: + raise MultipleRecordsFound("multiple records found") + return record.to_document_dict + + def get_aliases_for_did(self, did): + """ + Gets the aliases for a did + """ + with self.session as session: + self.logger.info(f"Trying to get all aliases for did {did}...") + + index_record = get_record_if_exists(did, session) + if index_record is None: + self.logger.warning(f"No record found for did {did}") + raise NoRecordFound(did) + + query = session.query(Record).filter(Record.guid == did) + return [i.alias for i in query] + + def append_aliases_for_did(self, aliases, did): + """ + Append one or more aliases to aliases already associated with one DID / GUID. + """ + with self.session as session: + self.logger.info( + f"Trying to append new aliases {aliases} to aliases for did {did}..." + ) + + index_record = get_record_if_exists(did, session) + if index_record is None: + self.logger.warning(f"No record found for did {did}") + raise NoRecordFound(did) + + # authorization + try: + resources = [u.resource for u in index_record.authz] + auth.authorize("update", resources) + except AuthError as err: + self.logger.warning( + f"Auth error while appending aliases to did {did}: User not authorized to update one or more of these resources: {resources}" + ) + raise err + + # add new aliases + query = session.query(Record).filter(Record.guid == did) + record = query.one() + + try: + record.alias = record.alias + aliases + session.commit() + except IntegrityError as err: + # One or more aliases in request were non-unique + self.logger.warning( + f"One or more aliases in request already associated with this or another GUID: {aliases}", + exc_info=True, + ) + raise MultipleRecordsFound( + f"One or more aliases in request already associated with this or another GUID: {aliases}" + ) + + def replace_aliases_for_did(self, aliases, did): + """ + Replace all aliases for one DID / GUID with new aliases. + """ + with self.session as session: + self.logger.info( + f"Trying to replace aliases for did {did} with new aliases {aliases}..." 
+ ) + + index_record = get_record_if_exists(did, session) + if index_record is None: + self.logger.warning(f"No record found for did {did}") + raise NoRecordFound(did) + + # authorization + try: + resources = [u.resource for u in index_record.authz] + auth.authorize("update", resources) + except AuthError as err: + self.logger.warning( + f"Auth error while replacing aliases for did {did}: User not authorized to update one or more of these resources: {resources}" + ) + raise err + + try: + query = session.query(Record).filter(Record.guid == did) + record = query.one() + # delete this GUID's aliases and add new aliases + record.alias = aliases + session.commit() + self.logger.info( + f"Replaced aliases for did {did} with new aliases {aliases}" + ) + except IntegrityError: + # One or more aliases in request were non-unique + self.logger.warning( + f"One or more aliases in request already associated with another GUID: {aliases}" + ) + raise MultipleRecordsFound( + f"One or more aliases in request already associated with another GUID: {aliases}" + ) + + def delete_all_aliases_for_did(self, did): + """ + Delete all of this DID / GUID's aliases. + """ + with self.session as session: + self.logger.info(f"Trying to delete all aliases for did {did}...") + + index_record = get_record_if_exists(did, session) + if index_record is None: + self.logger.warning(f"No record found for did {did}") + raise NoRecordFound(did) + + # authorization + try: + resources = [u.resource for u in index_record.authz] + auth.authorize("delete", resources) + except AuthError as err: + self.logger.warning( + f"Auth error while deleting all aliases for did {did}: User not authorized to delete one or more of these resources: {resources}" + ) + raise err + + query = session.query(Record).filter(Record.guid == did) + record = query.one() + # delete this GUID's aliases and add new aliases + record.alias = [] + session.commit() + + self.logger.info(f"Deleted all aliases for did {did}.") + + def delete_one_alias_for_did(self, alias, did): + """ + Delete one of this DID / GUID's aliases. + """ + with self.session as session: + self.logger.info(f"Trying to delete alias {alias} for did {did}...") + + index_record = get_record_if_exists(did, session) + if index_record is None: + self.logger.warning(f"No record found for did {did}") + raise NoRecordFound(did) + + # authorization + try: + resources = [u.resource for u in index_record.authz] + auth.authorize("delete", resources) + except AuthError as err: + self.logger.warning( + f"Auth error deleting alias {alias} for did {did}: User not authorized to delete one or more of these resources: {resources}" + ) + raise err + + query = session.query(Record).filter(Record.guid == did) + record = query.one() + # delete just this alias + if alias in record.alias: + record.alias.remove(alias) + session.commit() + else: + self.logger.warning(f"No alias {alias} found for did {did}") + raise NoRecordFound(alias) + + self.logger.info(f"Deleted alias {alias} for did {did}.") + + def get(self, guid, expand=True): """ Gets a record given the record id or baseid. If the given id is a baseid, it will return the latest version @@ -423,7 +832,7 @@ def get_with_nonstrict_prefix(self, guid, expand=True): return record - def update(self, guid, rev, changing_fields): + def update(self, did, rev, changing_fields): """ Updates an existing record with new values. 
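         The supplied rev must match the record's current revision or
         RevisionMismatch is raised, guarding against concurrent writers.
         List-valued fields (urls, acl, authz) are replaced wholesale
         rather than merged.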
""" @@ -440,7 +849,7 @@ def update(self, guid, rev, changing_fields): ] with self.session as session: - query = session.query(Record).filter(Record.guid == guid) + query = session.query(Record).filter(Record.guid == did) try: record = query.one() @@ -456,23 +865,16 @@ def update(self, guid, rev, changing_fields): # urls has to be updated before url_metadata because of schema # constraints. if "urls" in changing_fields: - session.delete(record.urls) - - record.urls = Record(guid=record.guid, urls=changing_fields["urls"]) + record.urls = list(set(changing_fields["urls"])) if "acl" in changing_fields: - session.delete(record.acl) - - record.acl = Record(guid=record.guid, acl=changing_fields["acl"]) + record.acl = list(set(changing_fields["acl"])) - all_authz = list(set(record.authz)) + all_authz = list(set(record.authz)) if record.authz else [] if "authz" in changing_fields: new_authz = list(set(changing_fields["authz"])) all_authz += new_authz - - session.delete(record.authz) - - record.authz = Record(guid=record.guid, authz=new_authz) + record.authz = new_authz # authorization check: `update` access on old AND new resources try: @@ -482,15 +884,11 @@ def update(self, guid, rev, changing_fields): raise if "metadata" in changing_fields: - session.delete(record.record_metadata) - - record.record_metadata = changing_fields["record_metadata"].items() - - if "url_metadata" in changing_fields: - session.delete(record.url_metadata) + record.record_metadata = changing_fields["metadata"] + if "urls_metadata" in changing_fields: checked_url_metadata = check_url_metadata( - changing_fields["url_metadata"], record + changing_fields["urls_metadata"], record ) record.url_metadata = checked_url_metadata @@ -554,11 +952,11 @@ def add_version( self, current_guid, form, - new_guid=None, + new_did=None, size=None, file_name=None, metadata=None, - url_metadata=None, + urls_metadata=None, version=None, urls=None, acl=None, @@ -576,7 +974,7 @@ def add_version( authz = authz or [] hashes = hashes or {} metadata = metadata or {} - url_metadata = url_metadata or {} + urls_metadata = urls_metadata or {} with self.session as session: query = session.query(Record).filter_by(guid=current_guid) @@ -588,11 +986,11 @@ def add_version( except MultipleResultsFound: raise MultipleRecordsFound("multiple records found") - auth.authorize("update", [u.resource for u in record.authz] + authz) + auth.authorize("update", [u for u in record.authz] + authz) baseid = record.baseid record = Record() - guid = new_guid + guid = new_did if not guid: guid = str(uuid.uuid4()) if self.config.get("PREPEND_PREFIX"): @@ -613,7 +1011,7 @@ def add_version( record.authz = authz record.hashes = hashes record.record_metadata = metadata - record.url_metadata = check_url_metadata(url_metadata, record) + record.url_metadata = check_url_metadata(urls_metadata, record) try: session.add(record) @@ -624,7 +1022,7 @@ def add_version( return record.guid, record.baseid, record.rev def add_blank_version( - self, current_guid, new_guid=None, file_name=None, uploader=None, authz=None + self, current_guid, new_did=None, file_name=None, uploader=None, authz=None ): """ Add a blank record version given did. 
@@ -656,17 +1054,15 @@ def add_blank_version( self.logger.error(authz_err_msg.format("update", old_authz)) raise - # handle the edgecase where new_guid matches the original doc's guid to + # handle the edgecase where new_did matches the original doc's guid to # prevent sqlalchemy FlushError - if new_guid == old_record.guid: - raise MultipleRecordsFound( - "{guid} already exists".format(guid=new_guid) - ) + if new_did == old_record.guid: + raise MultipleRecordsFound("{guid} already exists".format(guid=new_did)) new_record = Record() - guid = new_guid + guid = new_did if not guid: - guid = str(uuid.uuid()) + guid = str(uuid.uuid4()) if self.config.get("PEPREND_PREFIX"): guid = self.config["DEFAULT_PREFIX"] + guid @@ -704,7 +1100,7 @@ def get_all_versions(self, guid): record = query.one() baseid = record.baseid except NoResultFound: - record = session.query(Record).filter_by(base_id=did).first() + record = session.query(Record).filter_by(baseid=guid).first() if not record: raise NoRecordFound("no record found") else: @@ -712,6 +1108,7 @@ def get_all_versions(self, guid): except MultipleResultsFound: raise MultipleRecordsFound("multiple records found") + # Find all versions of this record query = session.query(Record) records = ( query.filter(Record.baseid == baseid) @@ -724,6 +1121,52 @@ def get_all_versions(self, guid): return ret + def update_all_versions(self, guid, acl=None, authz=None): + """ + Update all record versions with new acl and authz + """ + with self.session as session: + query = session.query(Record) + query = query.filter(Record.guid == guid) + + try: + record = query.one() + baseid = record.baseid + except NoResultFound: + record = session.query(Record).filter_by(baseid=guid).first() + if not record: + raise NoRecordFound("no record found") + else: + baseid = record.baseid + except MultipleResultsFound: + raise MultipleRecordsFound("multiple records found") + + # Find all versions of this record + query = session.query(Record) + records = ( + query.filter(Record.baseid == baseid) + .order_by(Record.created_date.asc()) + .all() + ) + + # User requires update permissions for all versions of the record + all_resources = [] + all_resources.append([rec.authz] for rec in records) + auth.authorize("update", list(all_resources)) + + ret = [] + # Update fields for all versions + for record in records: + record.acl = set(acl) if acl else None + record.authz = set(authz) if authz else None + + record.rev = str(uuid.uuid4())[:8] + ret.append( + {"did": record.guid, "baseid": record.baseid, "rev": record.rev} + ) + session.commit() + return ret + def get_latest_version(self, guid, has_version=None): """ Get the lattest record version given did @@ -746,7 +1189,7 @@ def get_latest_version(self, guid, has_version=None): ) if has_version: - query = query.filter(record.version.isnot(None)) + query = query.filter(Record.version.isnot(None)) record = query.first() if not record: raise NoRecordFound("no record found") @@ -801,6 +1244,163 @@ def len(self): with self.session as session: return session.execute(select([func.count()]).select_from(Record)).scalar() + def add_bundle( + self, + bundle_id=None, + name=None, + checksum=None, + size=None, + bundle_data=None, + description=None, + version=None, + aliases=None, + ): + """ + Add a bundle record + """ + with self.session as session: + record = DrsBundleRecord() + if not bundle_id: + bundle_id = str(uuid.uuid4()) + if self.config.get("PREPEND_PREFIX"): + bundle_id = self.config["DEFAULT_PREFIX"] + bundle_id + if not name: + name = bundle_id + 
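+            # carry the request fields over onto the new bundle row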
+ record.bundle_id = bundle_id + + record.name = name + + record.checksum = checksum + + record.size = size + + record.bundle_data = bundle_data + + record.description = description + + record.version = version + + record.aliases = aliases + + try: + session.add(record) + session.commit() + except IntegrityError: + raise MultipleRecordsFound( + 'bundle id "{bundle_id}" already exists'.format( + bundle_id=record.bundle_id + ) + ) + + return record.bundle_id, record.name, record.bundle_data + + def get_bundle_list(self, start=None, limit=100, page=None): + """ + Returns list of all bundles + """ + with self.session as session: + query = session.query(DrsBundleRecord) + query = query.limit(limit) + + if start is not None: + query = query.filter(DrsBundleRecord.bundle_id > start) + + if page is not None: + query = query.offset(limit * page) + + return [i.to_document_dict() for i in query] + + def get_bundle(self, bundle_id, expand=False): + """ + Gets a bundle record given the bundle_id. + """ + with self.session as session: + query = session.query(DrsBundleRecord) + + query = query.filter(or_(DrsBundleRecord.bundle_id == bundle_id)).order_by( + DrsBundleRecord.created_time.desc() + ) + + record = query.first() + if record is None: + raise NoRecordFound("No bundle found") + + doc = record.to_document_dict(expand) + + return doc + + def get_bundle_and_object_list( + self, + limit=100, + page=None, + start=None, + size=None, + urls=None, + acl=None, + authz=None, + hashes=None, + file_name=None, + version=None, + uploader=None, + metadata=None, + ids=None, + urls_metadata=None, + negate_params=None, + ): + """ + Gets bundles and objects and orders them by created time. + """ + limit = int((limit / 2) + 1) + bundle = self.get_bundle_list(start=start, limit=limit, page=page) + objects = self.ids( + limit=limit, + page=page, + start=start, + size=size, + urls=urls, + acl=acl, + authz=authz, + hashes=hashes, + file_name=file_name, + version=version, + uploader=uploader, + metadata=metadata, + ids=ids, + urls_metadata=urls_metadata, + negate_params=negate_params, + ) + + ret = [] + i = 0 + j = 0 + + while i + j < len(bundle) + len(objects): + if i != len(bundle) and ( + j == len(objects) + or bundle[i]["created_time"] > objects[j]["created_date"] + ): + ret.append(bundle[i]) + i += 1 + else: + ret.append(objects[j]) + j += 1 + return ret + + def delete_bundle(self, bundle_id): + with self.session as session: + query = session.query(DrsBundleRecord) + query = query.filter(DrsBundleRecord.bundle_id == bundle_id) + + try: + record = query.one() + except NoResultFound: + raise NoRecordFound("No bundle found") + except MultipleResultsFound: + raise MultipleRecordsFound("Multiple bundles found") + + session.delete(record) + def check_url_metadata(url_metadata, record): """ @@ -813,5 +1413,13 @@ def check_url_metadata(url_metadata, record): return url_metadata +def get_record_if_exists(did, session): + """ + Searches for a record with this did and returns it. + If no record found, returns None. 
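+    Used by the alias helpers above to look up a did before its aliases
+    are read or modified.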
+ """ + return session.query(Record).filter(Record.guid == did).first() + + SCHEMA_MIGRATION_FUNCTIONS = [] CURRENT_SCHEMA_VERSION = len(SCHEMA_MIGRATION_FUNCTIONS) diff --git a/tests/conftest.py b/tests/conftest.py index 01e16cd8..f04b72d0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -115,12 +115,6 @@ def app(): @pytest.fixture def user(app): - # app.auth.add("test", "test") - # yield { - # "Authorization": ("Basic " + base64.b64encode(b"test:test").decode("ascii")), - # "Content-Type": "application/json", - # } - # app.auth.delete("test") engine = create_engine(POSTGRES_CONNECTION) driver = SQLAlchemyAuthDriver(POSTGRES_CONNECTION) @@ -184,8 +178,12 @@ def _use_mock_authz(allowed_permissions=None): assert isinstance(allowed_permissions, list) def mock_authz(method, resources): + print("=======mock authz==================") + print(resources) + print(allowed_permissions) for resource in resources: if (method, resource) not in allowed_permissions: + print("-------------method loop failed-------------") raise AuthError( "Mock indexd.auth.authz: ({},{}) is not one of the allowed permissions: {}".format( method, resource, allowed_permissions diff --git a/tests/test_client.py b/tests/test_client.py index 062d830c..e2092db8 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -113,12 +113,9 @@ def test_index_list_with_params( param = {"bucket": {"state": "error", "other": "xxx"}} - print("====================urls metadata test=====================") data_by_url_md = client.get("/index/?urls_metadata=" + json.dumps(param)) assert data_by_url_md.status_code == 200 data_list = data_by_url_md.json - print("-------data by url--------------") - print(data_list) assert len(data_list["records"]) == 1 assert data_list["records"][0]["did"] == rec_1["did"] assert data_list["records"][0]["urls_metadata"] == data1["urls_metadata"] @@ -812,9 +809,7 @@ def test_create_blank_version(client, user, combined_default_and_single_table_se ) original_doc_guid = res.json["did"] - def assert_acl_authz_and_baseid( - acl, authz, baseid, guid, combined_default_and_single_table_settings - ): + def assert_acl_authz_and_baseid(acl, authz, baseid, guid): """ Helper to GET record with specified guid and assert acl, authz, and baseid. 
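         Shared assertion helper for the blank-version flow exercised by
         this test.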
@@ -1417,7 +1412,7 @@ def test_cant_update_inexistent_blank_record( assert res.status_code == 404 -def test_update_urls_metadata(client, user): +def test_update_urls_metadata(client, user, combined_default_and_single_table_settings): data = get_doc(has_urls_metadata=True) res = client.post("/index/", json=data, headers=user) assert res.status_code == 200 @@ -1515,6 +1510,17 @@ def test_urls_metadata_partial_match( rec = res.json ids = {r["did"] for r in rec["records"]} + + print("-----------------the test-------------------") + print(ids) + print({url_doc_mapping[url]["did"] for url in expected}) + print("---params---") + print(params) + r = client.get("/index/") + print("----get all-----") + print(r.json["records"]) + print(len(r.json["records"])) + assert ids == {url_doc_mapping[url]["did"] for url in expected} From f558dde4667e920375742d25769e6d2b83525c3f Mon Sep 17 00:00:00 2001 From: BinamB Date: Mon, 5 Feb 2024 12:09:45 -0600 Subject: [PATCH 08/47] Fix unit tests --- .secrets.baseline | 10 +- indexd/alias/blueprint.py | 10 +- indexd/index/blueprint.py | 38 ++--- indexd/index/drivers/single_table_alchemy.py | 63 +++++++- tests/conftest.py | 13 +- tests/test_bundles.py | 98 +++++++++---- tests/test_drs.py | 50 ++++--- tests/test_setup.py | 142 ------------------- tests/test_urls_endpoints.py | 6 +- 9 files changed, 203 insertions(+), 227 deletions(-) delete mode 100644 tests/test_setup.py diff --git a/.secrets.baseline b/.secrets.baseline index d05c42f2..e74a1b84 100644 --- a/.secrets.baseline +++ b/.secrets.baseline @@ -291,28 +291,28 @@ "filename": "tests/test_bundles.py", "hashed_secret": "fd66f51cba49640055a05a6173764b5f0241c63e", "is_verified": false, - "line_number": 137 + "line_number": 143 }, { "type": "Hex High Entropy String", "filename": "tests/test_bundles.py", "hashed_secret": "168762db39e35d49d630689f2ff453b5813a9255", "is_verified": false, - "line_number": 152 + "line_number": 160 }, { "type": "Hex High Entropy String", "filename": "tests/test_bundles.py", "hashed_secret": "c5f0378cf93d896ecc394150943f13afa16ba766", "is_verified": false, - "line_number": 174 + "line_number": 184 }, { "type": "Hex High Entropy String", "filename": "tests/test_bundles.py", "hashed_secret": "a2ca8b84f631b40d866b8e376d077da3527b1fe4", "is_verified": false, - "line_number": 177 + "line_number": 187 } ], "tests/test_client.py": [ @@ -413,5 +413,5 @@ } ] }, - "generated_at": "2024-01-24T19:59:13Z" + "generated_at": "2024-02-05T18:09:32Z" } diff --git a/indexd/alias/blueprint.py b/indexd/alias/blueprint.py index 12f3d74d..2b2881ff 100644 --- a/indexd/alias/blueprint.py +++ b/indexd/alias/blueprint.py @@ -73,9 +73,13 @@ def get_alias(): if limit < 0 or limit > 1024: raise UserError("limit must be between 0 and 1024") - aliases = blueprint.alias_driver.aliases( - start=start, limit=limit, size=size, hashes=hashes - ) + try: + aliases = blueprint.alias_driver.aliases( + start=start, limit=limit, size=size, hashes=hashes + ) + except Exception as e: + print("-------------------") + print(e) base = { "aliases": aliases, diff --git a/indexd/index/blueprint.py b/indexd/index/blueprint.py index 7c79513f..8a793bb1 100644 --- a/indexd/index/blueprint.py +++ b/indexd/index/blueprint.py @@ -152,23 +152,27 @@ def get_index(form=None): negate_params=negate_params, ) else: - records = blueprint.index_driver.ids( - start=start, - limit=limit, - page=page, - size=size, - file_name=file_name, - version=version, - urls=urls, - acl=acl, - authz=authz, - hashes=hashes, - uploader=uploader, - ids=ids, - 
metadata=metadata, - urls_metadata=urls_metadata, - negate_params=negate_params, - ) + try: + records = blueprint.index_driver.ids( + start=start, + limit=limit, + page=page, + size=size, + file_name=file_name, + version=version, + urls=urls, + acl=acl, + authz=authz, + hashes=hashes, + uploader=uploader, + ids=ids, + metadata=metadata, + urls_metadata=urls_metadata, + negate_params=negate_params, + ) + except Exception as e: + print("---------------------") + print(e) base = { "ids": ids, diff --git a/indexd/index/drivers/single_table_alchemy.py b/indexd/index/drivers/single_table_alchemy.py index 84399242..68e92253 100644 --- a/indexd/index/drivers/single_table_alchemy.py +++ b/indexd/index/drivers/single_table_alchemy.py @@ -217,11 +217,14 @@ def ids( if urls_metadata: for url_key, url_dict in urls_metadata.items(): + matches = "" for k, v in url_dict.items(): + matches += '@.{} == "{}" && '.format(k, v) + if matches: + matches = matches.rstrip("&& ") + match_string = "$.* ? ({})".format(matches) query = query.filter( - func.jsonb_path_match( - Record.url_metadata, '$.*.{} == "{}"'.format(k, v) - ) + func.jsonb_path_exists(Record.url_metadata, match_string) ) if negate_params: @@ -918,7 +921,7 @@ def update(self, did, rev, changing_fields): # ie file_name, version, etc setattr(record, key, value) - record.rev = str(uuid.uuid4())[:8] + record.rev = str(uuid.uuid4())[:8] record.updated_date = datetime.datetime.utcnow() @@ -1151,7 +1154,8 @@ def update_all_versions(self, guid, acl=None, authz=None): # User requires update permissions for all versions of the record all_resources = [] - all_resources.append([rec.authz] for rec in records) + for rec in records: + all_resources += rec.authz auth.authorize("update", list(all_resources)) ret = [] @@ -1401,6 +1405,55 @@ def delete_bundle(self, bundle_id): session.delete(record) + def query_urls( + self, + exclude=None, + include=None, + versioned=None, + offset=0, + limit=1000, + fields="did,urls", + **kwargs, + ): + if kwargs: + raise UserError( + "Unexpected query parameter(s) {}".format(list(kwargs.keys())) + ) + + versioned = ( + versioned.lower() in ["true", "t", "yes", "y"] if versioned else None + ) + + with self.driver.session as session: + query = session.query(Record.guid, func.string_agg(Record.urls, ",")) + # add version filter if versioned is not None + if versioned is True: # retrieve only those with a version number + query = query.filter(~Record.version.isnot(None)) + elif versioned is False: # retrieve only those without a version number + query = query.filter(~Record.version.isnot(None)) + + query = query.group_by(Record.guid) + + # add url filters + if include and exclude: + query = query.having( + and_( + ~func.string_agg(Record.urls, ",").contains(exclude), + func.string_agg(Record.urls, ",").contains(include), + ) + ) + elif include: + query = query.having(func.string_agg(Record.url, ",").contains(include)) + elif exclude: + query = query.having( + ~func.string_agg(Record.url, ",").contains(exclude) + ) + print(query) + record_list = ( + query.order_by(Record.guid.asc()).offset(offset).limit(limit).all() + ) + return self._format_response(fields, record_list) + def check_url_metadata(url_metadata, record): """ diff --git a/tests/conftest.py b/tests/conftest.py index f04b72d0..a78fafc6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -38,6 +38,8 @@ def clear_database(): "index_record_alias", "index_record_metadata", "alias_record_hash", + "alias_record_host_authority", + "alias_record", "index_record", 
"drs_bundle_record", "base_version", @@ -121,7 +123,7 @@ def user(app): try: driver.add("test", "test") except Exception as e: - print(e) + pass yield { "Authorization": ("Basic " + base64.b64encode(b"test:test").decode("ascii")), @@ -129,11 +131,10 @@ def user(app): } try: - driver.add("test", "test") driver.delete("test") except Exception as e: - print("------------user test error --------------------") - print(e) + pass + engine.dispose() @@ -178,12 +179,8 @@ def _use_mock_authz(allowed_permissions=None): assert isinstance(allowed_permissions, list) def mock_authz(method, resources): - print("=======mock authz==================") - print(resources) - print(allowed_permissions) for resource in resources: if (method, resource) not in allowed_permissions: - print("-------------method loop failed-------------") raise AuthError( "Mock indexd.auth.authz: ({},{}) is not one of the allowed permissions: {}".format( method, resource, allowed_permissions diff --git a/tests/test_bundles.py b/tests/test_bundles.py index 84ef3f72..57d25738 100644 --- a/tests/test_bundles.py +++ b/tests/test_bundles.py @@ -47,7 +47,7 @@ def create_index(client, user, add_bundle=False): return did_list, rec1 -def test_bundle_post(client, user): +def test_bundle_post(client, user, combined_default_and_single_table_settings): """ Bundle 1 +-object1 @@ -59,7 +59,9 @@ def test_bundle_post(client, user): assert res2.status_code == 200 -def test_bundle_get_post_with_optional_fields(client, user): +def test_bundle_get_post_with_optional_fields( + client, user, combined_default_and_single_table_settings +): """ Bundle 1 +-object1 @@ -109,7 +111,9 @@ def test_bundle_get_post_with_optional_fields(client, user): assert "aliases" not in content -def test_bundle_post_self_reference(client, user): +def test_bundle_post_self_reference( + client, user, combined_default_and_single_table_settings +): """ Make sure this doesnt exist Bundle 1 @@ -127,7 +131,9 @@ def test_bundle_post_self_reference(client, user): assert res2.status_code == 400 -def test_bundle_post_defined_size_checksum(client, user): +def test_bundle_post_defined_size_checksum( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) bundle_id = str(uuid.uuid4) data = { @@ -141,7 +147,9 @@ def test_bundle_post_defined_size_checksum(client, user): assert res2.status_code == 200 -def test_bundle_post_different_checksum_types(client, user): +def test_bundle_post_different_checksum_types( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) bundle_id = str(uuid.uuid4) data = { @@ -162,7 +170,9 @@ def test_bundle_post_different_checksum_types(client, user): } -def test_bundle_post_multiple_checksum_types(client, user): +def test_bundle_post_multiple_checksum_types( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) bundle_id = str(uuid.uuid4) data = { @@ -193,7 +203,9 @@ def test_bundle_post_multiple_checksum_types(client, user): ] -def test_bundle_post_checksum_with_incorrect_schema(client, user): +def test_bundle_post_checksum_with_incorrect_schema( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) bundle_id = str(uuid.uuid4) @@ -222,7 +234,9 @@ def test_bundle_post_checksum_with_incorrect_schema(client, user): assert res.status_code == 404 -def test_bundle_bundle_data_not_found(client, user): +def test_bundle_bundle_data_not_found( + client, user, 
combined_default_and_single_table_settings +): bundle_id = str(uuid.uuid4) data = { "name": "test_bundle", @@ -235,7 +249,9 @@ def test_bundle_bundle_data_not_found(client, user): assert res2.status_code == 404 -def test_post_drs_no_duplicate_bundles(client, user): +def test_post_drs_no_duplicate_bundles( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) data = get_bundle_doc(bundles=[did_list[0], did_list[0], did_list[0]]) @@ -243,13 +259,17 @@ def test_post_drs_no_duplicate_bundles(client, user): assert res2.status_code == 400 -def test_bundle_post_invalid_input(client, user): +def test_bundle_post_invalid_input( + client, user, combined_default_and_single_table_settings +): data = {} res2 = client.post("/bundle/", json=data, headers=user) assert res2.status_code == 400 -def test_bundle_post_no_bundle_data(client, user): +def test_bundle_post_no_bundle_data( + client, user, combined_default_and_single_table_settings +): data = { "name": "test_bundle", "bundles": [], @@ -259,7 +279,7 @@ def test_bundle_post_no_bundle_data(client, user): assert res2.json["error"] == "Bundle data required." -def test_bundle_get(client, user): +def test_bundle_get(client, user, combined_default_and_single_table_settings): """ Post with bundle_id and get. Bundle1 @@ -286,7 +306,7 @@ def test_bundle_get(client, user): assert rec2["size"] == 123 -def test_bundle_get_form_type(client, user): +def test_bundle_get_form_type(client, user, combined_default_and_single_table_settings): """ form = object when object form = bundle when bundle @@ -308,7 +328,9 @@ def test_bundle_get_form_type(client, user): assert rec2["form"] == "bundle" -def test_bundle_get_no_bundle_id(client, user): +def test_bundle_get_no_bundle_id( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) bundle_id = str(uuid.uuid4()) data = get_bundle_doc(did_list, bundle_id=bundle_id) @@ -320,7 +342,9 @@ def test_bundle_get_no_bundle_id(client, user): assert res2.status_code == 404 -def test_bundle_get_expand_false(client, user): +def test_bundle_get_expand_false( + client, user, combined_default_and_single_table_settings +): did_list, rec = create_index(client, user) res1 = client.get("/ga4gh/drs/v1/objects/" + rec["did"]) @@ -338,7 +362,9 @@ def test_bundle_get_expand_false(client, user): assert "bundle_data" not in rec2 -def test_redirect_to_bundle_from_index(client, user): +def test_redirect_to_bundle_from_index( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) bundle_id = str(uuid.uuid4()) data = get_bundle_doc(did_list, bundle_id=bundle_id) @@ -353,7 +379,9 @@ def test_redirect_to_bundle_from_index(client, user): assert res3.status_code == 200 -def test_bundle_from_drs_endpoint(client, user): +def test_bundle_from_drs_endpoint( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) bundle_id = str(uuid.uuid4()) data = get_bundle_doc(did_list, bundle_id=bundle_id) @@ -368,7 +396,7 @@ def test_bundle_from_drs_endpoint(client, user): assert res3.status_code == 200 -def test_get_bundle_list(client, user): +def test_get_bundle_list(client, user, combined_default_and_single_table_settings): """ bundle1 +-object1 @@ -411,7 +439,7 @@ def test_get_bundle_list(client, user): assert len(rec5["records"]) == n_records + n_bundles -def test_multiple_bundle_data(client, user): +def test_multiple_bundle_data(client, user, 
combined_default_and_single_table_settings): """ bundle1 +-object1 @@ -441,7 +469,7 @@ def test_multiple_bundle_data(client, user): assert data["id"] in did_list -def test_bundle_delete(client, user): +def test_bundle_delete(client, user, combined_default_and_single_table_settings): n_records = 6 n_delete = 2 bundle_ids = [] @@ -471,18 +499,24 @@ def test_bundle_delete(client, user): assert len(rec3["records"]) == n_records - n_delete -def test_bundle_delete_invalid_bundle_id(client, user): +def test_bundle_delete_invalid_bundle_id( + client, user, combined_default_and_single_table_settings +): bundle_id = "12938hd981h123hd18hd80h028" res = client.delete("/bundle/" + bundle_id, headers=user) assert res.status_code == 404 -def test_bundle_delete_no_bundle_id(client, user): +def test_bundle_delete_no_bundle_id( + client, user, combined_default_and_single_table_settings +): res = client.delete("/bundle/", headers=user) assert res.status_code == 405 -def test_bundle_data_bundle_and_index(client, user): +def test_bundle_data_bundle_and_index( + client, user, combined_default_and_single_table_settings +): """ bundle_main +-bundle1 @@ -521,7 +555,7 @@ def test_bundle_data_bundle_and_index(client, user): assert rec3["size"] == len(rec3["contents"]) * 123 -def test_nested_bundle_data(client, user): +def test_nested_bundle_data(client, user, combined_default_and_single_table_settings): """ bundle1 +-bundle2 @@ -557,7 +591,9 @@ def test_nested_bundle_data(client, user): rec3 = rec3[key][0] -def test_bundle_no_bundle_name(client, user): +def test_bundle_no_bundle_name( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) bundle_id = str(uuid.uuid4()) @@ -632,7 +668,9 @@ def content_validation(contents): return True -def test_get_drs_expand_contents_default(client, user): +def test_get_drs_expand_contents_default( + client, user, combined_default_and_single_table_settings +): bundle_id = build_bundle(client, user) res = client.get("/bundle/" + bundle_id) assert res.status_code == 200 @@ -645,7 +683,9 @@ def test_get_drs_expand_contents_default(client, user): assert len(contents) == 3 -def test_get_drs_expand_contents_false(client, user): +def test_get_drs_expand_contents_false( + client, user, combined_default_and_single_table_settings +): bundle_id = build_bundle(client, user) res = client.get("/bundle/" + bundle_id) assert res.status_code == 200 @@ -658,7 +698,9 @@ def test_get_drs_expand_contents_false(client, user): assert len(contents) == 0 -def test_get_drs_expand_contents_true(client, user): +def test_get_drs_expand_contents_true( + client, user, combined_default_and_single_table_settings +): bundle_id = build_bundle(client, user) res = client.get("/bundle/" + bundle_id) assert res.status_code == 200 diff --git a/tests/test_drs.py b/tests/test_drs.py index 68a8c015..7d644dfb 100644 --- a/tests/test_drs.py +++ b/tests/test_drs.py @@ -65,7 +65,7 @@ def get_bundle(client, user, has_description=True): return bundle -def test_drs_get(client, user): +def test_drs_get(client, user, combined_default_and_single_table_settings): data = get_doc() res_1 = client.post("/index/", json=data, headers=user) assert res_1.status_code == 200 @@ -84,7 +84,7 @@ def test_drs_get(client, user): assert "contents" not in rec_2 -def test_drs_get_no_default(client, user): +def test_drs_get_no_default(client, user, combined_default_and_single_table_settings): # Change default index driver settings to use no prefix 
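     # (DEFAULT_PREFIX is set to None and ADD_PREFIX_ALIAS to False just below)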
settings["config"]["INDEX"]["driver"].config["DEFAULT_PREFIX"] = None settings["config"]["INDEX"]["driver"].config["ADD_PREFIX_ALIAS"] = False @@ -130,7 +130,7 @@ def verify_timestamps(expected_doc, did, client, has_updated_date=True): assert drs_resp.json["index_updated_time"] == record_resp.json["updated_date"] -def test_timestamps(client, user): +def test_timestamps(client, user, combined_default_and_single_table_settings): data = get_doc() create_obj_resp = client.post("/index/", json=data, headers=user) assert create_obj_resp.status_code == 200 @@ -138,7 +138,7 @@ def test_timestamps(client, user): verify_timestamps(data, obj_did, client) -def test_changing_timestamps(client, user): +def test_changing_timestamps(client, user, combined_default_and_single_table_settings): data = get_doc() create_obj_resp = client.post("/index/", json=data, headers=user) assert create_obj_resp.status_code == 200 @@ -156,7 +156,9 @@ def test_changing_timestamps(client, user): verify_timestamps(update_json, update_obj_did, client) -def test_timestamps_updated_sets_to_created(client, user): +def test_timestamps_updated_sets_to_created( + client, user, combined_default_and_single_table_settings +): """ Checks that content_updated_date is set to content_created_date when none is provided. """ @@ -167,7 +169,7 @@ def test_timestamps_updated_sets_to_created(client, user): verify_timestamps(data, obj_did, client, has_updated_date=False) -def test_timestamps_none(client, user): +def test_timestamps_none(client, user, combined_default_and_single_table_settings): data = get_doc(has_content_updated_date=False, has_content_created_date=False) create_obj_resp = client.post("/index/", json=data, headers=user) assert create_obj_resp.status_code == 200 @@ -184,7 +186,7 @@ def test_timestamps_none(client, user): assert drs_resp.json["index_updated_time"] == record_resp.json["updated_date"] -def test_drs_get_description(client, user): +def test_drs_get_description(client, user, combined_default_and_single_table_settings): data = get_doc(has_description=True) res_1 = client.post("/index/", json=data, headers=user) assert res_1.status_code == 200 @@ -195,7 +197,9 @@ def test_drs_get_description(client, user): assert rec_2["description"] == data["description"] -def test_drs_changing_description(client, user): +def test_drs_changing_description( + client, user, combined_default_and_single_table_settings +): data = get_doc(has_description=True) create_obj_resp = client.post("/index/", json=data, headers=user) assert create_obj_resp.status_code == 200 @@ -214,7 +218,9 @@ def test_drs_changing_description(client, user): assert drs_rec["description"] == update_json["description"] -def test_drs_get_no_description(client, user): +def test_drs_get_no_description( + client, user, combined_default_and_single_table_settings +): data = get_doc(has_description=False) res_1 = client.post("/index/", json=data, headers=user) assert res_1.status_code == 200 @@ -225,7 +231,7 @@ def test_drs_get_no_description(client, user): assert rec_2["description"] is None -def test_drs_get_bundle(client, user): +def test_drs_get_bundle(client, user, combined_default_and_single_table_settings): bundle = get_bundle(client, user) bundle_res = client.post("/bundle/", json=bundle, headers=user) assert bundle_res.status_code == 200 @@ -235,7 +241,9 @@ def test_drs_get_bundle(client, user): assert drs_res.json["description"] == bundle["description"] -def test_drs_get_bundle_no_description(client, user): +def test_drs_get_bundle_no_description( + client, user, 
combined_default_and_single_table_settings +): bundle = get_bundle(client, user, has_description=False) bundle_res = client.post("/bundle/", json=bundle, headers=user) assert bundle_res.status_code == 200 @@ -245,7 +253,9 @@ def test_drs_get_bundle_no_description(client, user): assert drs_res.json["description"] is "" -def test_drs_multiple_endpointurl(client, user): +def test_drs_multiple_endpointurl( + client, user, combined_default_and_single_table_settings +): object_urls = { "sftp": "sftp://endpointurl/bucket/key", "ftp": "ftp://endpointurl/bucket/key", @@ -267,7 +277,7 @@ def test_drs_multiple_endpointurl(client, user): assert url["access_url"]["url"] == object_urls[protocol] -def test_drs_list(client, user): +def test_drs_list(client, user, combined_default_and_single_table_settings): record_length = 7 data = get_doc() submitted_guids = [] @@ -296,14 +306,18 @@ def test_drs_list(client, user): assert len(rec_4["drs_objects"]) == record_length -def test_get_drs_record_not_found(client, user): +def test_get_drs_record_not_found( + client, user, combined_default_and_single_table_settings +): # test exception raised at nonexistent fake_did = "testprefix:d96bab16-c4e1-44ac-923a-04328b6fe78f" res = client.get("/ga4gh/drs/v1/objects/" + fake_did) assert res.status_code == 404 -def test_get_drs_with_encoded_slash(client, user): +def test_get_drs_with_encoded_slash( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["did"] = "testprefix:ed8f4658-6acd-4f96-9dd8-3709890c959e" res_1 = client.post("/index/", json=data, headers=user) @@ -322,7 +336,7 @@ def test_get_drs_with_encoded_slash(client, user): assert rec_2["self_uri"] == "drs://testprefix:" + rec_1["did"].split(":")[1] -def test_drs_service_info_endpoint(client): +def test_drs_service_info_endpoint(client, combined_default_and_single_table_settings): """ Test drs service endpoint with drs service info friendly distribution information """ @@ -349,7 +363,9 @@ def test_drs_service_info_endpoint(client): assert res.json == expected_info -def test_drs_service_info_no_information_configured(client): +def test_drs_service_info_no_information_configured( + client, combined_default_and_single_table_settings +): """ Test drs service info endpoint when dist is not configured in the indexd config file """ diff --git a/tests/test_setup.py b/tests/test_setup.py deleted file mode 100644 index ca8773b1..00000000 --- a/tests/test_setup.py +++ /dev/null @@ -1,142 +0,0 @@ -import sqlite3 - -import tests.util as util - -from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver -from indexd.alias.drivers.alchemy import SQLAlchemyAliasDriver - - -OLD_SQLITE = sqlite3.sqlite_version_info < (3, 7, 16) - -INDEX_HOST = "index.sq3" -ALIAS_HOST = "alias.sq3" - -INDEX_TABLES = { - "base_version": [(0, "baseid", "VARCHAR", 1, None, 1)], - "index_record": [ - (0, "did", "VARCHAR", 1, None, 1), - (1, "baseid", "VARCHAR", 0, None, 0), - (2, "rev", "VARCHAR", 0, None, 0), - (3, "form", "VARCHAR", 0, None, 0), - (4, "size", "BIGINT", 0, None, 0), - (5, "created_date", "DATETIME", 0, None, 0), - (6, "updated_date", "DATETIME", 0, None, 0), - (7, "file_name", "VARCHAR", 0, None, 0), - (8, "version", "VARCHAR", 0, None, 0), - (9, "uploader", "VARCHAR", 0, None, 0), - (10, "description", "VARCHAR", 0, None, 0), - (11, "content_created_date", "DATETIME", 0, None, 0), - (12, "content_updated_date", "DATETIME", 0, None, 0), - ], - "index_record_hash": [ - (0, "did", "VARCHAR", 1, None, 1), - (1, "hash_type", "VARCHAR", 1, None, 1 if 
OLD_SQLITE else 2), - (2, "hash_value", "VARCHAR", 0, None, 0), - ], - "index_record_url": [ - (0, "did", "VARCHAR", 1, None, 1), - (1, "url", "VARCHAR", 1, None, 1 if OLD_SQLITE else 2), - ], - "index_schema_version": [(0, "version", "INTEGER", 1, None, 1)], - "drs_bundle_record": [ - (0, "bundle_id", "VARCHAR", 1, None, 1), - (1, "name", "VARCHAR", 0, None, 0), - (2, "created_time", "DATETIME", 0, None, 0), - (3, "updated_time", "DATETIME", 0, None, 0), - (4, "checksum", "VARCHAR", 0, None, 0), - (5, "size", "BIGINT", 0, None, 0), - (6, "bundle_data", "TEXT", 0, None, 0), - (7, "description", "TEXT", 0, None, 0), - (8, "version", "VARCHAR", 0, None, 0), - (9, "aliases", "VARCHAR", 0, None, 0), - ], -} - -ALIAS_TABLES = { - "alias_record": [ - (0, "name", "VARCHAR", 1, None, 1), - (1, "rev", "VARCHAR", 0, None, 0), - (2, "size", "BIGINT", 0, None, 0), - (3, "release", "VARCHAR", 0, None, 0), - (4, "metastring", "VARCHAR", 0, None, 0), - (5, "keeper_authority", "VARCHAR", 0, None, 0), - ], - "alias_record_hash": [ - (0, "name", "VARCHAR", 1, None, 1), - (1, "hash_type", "VARCHAR", 1, None, 1 if OLD_SQLITE else 2), - (2, "hash_value", "VARCHAR", 0, None, 0), - ], - "alias_record_host_authority": [ - (0, "name", "VARCHAR", 1, None, 1), - (1, "host", "VARCHAR", 1, None, 1 if OLD_SQLITE else 2), - ], - "alias_schema_version": [(0, "version", "INTEGER", 1, None, 1)], -} - -INDEX_CONFIG = {"driver": SQLAlchemyIndexDriver("sqlite:///index.sq3")} - -ALIAS_CONFIG = {"driver": SQLAlchemyAliasDriver("sqlite:///alias.sq3")} - - -@util.removes(INDEX_HOST) -def test_sqlite3_index_setup_tables(): - """ - Tests that the SQLite3 index database gets set up correctly. - """ - SQLAlchemyIndexDriver("sqlite:///index.sq3") - - with sqlite3.connect(INDEX_HOST) as conn: - c = conn.execute( - """ - SELECT name FROM sqlite_master WHERE type = 'table' - """ - ) - - tables = [i[0] for i in c] - - for table in INDEX_TABLES: - assert table in tables, "{table} not created".format(table=table) - - for table, schema in list(INDEX_TABLES.items()): - # NOTE PRAGMA's don't work with parameters... - c = conn.execute( - """ - PRAGMA table_info ('{table}') - """.format( - table=table - ) - ) - - assert schema == [i for i in c] - - -@util.removes(ALIAS_HOST) -def test_sqlite3_alias_setup_tables(): - """ - Tests that the SQLite3 alias database gets set up correctly. - """ - SQLAlchemyAliasDriver("sqlite:///alias.sq3") - - with sqlite3.connect(ALIAS_HOST) as conn: - c = conn.execute( - """ - SELECT name FROM sqlite_master WHERE type = 'table' - """ - ) - - tables = [i[0] for i in c] - - for table in ALIAS_TABLES: - assert table in tables, "{table} not created".format(table=table) - - for table, schema in list(ALIAS_TABLES.items()): - # NOTE PRAGMA's don't work with parameters... 
- c = conn.execute( - """ - PRAGMA table_info ('{table}') - """.format( - table=table - ) - ) - - assert schema == [i for i in c] diff --git a/tests/test_urls_endpoints.py b/tests/test_urls_endpoints.py index 19db0aca..df61789e 100644 --- a/tests/test_urls_endpoints.py +++ b/tests/test_urls_endpoints.py @@ -40,7 +40,7 @@ def test_data(client, user): return url_x_count, versioned_count, unversioned_count -def test_query_urls(client, test_data): +def test_query_urls(client, test_data, combined_default_and_single_table_settings): """ Args: client (test fixture) @@ -91,7 +91,9 @@ def test_query_urls(client, test_data): assert len(urls_list) == versioned_count + unversioned_count - 2 * url_x_count -def test_query_urls_metadata(client, test_data): +def test_query_urls_metadata( + client, test_data, combined_default_and_single_table_settings +): """ Args: client (test fixture) From 55b1f2e41a613c6aad78211dd30931f714caeb74 Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 13 Feb 2024 13:03:06 -0600 Subject: [PATCH 09/47] Fix urls endpoints unit tests --- .secrets.baseline | 22 ++--- indexd/alias/blueprint.py | 10 +-- indexd/index/blueprint.py | 38 ++++----- indexd/index/drivers/query/urls.py | 1 - indexd/index/drivers/single_table_alchemy.py | 88 ++++++++++++++++++-- indexd/urls/blueprint.py | 7 +- tests/test_client.py | 10 --- tests/test_urls_endpoints.py | 2 +- 8 files changed, 117 insertions(+), 61 deletions(-) diff --git a/.secrets.baseline b/.secrets.baseline index e74a1b84..f156c256 100644 --- a/.secrets.baseline +++ b/.secrets.baseline @@ -328,70 +328,70 @@ "filename": "tests/test_client.py", "hashed_secret": "15a6d8daad1278efcaadc0d6e3d1dd2d9ebbc262", "is_verified": false, - "line_number": 1122 + "line_number": 1121 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "1b0d1a618b5c213dd792bbc3aa96ffa6bc370ef3", "is_verified": false, - "line_number": 1346 + "line_number": 1345 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "1170ace44158ff189902ff44597efef121623353", "is_verified": false, - "line_number": 1802 + "line_number": 1792 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "ff9c79b737b3ea7386618cc9437d3fb0a772182b", "is_verified": false, - "line_number": 2515 + "line_number": 2505 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "c8176f1e75e62e15dabaa4087fb7194451c8f6d2", "is_verified": false, - "line_number": 2518 + "line_number": 2508 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "d5198f8eddb1cbeb437899cd99e5ee97ab8531b4", "is_verified": false, - "line_number": 2518 + "line_number": 2508 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "02dc196562514eaa3e2feac1f441ccf6ad81e09d", "is_verified": false, - "line_number": 2522 + "line_number": 2512 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "f1cb2d91a95165a2ab909eadd9f7b65f312c7e2d", "is_verified": false, - "line_number": 2523 + "line_number": 2513 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "58db546de03270b55a4c889a5c5e6296b29fef25", "is_verified": false, - "line_number": 2524 + "line_number": 2514 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "b6c0bd08fde409c18760f32bef8705191840c402", "is_verified": false, - "line_number": 2525 + 
"line_number": 2515 } ], "tests/test_deprecated_aliases_endpoints.py": [ @@ -413,5 +413,5 @@ } ] }, - "generated_at": "2024-02-05T18:09:32Z" + "generated_at": "2024-02-13T19:02:53Z" } diff --git a/indexd/alias/blueprint.py b/indexd/alias/blueprint.py index 2b2881ff..12f3d74d 100644 --- a/indexd/alias/blueprint.py +++ b/indexd/alias/blueprint.py @@ -73,13 +73,9 @@ def get_alias(): if limit < 0 or limit > 1024: raise UserError("limit must be between 0 and 1024") - try: - aliases = blueprint.alias_driver.aliases( - start=start, limit=limit, size=size, hashes=hashes - ) - except Exception as e: - print("-------------------") - print(e) + aliases = blueprint.alias_driver.aliases( + start=start, limit=limit, size=size, hashes=hashes + ) base = { "aliases": aliases, diff --git a/indexd/index/blueprint.py b/indexd/index/blueprint.py index 8a793bb1..7c79513f 100644 --- a/indexd/index/blueprint.py +++ b/indexd/index/blueprint.py @@ -152,27 +152,23 @@ def get_index(form=None): negate_params=negate_params, ) else: - try: - records = blueprint.index_driver.ids( - start=start, - limit=limit, - page=page, - size=size, - file_name=file_name, - version=version, - urls=urls, - acl=acl, - authz=authz, - hashes=hashes, - uploader=uploader, - ids=ids, - metadata=metadata, - urls_metadata=urls_metadata, - negate_params=negate_params, - ) - except Exception as e: - print("---------------------") - print(e) + records = blueprint.index_driver.ids( + start=start, + limit=limit, + page=page, + size=size, + file_name=file_name, + version=version, + urls=urls, + acl=acl, + authz=authz, + hashes=hashes, + uploader=uploader, + ids=ids, + metadata=metadata, + urls_metadata=urls_metadata, + negate_params=negate_params, + ) base = { "ids": ids, diff --git a/indexd/index/drivers/query/urls.py b/indexd/index/drivers/query/urls.py index 18eea35b..fc56f342 100644 --- a/indexd/index/drivers/query/urls.py +++ b/indexd/index/drivers/query/urls.py @@ -80,7 +80,6 @@ def query_urls( query = query.having( ~q_func["string_agg"](IndexRecordUrl.url, ",").contains(exclude) ) - print(query) # [('did', 'urls')] record_list = ( query.order_by(IndexRecordUrl.did.asc()) diff --git a/indexd/index/drivers/single_table_alchemy.py b/indexd/index/drivers/single_table_alchemy.py index 68e92253..07294fce 100644 --- a/indexd/index/drivers/single_table_alchemy.py +++ b/indexd/index/drivers/single_table_alchemy.py @@ -18,7 +18,7 @@ TEXT, select, ) -from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.dialects.postgresql import JSONB, ARRAY from sqlalchemy.exc import IntegrityError, ProgrammingError from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker @@ -1424,11 +1424,12 @@ def query_urls( versioned.lower() in ["true", "t", "yes", "y"] if versioned else None ) - with self.driver.session as session: - query = session.query(Record.guid, func.string_agg(Record.urls, ",")) + with self.session as session: + query = session.query(Record.guid, Record.urls) + # add version filter if versioned is not None if versioned is True: # retrieve only those with a version number - query = query.filter(~Record.version.isnot(None)) + query = query.filter(Record.version.isnot(None)) elif versioned is False: # retrieve only those without a version number query = query.filter(~Record.version.isnot(None)) @@ -1438,22 +1439,91 @@ def query_urls( if include and exclude: query = query.having( and_( - ~func.string_agg(Record.urls, ",").contains(exclude), - func.string_agg(Record.urls, ",").contains(include), + 
~func.array_to_string(Record.urls, ",").contains(exclude), + func.array_to_string(Record.urls, ",").contains(include), ) ) elif include: - query = query.having(func.string_agg(Record.url, ",").contains(include)) + query = query.having( + func.array_to_string(Record.urls, ",").contains(include) + ) elif exclude: query = query.having( - ~func.string_agg(Record.url, ",").contains(exclude) + ~func.array_to_string(Record.urls, ",").contains(exclude) ) - print(query) record_list = ( query.order_by(Record.guid.asc()).offset(offset).limit(limit).all() ) return self._format_response(fields, record_list) + def query_metadata_by_key( + self, + key, + value, + url=None, + versioned=None, + offset=0, + limit=1000, + fields="did,urls,rev", + **kwargs, + ): + if kwargs: + raise UserError( + "Unexpected query parameter(s) {}".format(list(kwargs.keys())) + ) + + versioned = ( + versioned.lower() in ["true", "t", "yes", "y"] if versioned else None + ) + with self.session as session: + query = session.query(Record.guid, Record.urls, Record.rev) + + query = query.filter( + func.jsonb_path_exists( + Record.url_metadata, f'$.* ? (@.{key} == "{value}")' + ) + ) + + # add version filter if versioned is not None + if versioned is True: # retrieve only those with a version number + query = query.filter(Record.version.isnot(None)) + elif versioned is False: # retrieve only those without a version number + query = query.filter(~Record.version.isnot(None)) + + if url: + query = query.filter( + func.array_to_string(Record.urls, ",").contains(url) + ) + # [('did', 'url', 'rev')] + record_list = ( + query.order_by(Record.guid.asc()).offset(offset).limit(limit).all() + ) + return self._format_response(fields, record_list) + + @staticmethod + def _format_response(requested_fields, record_list): + """loops through the query result and removes undesired columns and converts result of urls string_agg to list + Args: + requested_fields (str): comma separated list of fields to return, if not specified return all fields + record_list (list(tuple]): must be of the form [(did, urls, rev)], rev is not required for urls query + Returns: + list[dict]: list of response dicts + """ + result = [] + provided_fields_dict = {k: 1 for k in requested_fields.split(",")} + for record in record_list: + resp_dict = {} + if provided_fields_dict.get("did"): + resp_dict["did"] = record[0] + if provided_fields_dict.get("urls"): + resp_dict["urls"] = record[1] if record[1] else [] + + # check if record is returned in tuple + if provided_fields_dict.get("rev") and len(record) == 3: + resp_dict["rev"] = record[2] + result.append(resp_dict) + return result + def check_url_metadata(url_metadata, record): """ diff --git a/indexd/urls/blueprint.py b/indexd/urls/blueprint.py index 7d34d75a..03208bc1 100644 --- a/indexd/urls/blueprint.py +++ b/indexd/urls/blueprint.py @@ -5,6 +5,7 @@ from indexd.errors import UserError from indexd.index.drivers.query.urls import AlchemyURLsQueryDriver +from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver blueprint = Blueprint("urls", __name__) @@ -71,7 +72,11 @@ def query_metadata(): def pre_config(state): driver = state.app.config["INDEX"]["driver"] blueprint.logger = state.app.logger - blueprint.driver = AlchemyURLsQueryDriver(driver) + blueprint.driver = ( + driver + if type(driver) == SingleTableSQLAlchemyIndexDriver + else AlchemyURLsQueryDriver(driver) + ) @blueprint.errorhandler(UserError) diff --git a/tests/test_client.py b/tests/test_client.py index e2092db8..dbdf205a 100644 --- 
a/tests/test_client.py +++ b/tests/test_client.py @@ -539,7 +539,6 @@ def test_negate_filter_file_name( res = client.get("/index/?negate_params=" + json.dumps(negate_param)) assert res.status_code == 200 rec = res.json - print(rec) # assert record returned with proper non-negated file name assert len(rec["records"]) == 1 assert rec["records"][0]["file_name"] == data1["file_name"] @@ -1374,7 +1373,6 @@ def test_get_empty_acl_authz_record_after_fill_size_n_hash( res = client.get("/index/?uploader=uploader_123") assert res.status_code == 200 rec = res.json - print(rec) assert len(rec["records"]) == 3 res = client.get("/index/?uploader=uploader_123&acl=read") @@ -1511,15 +1509,7 @@ def test_urls_metadata_partial_match( ids = {r["did"] for r in rec["records"]} - print("-----------------the test-------------------") - print(ids) - print({url_doc_mapping[url]["did"] for url in expected}) - print("---params---") - print(params) r = client.get("/index/") - print("----get all-----") - print(r.json["records"]) - print(len(r.json["records"])) assert ids == {url_doc_mapping[url]["did"] for url in expected} diff --git a/tests/test_urls_endpoints.py b/tests/test_urls_endpoints.py index df61789e..d8da5f9e 100644 --- a/tests/test_urls_endpoints.py +++ b/tests/test_urls_endpoints.py @@ -4,7 +4,7 @@ @pytest.fixture(scope="function") -def test_data(client, user): +def test_data(client, user, combined_default_and_single_table_settings): system_random = random.SystemRandom() url_x_count = system_random.randint(2, 5) From 0c202207eb944f0deaa156c1363b0b8304439302 Mon Sep 17 00:00:00 2001 From: BinamB Date: Wed, 21 Feb 2024 15:10:50 -0600 Subject: [PATCH 10/47] migration script --- bin/migrate_to_single_table.py | 189 +++++++++++++++++++++++++++++++++ 1 file changed, 189 insertions(+) create mode 100644 bin/migrate_to_single_table.py diff --git a/bin/migrate_to_single_table.py b/bin/migrate_to_single_table.py new file mode 100644 index 00000000..380e16cb --- /dev/null +++ b/bin/migrate_to_single_table.py @@ -0,0 +1,189 @@ +""" + +""" +import json +import config_helper +from cdislogging import get_logger +from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String, DateTime +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker + +from indexd.index.drivers.alchemy import ( + IndexRecord, + IndexRecordAuthz, + BaseVersion, + IndexRecordAlias, + IndexRecordUrl, + IndexRecordACE, + IndexRecordMetadata, + IndexRecordUrlMetadata, + IndexRecordHash, +) +from indexd.index.drivers.single_table_alchemy import Record + +APP_NAME = "indexd" + +logger = get_logger("migrate_single_table", log_level="debug") + + +def load_json(file_name): + return config_helper.load_json(file_name, APP_NAME) + + +def main(): + migrator = IndexRecordMigrator() + migrator.index_record_to_new_table() + return + + +class IndexRecordMigrator: + def __init__(self): + self.logger = get_logger("migrate_single_table", log_level="debug") + conf_data = load_json("creds.json") + usr = conf_data.get("db_username", "{{db_username}}") + db = conf_data.get("db_database", "{{db_database}}") + psw = conf_data.get("db_password", "{{db_password}}") + pghost = conf_data.get("db_host", "{{db_host}}") + pgport = 5432 + index_config = conf_data.get("index_config") + + engine = create_engine( + f"postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}" + ) + + Base = declarative_base() + Base.metadata.create_all(engine) + Session = sessionmaker(bind=engine) + + self.session = Session() + + def 
index_record_to_new_table(self, batch_size=1000): + try: + total_records = self.session.query(IndexRecord).count() + + for offset in range(0, total_records, batch_size): + stmt = self.session.query(IndexRecord).offset(offset).limit(batch_size) + + records_to_insert = [] + + for row in stmt: + hashes = self.get_index_record_hash(row.did) + urls = self.get_urls_record(row.did) + url_metadata = self.get_urls_metadata(row.did) + acl = self.get_index_record_ace(row.did) + authz = self.get_index_record_authz(row.did) + alias = self.get_index_record_alias(row.did) + metadata = self.get_index_record_metadata(row.did) + + records_to_insert.append( + Record( + guid=row.did, + baseid=row.baseid, + rev=row.rev, + form=row.form, + size=row.size, + created_date=row.created_date, + updated_date=row.updated_date, + content_created_date=row.content_created_date, + content_updated_date=row.content_updated_date, + file_name=row.file_name, + version=row.version, + uploader=row.uploader, + hashes=hashes, + urls=urls, + url_metadata=url_metadata, + acl=acl, + authz=authz, + alias=alias, + record_metadata=metadata, + ) + ) + + self.session.bulk_save_objects(records_to_insert) + + self.session.commit() + + inserted = min(batch_size, total_records - offset) + self.logger.info( + f"Inserted {offset} records out of {total_records}. Progress: {(offset*100)/total_records}%" + ) + + except Exception as e: + self.session.rollback() + self.logger.error(f"Errored at {offset}: {e}") + + finally: + self.session.close() + self.logger.info("Finished migrating :D") + + def get_index_record_hash(self, did): + try: + stmt = self.session.query(IndexRecordHash).filter( + IndexRecordHash.did == did + ) + res = {row.hash_type: row.hash_value for row in stmt} + return res + + except Exception as e: + self.logger.error(f"Error with hash for {did}: {e}") + + def get_urls_record(self, did): + try: + stmt = self.session.query(IndexRecordUrl).filter(IndexRecordUrl.did == did) + res = [row.url for row in stmt] + return res + + except Exception as e: + self.logger.error(f"Error with urls for {did}: {e}") + + def get_urls_metadata(self, did): + try: + stmt = self.session.query(IndexRecordUrlMetadata).filter( + IndexRecordUrlMetadata.did == did + ) + res = {row.url: {row.key: row.value} for row in stmt} + return res + except Exception as e: + self.logger.error(f"Error with url metadata for {did}: {e}") + + def get_index_record_ace(self, did): + try: + stmt = self.session.query(IndexRecordACE).filter(IndexRecordACE.did == did) + res = [row.ace for row in stmt] + return res + except Exception as e: + self.logger.error(f"Error with ace for {did}: {e}") + + def get_index_record_authz(self, did): + try: + stmt = self.session.query(IndexRecordAuthz).filter( + IndexRecordAuthz.did == did + ) + res = [row.resource for row in stmt] + return res + except Exception as e: + self.logger.error(f"Error with authz for {did}: {e}") + + def get_index_record_alias(self, did): + try: + stmt = self.session.query(IndexRecordAlias).filter( + IndexRecordAlias.did == did + ) + res = [row.name for row in stmt] + return res + except Exception as e: + self.logger.error(f"Error with alias for {did}: {e}") + + def get_index_record_metadata(self, did): + try: + stmt = self.session.query(IndexRecordMetadata).filter( + IndexRecordMetadata.did == did + ) + res = {row.key: row.value for row in stmt} + return res + except Exception as e: + self.logger.error(f"Error with alias for {did}: {e}") + + +if __name__ == "__main__": + main() From 253cfe4401a5ad8d282bc08579c9eb601ca9f528 
Mon Sep 17 00:00:00 2001 From: BinamB Date: Fri, 24 May 2024 11:58:35 -0500 Subject: [PATCH 11/47] Add async code --- bin/migrate_to_single_table.py | 114 ++++-- bin/migrate_with_asyncio.py | 324 ++++++++++++++++++ bin/migration_with_copy.py | 0 docs/local_dev_environment.md | 6 +- indexd/default_settings.py | 6 +- indexd/single_table_settings.py | 2 +- indexd/utils.py | 6 +- poetry.lock | 285 ++++++++------- pyproject.toml | 2 +- tests/conftest.py | 2 +- tests/default_test_settings.py | 2 +- .../test_legacy_schema_migration.py | 2 +- tests/test_blueprint.py | 4 +- tests/test_driver_alchemy_auth.py | 6 +- tests/test_driver_alchemy_crud.py | 2 +- tests/test_single_table_migration.py | 5 + 16 files changed, 595 insertions(+), 173 deletions(-) create mode 100644 bin/migrate_with_asyncio.py create mode 100644 bin/migration_with_copy.py create mode 100644 tests/test_single_table_migration.py diff --git a/bin/migrate_to_single_table.py b/bin/migrate_to_single_table.py index 380e16cb..3384f1cd 100644 --- a/bin/migrate_to_single_table.py +++ b/bin/migrate_to_single_table.py @@ -1,12 +1,18 @@ """ """ +import argparse import json import config_helper from cdislogging import get_logger from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String, DateTime from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker +import time +import random +import re + +import cProfile from indexd.index.drivers.alchemy import ( IndexRecord, @@ -37,15 +43,20 @@ def main(): class IndexRecordMigrator: - def __init__(self): + def __init__(self, conf_data=None): self.logger = get_logger("migrate_single_table", log_level="debug") - conf_data = load_json("creds.json") + + if conf_data: + with open(conf_data, "r") as reader: + conf_data = json.load(reader) + else: + conf_data = load_json("creds.json") + usr = conf_data.get("db_username", "{{db_username}}") db = conf_data.get("db_database", "{{db_database}}") psw = conf_data.get("db_password", "{{db_password}}") pghost = conf_data.get("db_host", "{{db_host}}") pgport = 5432 - index_config = conf_data.get("index_config") engine = create_engine( f"postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}" @@ -57,12 +68,18 @@ def __init__(self): self.session = Session() - def index_record_to_new_table(self, batch_size=1000): + @profile # for memory-profiler + def index_record_to_new_table(self, batch_size=1000, retry_limit=4): try: total_records = self.session.query(IndexRecord).count() for offset in range(0, total_records, batch_size): - stmt = self.session.query(IndexRecord).offset(offset).limit(batch_size) + stmt = ( + self.session.query(IndexRecord) + .offset(offset) + .limit(batch_size) + .yield_per(batch_size) + ) records_to_insert = [] @@ -99,11 +116,21 @@ def index_record_to_new_table(self, batch_size=1000): ) ) - self.session.bulk_save_objects(records_to_insert) - - self.session.commit() - - inserted = min(batch_size, total_records - offset) + while len(records_to_insert) > 0: + try: + self.session.bulk_save_objects(records_to_insert) + self.session.commit() + break + except Exception as e: + self.session.rollback() + if "duplicate key value violates unique constraint" in str(e): + self.logger.error(f"Errored at {offset}: {e}") + records_to_insert = self.remove_duplicate_records( + records_to_insert, e + ) + else: + self.logger.error(f"Ran into error at {offset}: {e}") + break self.logger.info( f"Inserted {offset} records out of {total_records}. 
Progress: {(offset*100)/total_records}%" ) @@ -116,12 +143,15 @@ def index_record_to_new_table(self, batch_size=1000): self.session.close() self.logger.info("Finished migrating :D") + def get_record_info(self, did): + pass + def get_index_record_hash(self, did): try: - stmt = self.session.query(IndexRecordHash).filter( - IndexRecordHash.did == did - ) - res = {row.hash_type: row.hash_value for row in stmt} + stmt = self.session.query( + IndexRecordHash.hash_type, IndexRecordHash.hash_value + ).filter(IndexRecordHash.did == did) + res = {hash_type: hash_value for hash_type, hash_value in stmt} return res except Exception as e: @@ -129,8 +159,10 @@ def get_index_record_hash(self, did): def get_urls_record(self, did): try: - stmt = self.session.query(IndexRecordUrl).filter(IndexRecordUrl.did == did) - res = [row.url for row in stmt] + stmt = self.session.query(IndexRecordUrl.url).filter( + IndexRecordUrl.did == did + ) + res = [url for url in stmt] return res except Exception as e: @@ -138,35 +170,39 @@ def get_urls_record(self, did): def get_urls_metadata(self, did): try: - stmt = self.session.query(IndexRecordUrlMetadata).filter( - IndexRecordUrlMetadata.did == did - ) - res = {row.url: {row.key: row.value} for row in stmt} + stmt = self.session.query( + IndexRecordUrlMetadata.url, + IndexRecordUrlMetadata.key, + IndexRecordUrlMetadata.value, + ).filter(IndexRecordUrlMetadata.did == did) + res = {url: {key: value} for url, key, value in stmt} return res except Exception as e: self.logger.error(f"Error with url metadata for {did}: {e}") def get_index_record_ace(self, did): try: - stmt = self.session.query(IndexRecordACE).filter(IndexRecordACE.did == did) - res = [row.ace for row in stmt] + stmt = self.session.query(IndexRecordACE.ace).filter( + IndexRecordACE.did == did + ) + res = [ace for ace in stmt] return res except Exception as e: self.logger.error(f"Error with ace for {did}: {e}") def get_index_record_authz(self, did): try: - stmt = self.session.query(IndexRecordAuthz).filter( + stmt = self.session.query(IndexRecordAuthz.resource).filter( IndexRecordAuthz.did == did ) - res = [row.resource for row in stmt] + res = [resource for resource in stmt] return res except Exception as e: self.logger.error(f"Error with authz for {did}: {e}") def get_index_record_alias(self, did): try: - stmt = self.session.query(IndexRecordAlias).filter( + stmt = self.session.query(IndexRecordAlias.name).filter( IndexRecordAlias.did == did ) res = [row.name for row in stmt] @@ -184,6 +220,32 @@ def get_index_record_metadata(self, did): except Exception as e: self.logger.error(f"Error with alias for {did}: {e}") + def remove_duplicate_records(self, records, error): + # Extract the key value from the error message + key_value = re.search(r"\(guid\)=\((.*?)\)", str(error)) + key_value = key_value.group(1) + self.logger.info(f"Removing duplicate record {key_value}") + for record in records: + if key_value == str(record.guid): + records.remove(record) + break + + return records + if __name__ == "__main__": - main() + start_time = time.time() + parser = argparse.ArgumentParser( + description="Migrate data from old indexd database to new single table database" + ) + parser.add_argument( + "creds_path", + help="Path to the creds file for the database you're trying to copy data from multi-table to single records table. 
Defaults to original indexd database creds from the indexd block in the creds.json file.", + ) + args = parser.parse_args() + migrator = IndexRecordMigrator(conf_data=args.creds_path) + migrator.index_record_to_new_table() + # cProfile.run("migrator.index_record_to_new_table()", filename="profile_results.txt") + end_time = time.time() + + print("Total Time: {}".format(end_time - start_time)) diff --git a/bin/migrate_with_asyncio.py b/bin/migrate_with_asyncio.py new file mode 100644 index 00000000..8de00da9 --- /dev/null +++ b/bin/migrate_with_asyncio.py @@ -0,0 +1,324 @@ +import argparse +import json +import config_helper +from cdislogging import get_logger +from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String, DateTime +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession +from sqlalchemy.orm import sessionmaker +import time +import random +import re +import asyncio + +import cProfile + +from indexd.index.drivers.alchemy import ( + IndexRecord, + IndexRecordAuthz, + BaseVersion, + IndexRecordAlias, + IndexRecordUrl, + IndexRecordACE, + IndexRecordMetadata, + IndexRecordUrlMetadata, + IndexRecordHash, +) +from indexd.index.drivers.single_table_alchemy import Record + +APP_NAME = "indexd" + +logger = get_logger("migrate_single_table", log_level="debug") + + +def load_json(file_name): + return config_helper.load_json(file_name, APP_NAME) + + +# @profile +# def main(): +# migrator = IndexRecordMigrator() +# asyncio.run(migrator.migrate_tables()) +# return + + +class IndexRecordMigrator: + def __init__(self, conf_data=None): + self.logger = get_logger("migrate_single_table", log_level="debug") + + if conf_data: + with open(conf_data, "r") as reader: + conf_data = json.load(reader) + else: + conf_data = load_json("creds.json") + + usr = conf_data.get("db_username", "{{db_username}}") + db = conf_data.get("db_database", "{{db_database}}") + psw = conf_data.get("db_password", "{{db_password}}") + pghost = conf_data.get("db_host", "{{db_host}}") + pgport = 5432 + + self.chunk_size = 10 + self.concurrency = 5 + self.thread_pool_size = 3 + self.buffer_size = 10 + self.batch_size = 1000 + self.n_workers = self.thread_pool_size + self.concurrency + + self.engine = create_async_engine( + f"postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}", echo=True + ) + self.async_session = sessionmaker( + self.engine, expire_on_commit=False, class_=AsyncSession + ) + + # Base = declarative_base() + # Base.metadata.create_all(self.engine) + # Session = sessionmaker(bind=self.engine) + + # self.session = Session() + + async def init(self): + async with self.async_session() as session: + await session.run_sync(Base.metadata.create_all) + + async def migrate_tables(self): + self.logger.info("Starting migration job...") + async with self.async_session() as session: + self.total_records = await session.scalar( + select(func.count(IndexRecord.did)) + ) + self.logger.info(f"Total records to copy: {self.total_records}") + + collector_queue = asyncio.Queue(maxsize=self.n_workers) + inserter_queue = asyncio.Queue(maxsize=self.buffer_size) + # loop = asyncio.get_event_loop() + + self.logger.info("Collecting Data from old IndexD Table...") + offset = 0 + collecters = loop.create_task( + self.collect(collector_queue, self.batch_size, offset) + ) + self.logger.info("Initializing workers...") + workers = [ + loop.create_task(self.worker(j, inserter_queue, collector_queue)) + for j in range(self.n_workers) + ] + 
self.logger.info("Inserting Data to new table") + inserters = [ + loop.create_task(self.insert_to_db(i, inserter_queue)) + for i in range(self.concurrency) + ] + + await asyncio.gather(collecters) + await collector_queue.join() + + for w in workers: + w.cancel() + await asyncio.gather(*workers, return_exceptions=True) + + await inserter_queue.join() + + for i in inserters: + i.cancel() + await asyncio.gather(*inserters, return_exceptions=True) + + async def collect(self, collector_queue, batch_size, offset): + """ """ + while True: + self.logger.info( + f"Collecting {offset} - {offset+batch_size} records with collector" + ) + try: + records_to_insert = await self.query_record_with_offset( + offset, batch_size + ) + except Exception as e: + self.logger.error(f"Failed to query old table for offset {offset}") + + if not records_to_insert: + break + + await collector_queue.put(records_to_insert) + + if len(records_to_insert) < batch_size: + break + + offset += batch_size + + self.logger.info(f"Added {offset} records into the collector queue") + + async def worker(self, name, collector_queue, inserter_queue): + # Handles the semaphore + # while not collector_queue.empty(): + # self.logger.info(f"Worker {name} adding records to insert queue") + # bulk_rows = await collector_queue.get() + # print(bulk_rows) + # await inserter_queue.put(bulk_rows) + # collector_queue.task_done() + while True: + bulk_rows = await collector_queue.get() + if bulk_rows is None: + break + self.logger.info(f"Worker {name} adding records to insert queue") + await inserter_queue.put(bulk_rows) + collector_queue.task_done() + + async def insert_to_db(self, name, inserter_queue): + async with self.async_session() as session: + while True: + self.logger.info(f"Inserter {name} bulk inserting records") + bulk_rows = await inserter_queue.get() + try: + async with session.begin(): + session.add_all(bulk_rows) + await session.commit() + # self.session.bulk_save_objects(bulk_rows) + except Exception as e: + self.session.rollback() + if "duplicate key value violates unique constraint" in str(e): + self.logger.error(f"Errored at {offset}: {e}") + else: + self.logger.error(f"Ran into error at {offset}: {e}") + break + finally: + inserter_queue.task_done() + self.logger.info("Successfully inserted to new table!") + + async def query_record_with_offset(self, offset, batch_size, retry_limit=4): + async with self.async_session() as session: + stmt = ( + self.session.query(IndexRecord) + .offset(offset) + .limit(batch_size) + .yield_per(batch_size) + ) + records_to_insert = [] + for row in stmt: + tasks = [ + self.get_index_record_hash(row.did), + self.get_urls_record(row.did), + self.get_urls_metadata(row.did), + self.get_index_record_ace(row.did), + self.get_index_record_authz(row.did), + self.get_index_record_alias(row.did), + self.get_index_record_metadata(row.did), + ] + results = await asyncio.gather(*tasks) + + ( + hashes, + urls, + url_metadata, + acl, + authz, + alias, + metadata, + ) = results + + records_to_insert.append( + Record( + guid=row.did, + baseid=row.baseid, + rev=row.rev, + form=row.form, + size=row.size, + created_date=row.created_date, + updated_date=row.updated_date, + content_created_date=row.content_created_date, + content_updated_date=row.content_updated_date, + file_name=row.file_name, + version=row.version, + uploader=row.uploader, + hashes=hashes, + urls=urls, + url_metadata=url_metadata, + acl=acl, + authz=authz, + alias=alias, + record_metadata=metadata, + ) + ) + return records_to_insert + + async def 
get_index_record_hash(self, did): + async with self.async_session() as session: + stmt = select(IndexRecordHash.hash_type, IndexRecordHash.hash_value).where( + IndexRecordHash.did == did + ) + results = await session.execute(stmt) + return {hash_type: hash_value for hash_type, hash_value in results} + + async def get_urls_record(self, did): + async with self.async_session() as session: + stmt = select(IndexRecordUrl.url).where(IndexRecordUrl.did == did) + results = await session.execute(stmt) + return [url for url, in results] + + async def get_urls_metadata(self, did): + async with self.async_session() as session: + stmt = select( + IndexRecordUrlMetadata.url, + IndexRecordUrlMetadata.key, + IndexRecordUrlMetadata.value, + ).where(IndexRecordUrlMetadata.did == did) + results = await session.execute(stmt) + url_metadata = {} + for url, key, value in results: + if url not in url_metadata: + url_metadata[url] = {} + url_metadata[url][key] = value + return url_metadata + + async def get_index_record_ace(self, did): + async with self.async_session() as session: + stmt = select(IndexRecordACE.ace).where(IndexRecordACE.did == did) + results = await session.execute(stmt) + return [ace for ace, in results] + + async def get_index_record_authz(self, did): + async with self.async_session() as session: + stmt = select(IndexRecordAuthz.resource).where(IndexRecordAuthz.did == did) + results = await session.execute(stmt) + return [resource for resource, in results] + + async def get_index_record_alias(self, did): + async with self.async_session() as session: + stmt = select(IndexRecordAlias.name).where(IndexRecordAlias.did == did) + results = await session.execute(stmt) + return [name for name, in results] + + async def get_index_record_metadata(self, did): + async with self.async_session() as session: + stmt = select(IndexRecordMetadata.key, IndexRecordMetadata.value).where( + IndexRecordMetadata.did == did + ) + results = await session.execute(stmt) + return {key: value for key, value in results} + + def remove_duplicate_records(self, records, error): + # Extract the key value from the error message + key_value = re.search(r"\(guid\)=\((.*?)\)", str(error)).group(1) + self.logger.info(f"Removing duplicate record {key_value}") + for record in records: + if key_value == str(record.guid): + records.remove(record) + break + + +if __name__ == "__main__": + start_time = time.time() + parser = argparse.ArgumentParser( + description="Migrate data from old indexd database to new single table database" + ) + parser.add_argument( + "creds_path", + help="Path to the creds file for the database you're trying to copy data from multi-table to single records table. 
Defaults to original indexd database creds from the indexd block in the creds.json file.", + ) + args = parser.parse_args() + migrator = IndexRecordMigrator(conf_data=args.creds_path) + asyncio.run(migrator.migrate_tables()) + # cProfile.run("asyncio.run(migrator.index_record_to_new_table())", filename="profile_results.txt") + end_time = time.time() + + print("Total Time: {}".format(end_time - start_time)) diff --git a/bin/migration_with_copy.py b/bin/migration_with_copy.py new file mode 100644 index 00000000..e69de29b diff --git a/docs/local_dev_environment.md b/docs/local_dev_environment.md index a83239f8..e929e566 100644 --- a/docs/local_dev_environment.md +++ b/docs/local_dev_environment.md @@ -192,10 +192,10 @@ python3 -m pytest -vv --cov=indexd --cov-report xml --junitxml="test-results.xml You may also need to update the [test settings](./tests/default_test_settings.py) with the appropriate database connection information prior to running the tests. ```python -settings["config"]["TEST_DB"] = "postgres://{username}:{password}@localhost:{port}/indexd_tests" +settings["config"]["TEST_DB"] = "postgresql://{username}:{password}@localhost:{port}/indexd_tests" ``` -> If you are using Azure Postgresql, you will need to include the `username@hostname` for the `username` in the connection string. You may also need to include support for SSL in the connection string, e.g. `postgres://{username@hostname}:{password}@serverfqdn:{port}/{dbname}?sslmode=require`. +> If you are using Azure Postgresql, you will need to include the `username@hostname` for the `username` in the connection string. You may also need to include support for SSL in the connection string, e.g. `postgresql://{username@hostname}:{password}@serverfqdn:{port}/{dbname}?sslmode=require`. > Further, you may run into `sqlite` errors; it may be helpful to rename existing local `*.sq3` files before running `pytest`. ## Administration @@ -235,4 +235,4 @@ With the appropriate settings, you can run the following command to migrate a da python3 bin/index_admin.py migrate_database ``` -If the `bin/local_settings.py` are not reachable, the script will fallback according to these [configuration notes](#configuration). \ No newline at end of file +If the `bin/local_settings.py` are not reachable, the script will fallback according to these [configuration notes](#configuration). diff --git a/indexd/default_settings.py b/indexd/default_settings.py index 578d5ff4..b9b72c36 100644 --- a/indexd/default_settings.py +++ b/indexd/default_settings.py @@ -17,7 +17,7 @@ # will be created as "". 
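+# Note: SQLAlchemy 1.4 dropped the legacy "postgres://" URL alias, so the
+# connection strings below use the canonical "postgresql://" scheme.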
CONFIG["INDEX"] = { "driver": SQLAlchemyIndexDriver( - "postgres://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret + "postgresql://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret echo=True, index_config={ "DEFAULT_PREFIX": "testprefix:", @@ -29,7 +29,7 @@ CONFIG["ALIAS"] = { "driver": SQLAlchemyAliasDriver( - "postgres://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret + "postgresql://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret echo=True, # pragma: allowlist secret ) } @@ -67,7 +67,7 @@ } AUTH = SQLAlchemyAuthDriver( - "postgres://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret + "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret ) # pragma: allowlist secret settings = {"config": CONFIG, "auth": AUTH} diff --git a/indexd/single_table_settings.py b/indexd/single_table_settings.py index 461d9f99..79b8ce21 100644 --- a/indexd/single_table_settings.py +++ b/indexd/single_table_settings.py @@ -9,7 +9,7 @@ # will be created as "". default_settings.settings["config"]["INDEX"] = { "driver": SingleTableSQLAlchemyIndexDriver( - "postgres://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret + "postgresql://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret echo=True, index_config={ "DEFAULT_PREFIX": "testprefix:", diff --git a/indexd/utils.py b/indexd/utils.py index 29a467ab..4c184e0f 100644 --- a/indexd/utils.py +++ b/indexd/utils.py @@ -18,7 +18,7 @@ def try_drop_test_data( user, database, root_user="postgres", host="" ): # pragma: no cover engine = create_engine( - "postgres://{user}@{host}/postgres".format(user=root_user, host=host) + "postgresql://{user}@{host}/postgres".format(user=root_user, host=host) ) conn = engine.connect() @@ -50,7 +50,7 @@ def setup_database( try_drop_test_data(user, database) engine = create_engine( - "postgres://{user}@{host}/postgres".format(user=root_user, host=host) + "postgresql://{user}@{host}/postgres".format(user=root_user, host=host) ) conn = engine.connect() conn.execute("commit") @@ -84,7 +84,7 @@ def create_tables(host, user, password, database): # pragma: no cover create tables """ engine = create_engine( - "postgres://{user}:{pwd}@{host}/{db}".format( + "postgresql://{user}:{pwd}@{host}/{db}".format( user=user, host=host, pwd=password, db=database ) ) diff --git a/poetry.lock b/poetry.lock index b70402ba..5a86ed4c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "alembic" version = "1.9.4" description = "A database migration tool for SQLAlchemy." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -23,7 +22,6 @@ tz = ["python-dateutil"] name = "anyio" version = "3.6.2" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" optional = false python-versions = ">=3.6.2" files = [ @@ -44,7 +42,6 @@ trio = ["trio (>=0.16,<0.22)"] name = "atomicwrites" version = "1.4.1" description = "Atomic file writes." 
-category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -55,7 +52,6 @@ files = [ name = "attrs" version = "22.2.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -74,7 +70,6 @@ tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy name = "authlib" version = "0.11" description = "The ultimate Python library in building OAuth and OpenID Connect servers." -category = "main" optional = false python-versions = "*" files = [ @@ -88,33 +83,31 @@ requests = "*" [[package]] name = "authutils" -version = "6.2.2" +version = "6.1.2" description = "Gen3 auth utility functions" -category = "main" optional = false -python-versions = ">=3.9,<4.0" +python-versions = ">=3.6,<4.0" files = [ - {file = "authutils-6.2.2-py3-none-any.whl", hash = "sha256:df9b551b4ab561452f0f4b50edaddccc443905b4d77ee69ea7eea78938e7caed"}, - {file = "authutils-6.2.2.tar.gz", hash = "sha256:ded3e5c0e35160eab83bfb217976920396441e19ed977acacbb769e988323850"}, + {file = "authutils-6.1.2-py3-none-any.whl", hash = "sha256:5e45b7098a40ee9650326d3f9488f867a538d53d1e03304b59634d5e77a3a258"}, + {file = "authutils-6.1.2.tar.gz", hash = "sha256:b029daffcc8d1bca481e7ba0528c8982d05c8b8dc7eee72831d37ddc08a36842"}, ] [package.dependencies] authlib = "0.11.0" cached-property = ">=1.4,<2.0" cdiserrors = "<2.0.0" -httpx = ">=0.23.0,<1.0.0" -pyjwt = {version = ">=2.4.0,<3.0", extras = ["crypto"]} +httpx = ">=0.12.1,<1.0.0" +pyjwt = {version = ">=1.5,<2.0", extras = ["crypto"]} xmltodict = ">=0.9,<1.0" [package.extras] -fastapi = ["fastapi (>=0.65.2,<0.66.0)"] +fastapi = ["fastapi (>=0.54.1,<0.55.0)"] flask = ["Flask (>=0.10.1)"] [[package]] name = "backoff" version = "1.11.1" description = "Function decoration for backoff and retry" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -126,7 +119,6 @@ files = [ name = "cached-property" version = "1.5.2" description = "A decorator for caching properties in classes." -category = "main" optional = false python-versions = "*" files = [ @@ -138,7 +130,6 @@ files = [ name = "cdiserrors" version = "1.0.0" description = "Gen3 shared exceptions and utilities." -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -156,7 +147,6 @@ flask = ["Flask (>=1.1.2,<2.0.0)"] name = "cdislogging" version = "1.1.1" description = "Standardized logging tool and format for cdis applications" -category = "main" optional = false python-versions = "*" files = [ @@ -167,7 +157,6 @@ files = [ name = "cdisutilstest" version = "0.2.4" description = "Collection of test data and tools" -category = "dev" optional = false python-versions = "*" files = [] @@ -183,7 +172,6 @@ resolved_reference = "bdfdeb05e45407e839fd954ce6d195d847cd8024" name = "certifi" version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -195,7 +183,6 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = "*" files = [ @@ -272,7 +259,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.0.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" optional = false python-versions = "*" files = [ @@ -370,7 +356,6 @@ files = [ name = "click" version = "8.1.3" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -385,7 +370,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -397,7 +381,6 @@ files = [ name = "coverage" version = "6.5.0" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -460,7 +443,6 @@ toml = ["tomli"] name = "coveralls" version = "3.3.1" description = "Show coverage stats online via coveralls.io" -category = "dev" optional = false python-versions = ">= 3.5" files = [ @@ -469,7 +451,7 @@ files = [ ] [package.dependencies] -coverage = ">=4.1,<6.0.0 || >6.1,<6.1.1 || >6.1.1,<7.0" +coverage = ">=4.1,<6.0.dev0 || >6.1,<6.1.1 || >6.1.1,<7.0" docopt = ">=0.6.1" requests = ">=1.0.0" @@ -480,7 +462,6 @@ yaml = ["PyYAML (>=3.10)"] name = "cryptography" version = "39.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -526,7 +507,6 @@ tox = ["tox"] name = "docopt" version = "0.6.2" description = "Pythonic argument parser, that will make you smile" -category = "dev" optional = false python-versions = "*" files = [ @@ -537,7 +517,6 @@ files = [ name = "doiclient" version = "0.1" description = "" -category = "main" optional = false python-versions = "*" files = [] @@ -556,7 +535,6 @@ resolved_reference = "1a5f4b2a0b04577f31f3dcec511ade117d9f4ba1" name = "dosclient" version = "0.1" description = "" -category = "main" optional = false python-versions = "*" files = [] @@ -575,7 +553,6 @@ resolved_reference = "38c0f1ab42edf3efb1ad6348d7dbdff81b131360" name = "flask" version = "2.2.3" description = "A simple framework for building complex web applications." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -598,7 +575,6 @@ dotenv = ["python-dotenv"] name = "gen3authz" version = "1.5.1" description = "Gen3 authz client" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -612,11 +588,81 @@ cdiserrors = "<2.0.0" httpx = ">=0.20.0,<1.0.0" six = ">=1.16.0,<2.0.0" +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, 
+ {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = 
"greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + [[package]] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -628,7 +674,6 @@ files = [ name = "hsclient" version = "0.1" description = "" -category = "main" optional = false python-versions = "*" files = [] @@ -647,7 +692,6 @@ resolved_reference = "f122072ee245216da5e4260f718d6f886db81773" name = "httpcore" version = "0.16.3" description = "A minimal low-level HTTP client." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -659,17 +703,16 @@ files = [ anyio = ">=3.0,<5.0" certifi = "*" h11 = ">=0.13,<0.15" -sniffio = ">=1.0.0,<2.0.0" +sniffio = "==1.*" [package.extras] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "httpx" version = "0.23.3" description = "The next generation HTTP client." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -685,15 +728,14 @@ sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<13)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -705,7 +747,6 @@ files = [ name = "importlib-metadata" version = "6.0.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -725,7 +766,6 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag name = "indexclient" version = "2.1.1" description = "" -category = "main" optional = false python-versions = "*" files = [ @@ -739,7 +779,6 @@ requests = ">=2.5.2,<3.0.0" name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -751,7 +790,6 @@ files = [ name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -763,7 +801,6 @@ files = [ name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -781,7 +818,6 @@ i18n = ["Babel (>=2.7)"] name = "jsonschema" version = "3.2.0" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -803,7 +839,6 @@ format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-va name = "mako" version = "1.2.4" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -823,7 +858,6 @@ testing = ["pytest"] name = "markupsafe" version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -883,7 +917,6 @@ files = [ name = "mock" version = "4.0.3" description = "Rolling backport of unittest.mock for all Pythons" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -900,7 +933,6 @@ test = ["pytest (<5.4)", "pytest-cov"] name = "packaging" version = "23.0" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -912,7 +944,6 @@ files = [ name = "pluggy" version = "1.0.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -928,7 +959,6 @@ testing = ["pytest", "pytest-benchmark"] name = "psycopg2" version = "2.9.5" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -951,7 +981,6 @@ files = [ name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -963,7 +992,6 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -973,30 +1001,27 @@ files = [ [[package]] name = "pyjwt" -version = "2.6.0" +version = "1.7.1" description = "JSON Web Token implementation in Python" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = "*" files = [ - {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"}, - {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"}, + {file = "PyJWT-1.7.1-py2.py3-none-any.whl", hash = "sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e"}, + {file = "PyJWT-1.7.1.tar.gz", hash = "sha256:8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96"}, ] [package.dependencies] -cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} +cryptography = {version = ">=1.4", optional = true, markers = "extra == \"crypto\""} [package.extras] -crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] +crypto = ["cryptography (>=1.4)"] +flake8 = ["flake8", "flake8-import-order", "pep8-naming"] +test = ["pytest (>=4.0.1,<5.0.0)", "pytest-cov (>=2.6.0,<3.0.0)", "pytest-runner (>=4.2,<5.0.0)"] [[package]] name = "pyrsistent" version = "0.19.3" description = "Persistent/Functional/Immutable data structures" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1033,7 +1058,6 @@ files = [ name = "pytest" version = "6.2.5" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1058,7 +1082,6 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm name = "pytest-cov" version = "2.12.1" description = "Pytest plugin for measuring coverage." 
-category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -1078,7 +1101,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-flask" version = "1.2.0" description = "A set of py.test fixtures to test Flask applications." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1098,7 +1120,6 @@ docs = ["Sphinx", "sphinx-rtd-theme"] name = "pyyaml" version = "5.4.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -1137,7 +1158,6 @@ files = [ name = "requests" version = "2.28.2" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7, <4" files = [ @@ -1159,7 +1179,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "responses" version = "0.13.4" description = "A utility library for mocking out the `requests` Python library." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -1179,7 +1198,6 @@ tests = ["coverage (>=3.7.1,<6.0.0)", "flake8", "mypy", "pytest (>=4.6)", "pytes name = "rfc3986" version = "1.5.0" description = "Validating URI References per RFC 3986" -category = "main" optional = false python-versions = "*" files = [ @@ -1197,7 +1215,6 @@ idna2008 = ["idna"] name = "setuptools" version = "67.4.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1214,7 +1231,6 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1226,7 +1242,6 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1236,65 +1251,87 @@ files = [ [[package]] name = "sqlalchemy" -version = "1.3.24" +version = "1.4.52" description = "Database Abstraction Library" -category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "SQLAlchemy-1.3.24-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:87a2725ad7d41cd7376373c15fd8bf674e9c33ca56d0b8036add2d634dba372e"}, - {file = "SQLAlchemy-1.3.24-cp27-cp27m-win32.whl", hash = "sha256:f597a243b8550a3a0b15122b14e49d8a7e622ba1c9d29776af741f1845478d79"}, - {file = "SQLAlchemy-1.3.24-cp27-cp27m-win_amd64.whl", hash = "sha256:fc4cddb0b474b12ed7bdce6be1b9edc65352e8ce66bc10ff8cbbfb3d4047dbf4"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:f1149d6e5c49d069163e58a3196865e4321bad1803d7886e07d8710de392c548"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:14f0eb5db872c231b20c18b1e5806352723a3a89fb4254af3b3e14f22eaaec75"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:e98d09f487267f1e8d1179bf3b9d7709b30a916491997137dd24d6ae44d18d79"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:fc1f2a5a5963e2e73bac4926bdaf7790c4d7d77e8fc0590817880e22dd9d0b8b"}, - {file = 
"SQLAlchemy-1.3.24-cp35-cp35m-win32.whl", hash = "sha256:f3c5c52f7cb8b84bfaaf22d82cb9e6e9a8297f7c2ed14d806a0f5e4d22e83fb7"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-win_amd64.whl", hash = "sha256:0352db1befcbed2f9282e72843f1963860bf0e0472a4fa5cf8ee084318e0e6ab"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:2ed6343b625b16bcb63c5b10523fd15ed8934e1ed0f772c534985e9f5e73d894"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:34fcec18f6e4b24b4a5f6185205a04f1eab1e56f8f1d028a2a03694ebcc2ddd4"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:e47e257ba5934550d7235665eee6c911dc7178419b614ba9e1fbb1ce6325b14f"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:816de75418ea0953b5eb7b8a74933ee5a46719491cd2b16f718afc4b291a9658"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-win32.whl", hash = "sha256:26155ea7a243cbf23287f390dba13d7927ffa1586d3208e0e8d615d0c506f996"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-win_amd64.whl", hash = "sha256:f03bd97650d2e42710fbe4cf8a59fae657f191df851fc9fc683ecef10746a375"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:a006d05d9aa052657ee3e4dc92544faae5fcbaafc6128217310945610d862d39"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1e2f89d2e5e3c7a88e25a3b0e43626dba8db2aa700253023b82e630d12b37109"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:0d5d862b1cfbec5028ce1ecac06a3b42bc7703eb80e4b53fceb2738724311443"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:0172423a27fbcae3751ef016663b72e1a516777de324a76e30efa170dbd3dd2d"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-win32.whl", hash = "sha256:d37843fb8df90376e9e91336724d78a32b988d3d20ab6656da4eb8ee3a45b63c"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-win_amd64.whl", hash = "sha256:c10ff6112d119f82b1618b6dc28126798481b9355d8748b64b9b55051eb4f01b"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:861e459b0e97673af6cc5e7f597035c2e3acdfb2608132665406cded25ba64c7"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5de2464c254380d8a6c20a2746614d5a436260be1507491442cf1088e59430d2"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d375d8ccd3cebae8d90270f7aa8532fe05908f79e78ae489068f3b4eee5994e8"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:014ea143572fee1c18322b7908140ad23b3994036ef4c0d630110faf942652f8"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-win32.whl", hash = "sha256:6607ae6cd3a07f8a4c3198ffbf256c261661965742e2b5265a77cd5c679c9bba"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-win_amd64.whl", hash = "sha256:fcb251305fa24a490b6a9ee2180e5f8252915fb778d3dafc70f9cc3f863827b9"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:01aa5f803db724447c1d423ed583e42bf5264c597fd55e4add4301f163b0be48"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4d0e3515ef98aa4f0dc289ff2eebb0ece6260bbf37c2ea2022aad63797eacf60"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:bce28277f308db43a6b4965734366f533b3ff009571ec7ffa583cb77539b84d6"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:8110e6c414d3efc574543109ee618fe2c1f96fa31833a1ff36cc34e968c4f233"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-win32.whl", hash = 
"sha256:ee5f5188edb20a29c1cc4a039b074fdc5575337c9a68f3063449ab47757bb064"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-win_amd64.whl", hash = "sha256:09083c2487ca3c0865dc588e07aeaa25416da3d95f7482c07e92f47e080aa17b"}, - {file = "SQLAlchemy-1.3.24.tar.gz", hash = "sha256:ebbb777cbf9312359b897bf81ba00dae0f5cb69fba2a18265dcc18a6f5ef7519"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f68016f9a5713684c1507cc37133c28035f29925c75c0df2f9d0f7571e23720a"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24bb0f81fbbb13d737b7f76d1821ec0b117ce8cbb8ee5e8641ad2de41aa916d3"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e93983cc0d2edae253b3f2141b0a3fb07e41c76cd79c2ad743fc27eb79c3f6db"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:84e10772cfc333eb08d0b7ef808cd76e4a9a30a725fb62a0495877a57ee41d81"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:427988398d2902de042093d17f2b9619a5ebc605bf6372f7d70e29bde6736842"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-win32.whl", hash = "sha256:1296f2cdd6db09b98ceb3c93025f0da4835303b8ac46c15c2136e27ee4d18d94"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-win_amd64.whl", hash = "sha256:80e7f697bccc56ac6eac9e2df5c98b47de57e7006d2e46e1a3c17c546254f6ef"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2f251af4c75a675ea42766880ff430ac33291c8d0057acca79710f9e5a77383d"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8f9e4c4718f111d7b530c4e6fb4d28f9f110eb82e7961412955b3875b66de0"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afb1672b57f58c0318ad2cff80b384e816735ffc7e848d8aa51e0b0fc2f4b7bb"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-win32.whl", hash = "sha256:6e41cb5cda641f3754568d2ed8962f772a7f2b59403b95c60c89f3e0bd25f15e"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-win_amd64.whl", hash = "sha256:5bed4f8c3b69779de9d99eb03fd9ab67a850d74ab0243d1be9d4080e77b6af12"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:49e3772eb3380ac88d35495843daf3c03f094b713e66c7d017e322144a5c6b7c"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:618827c1a1c243d2540314c6e100aee7af09a709bd005bae971686fab6723554"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9acf369aaadb71a725b7e83a5ef40ca3de1cf4cdc93fa847df6b12d3cd924b"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-win32.whl", hash = "sha256:763bd97c4ebc74136ecf3526b34808c58945023a59927b416acebcd68d1fc126"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-win_amd64.whl", hash = "sha256:f12aaf94f4d9679ca475975578739e12cc5b461172e04d66f7a3c39dd14ffc64"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:853fcfd1f54224ea7aabcf34b227d2b64a08cbac116ecf376907968b29b8e763"}, + {file = 
"SQLAlchemy-1.4.52-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f98dbb8fcc6d1c03ae8ec735d3c62110949a3b8bc6e215053aa27096857afb45"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e135fff2e84103bc15c07edd8569612ce317d64bdb391f49ce57124a73f45c5"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b5de6af8852500d01398f5047d62ca3431d1e29a331d0b56c3e14cb03f8094c"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3491c85df263a5c2157c594f54a1a9c72265b75d3777e61ee13c556d9e43ffc9"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-win32.whl", hash = "sha256:427c282dd0deba1f07bcbf499cbcc9fe9a626743f5d4989bfdfd3ed3513003dd"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-win_amd64.whl", hash = "sha256:ca5ce82b11731492204cff8845c5e8ca1a4bd1ade85e3b8fcf86e7601bfc6a39"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:29d4247313abb2015f8979137fe65f4eaceead5247d39603cc4b4a610936cd2b"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a752bff4796bf22803d052d4841ebc3c55c26fb65551f2c96e90ac7c62be763a"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7ea11727feb2861deaa293c7971a4df57ef1c90e42cb53f0da40c3468388000"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d913f8953e098ca931ad7f58797f91deed26b435ec3756478b75c608aa80d139"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a251146b921725547ea1735b060a11e1be705017b568c9f8067ca61e6ef85f20"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-win32.whl", hash = "sha256:1f8e1c6a6b7f8e9407ad9afc0ea41c1f65225ce505b79bc0342159de9c890782"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-win_amd64.whl", hash = "sha256:346ed50cb2c30f5d7a03d888e25744154ceac6f0e6e1ab3bc7b5b77138d37710"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4dae6001457d4497736e3bc422165f107ecdd70b0d651fab7f731276e8b9e12d"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5d2e08d79f5bf250afb4a61426b41026e448da446b55e4770c2afdc1e200fce"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbce5dd7c7735e01d24f5a60177f3e589078f83c8a29e124a6521b76d825b85"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bdb7b4d889631a3b2a81a3347c4c3f031812eb4adeaa3ee4e6b0d028ad1852b5"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c294ae4e6bbd060dd79e2bd5bba8b6274d08ffd65b58d106394cb6abbf35cf45"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-win32.whl", hash = "sha256:bcdfb4b47fe04967669874fb1ce782a006756fdbebe7263f6a000e1db969120e"}, + {file = 
"SQLAlchemy-1.4.52-cp38-cp38-win_amd64.whl", hash = "sha256:7d0dbc56cb6af5088f3658982d3d8c1d6a82691f31f7b0da682c7b98fa914e91"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a551d5f3dc63f096ed41775ceec72fdf91462bb95abdc179010dc95a93957800"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ab773f9ad848118df7a9bbabca53e3f1002387cdbb6ee81693db808b82aaab0"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2de46f5d5396d5331127cfa71f837cca945f9a2b04f7cb5a01949cf676db7d1"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7027be7930a90d18a386b25ee8af30514c61f3852c7268899f23fdfbd3107181"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99224d621affbb3c1a4f72b631f8393045f4ce647dd3262f12fe3576918f8bf3"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-win32.whl", hash = "sha256:c124912fd4e1bb9d1e7dc193ed482a9f812769cb1e69363ab68e01801e859821"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-win_amd64.whl", hash = "sha256:2c286fab42e49db23c46ab02479f328b8bdb837d3e281cae546cc4085c83b680"}, + {file = "SQLAlchemy-1.4.52.tar.gz", hash = "sha256:80e63bbdc5217dad3485059bdf6f65a7d43f33c8bde619df5c220edf03d87296"}, ] +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} + [package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] -mysql = ["mysqlclient"] -oracle = ["cx-oracle"] -postgresql = ["psycopg2"] -postgresql-pg8000 = ["pg8000 (<1.16.6)"] +mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] +mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] +sqlcipher = ["sqlcipher3_binary"] [[package]] name = "sqlalchemy-utils" version = "0.37.9" description = "Various utility functions for SQLAlchemy." 
-category = "main" optional = false python-versions = "~=3.4" files = [ @@ -1324,7 +1361,6 @@ url = ["furl (>=0.4.1)"] name = "swagger-spec-validator" version = "2.7.6" description = "Validation of Swagger specifications" -category = "dev" optional = false python-versions = "*" files = [ @@ -1341,7 +1377,6 @@ six = "*" name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1353,7 +1388,6 @@ files = [ name = "urllib3" version = "1.26.14" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -1370,7 +1404,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "werkzeug" version = "2.2.3" description = "The comprehensive WSGI web application library." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1388,7 +1421,6 @@ watchdog = ["watchdog"] name = "xmltodict" version = "0.13.0" description = "Makes working with XML feel like you are working with JSON" -category = "main" optional = false python-versions = ">=3.4" files = [ @@ -1400,7 +1432,6 @@ files = [ name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1414,5 +1445,5 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" -python-versions = "^3.9" -content-hash = "dbb013e70567b96835a357dfe393b4ca95e5fb973b73d81e228aef0dc884c1b9" +python-versions = "3.9.*" +content-hash = "6e1901355e4625895b36af41fe1bee8073274eeaf8920ce9d951d0e89d93e948" diff --git a/pyproject.toml b/pyproject.toml index 49bc2005..d274f237 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ indexclient = "^2.1.0" jsonschema = "^3.2" flask = "^2.0.1" psycopg2 = "^2.7" -sqlalchemy = "~1.3.3" +sqlalchemy = "^1.4.0" sqlalchemy-utils = "^0.37.3" PyYAML = "^5.4" diff --git a/tests/conftest.py b/tests/conftest.py index a78fafc6..6e3afb7f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -20,7 +20,7 @@ from indexd.alias.drivers.alchemy import SQLAlchemyAliasDriver from indexd.auth.drivers.alchemy import SQLAlchemyAuthDriver -POSTGRES_CONNECTION = "postgres://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret +POSTGRES_CONNECTION = "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret def clear_database(): diff --git a/tests/default_test_settings.py b/tests/default_test_settings.py index 7c78acd2..13b57770 100644 --- a/tests/default_test_settings.py +++ b/tests/default_test_settings.py @@ -36,4 +36,4 @@ # database used by the `/tests/postgres` tests settings["config"][ "TEST_DB" -] = "postgres://postgres:postgres@localhost:{0}/indexd_tests".format(psql_port) +] = "postgresql://postgres:postgres@localhost:{0}/indexd_tests".format(psql_port) diff --git a/tests/postgres/migrations/test_legacy_schema_migration.py b/tests/postgres/migrations/test_legacy_schema_migration.py index fbbd2aaa..76e20ff9 100644 --- a/tests/postgres/migrations/test_legacy_schema_migration.py +++ b/tests/postgres/migrations/test_legacy_schema_migration.py @@ -53,7 +53,7 @@ ], } -POSTGRES_CONNECTION = "postgres://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret +POSTGRES_CONNECTION = 
"postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret def update_version_table_for_testing(tb_name, val): diff --git a/tests/test_blueprint.py b/tests/test_blueprint.py index e9617fa3..f5ffd557 100644 --- a/tests/test_blueprint.py +++ b/tests/test_blueprint.py @@ -15,13 +15,13 @@ INDEX_CONFIG = { "driver": SQLAlchemyIndexDriver( - "postgres://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret + "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret ) } ALIAS_CONFIG = { "driver": SQLAlchemyAliasDriver( - "postgres://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret + "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret ) } diff --git a/tests/test_driver_alchemy_auth.py b/tests/test_driver_alchemy_auth.py index 1b391828..1e20543c 100644 --- a/tests/test_driver_alchemy_auth.py +++ b/tests/test_driver_alchemy_auth.py @@ -13,7 +13,7 @@ USERNAME = "abc" PASSWORD = "123" DIGESTED = SQLAlchemyAuthDriver.digest(PASSWORD) -POSTGRES_CONNECTION = "postgres://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret +POSTGRES_CONNECTION = "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret # TODO check if pytest has utilities for meta-programming of tests @@ -52,7 +52,7 @@ def test_driver_auth_rejects_bad_creds(): Test driver rejects bad creds. """ driver = SQLAlchemyAuthDriver( - "postgres://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret + "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret ) engine = create_engine(POSTGRES_CONNECTION) @@ -74,7 +74,7 @@ def test_driver_auth_returns_user_context(): Tests driver accepts good creds. 
""" driver = SQLAlchemyAuthDriver( - "postgres://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret + "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret ) engine = create_engine(POSTGRES_CONNECTION) diff --git a/tests/test_driver_alchemy_crud.py b/tests/test_driver_alchemy_crud.py index ed13ee95..d2fb63b3 100644 --- a/tests/test_driver_alchemy_crud.py +++ b/tests/test_driver_alchemy_crud.py @@ -17,7 +17,7 @@ # TODO check if pytest has utilities for meta-programming of tests -POSTGRES_CONNECTION = "postgres://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret +POSTGRES_CONNECTION = "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret def test_driver_init_does_not_create_records( diff --git a/tests/test_single_table_migration.py b/tests/test_single_table_migration.py new file mode 100644 index 00000000..e4fc95c1 --- /dev/null +++ b/tests/test_single_table_migration.py @@ -0,0 +1,5 @@ +# from migrate_with_asyncio import IndexRecordMigrator + + +# def test_migrate_tables(): +# migrator = IndexRecordMigrator(conf_data="/bin/creds.json") From b28594f5014880b541f174a5e24c1101d3948b16 Mon Sep 17 00:00:00 2001 From: BinamB Date: Fri, 24 May 2024 12:12:01 -0500 Subject: [PATCH 12/47] add asyncpg --- bin/migrate_with_asyncio.py | 2 +- poetry.lock | 70 ++++++++++++++++++++++++++++++++++++- pyproject.toml | 1 + 3 files changed, 71 insertions(+), 2 deletions(-) diff --git a/bin/migrate_with_asyncio.py b/bin/migrate_with_asyncio.py index 8de00da9..8692e6ae 100644 --- a/bin/migrate_with_asyncio.py +++ b/bin/migrate_with_asyncio.py @@ -66,7 +66,7 @@ def __init__(self, conf_data=None): self.n_workers = self.thread_pool_size + self.concurrency self.engine = create_async_engine( - f"postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}", echo=True + f"postgresql+asyncpg://{usr}:{psw}@{pghost}:{pgport}/{db}", echo=True ) self.async_session = sessionmaker( self.engine, expire_on_commit=False, class_=AsyncSession diff --git a/poetry.lock b/poetry.lock index 5a86ed4c..433169ab 100644 --- a/poetry.lock +++ b/poetry.lock @@ -38,6 +38,74 @@ doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] trio = ["trio (>=0.16,<0.22)"] +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "asyncpg" +version = "0.29.0" +description = "An asyncio PostgreSQL driver" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "asyncpg-0.29.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72fd0ef9f00aeed37179c62282a3d14262dbbafb74ec0ba16e1b1864d8a12169"}, + {file = "asyncpg-0.29.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52e8f8f9ff6e21f9b39ca9f8e3e33a5fcdceaf5667a8c5c32bee158e313be385"}, + {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e6823a7012be8b68301342ba33b4740e5a166f6bbda0aee32bc01638491a22"}, + {file = 
"asyncpg-0.29.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:746e80d83ad5d5464cfbf94315eb6744222ab00aa4e522b704322fb182b83610"}, + {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ff8e8109cd6a46ff852a5e6bab8b0a047d7ea42fcb7ca5ae6eaae97d8eacf397"}, + {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97eb024685b1d7e72b1972863de527c11ff87960837919dac6e34754768098eb"}, + {file = "asyncpg-0.29.0-cp310-cp310-win32.whl", hash = "sha256:5bbb7f2cafd8d1fa3e65431833de2642f4b2124be61a449fa064e1a08d27e449"}, + {file = "asyncpg-0.29.0-cp310-cp310-win_amd64.whl", hash = "sha256:76c3ac6530904838a4b650b2880f8e7af938ee049e769ec2fba7cd66469d7772"}, + {file = "asyncpg-0.29.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4900ee08e85af01adb207519bb4e14b1cae8fd21e0ccf80fac6aa60b6da37b4"}, + {file = "asyncpg-0.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a65c1dcd820d5aea7c7d82a3fdcb70e096f8f70d1a8bf93eb458e49bfad036ac"}, + {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b52e46f165585fd6af4863f268566668407c76b2c72d366bb8b522fa66f1870"}, + {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc600ee8ef3dd38b8d67421359779f8ccec30b463e7aec7ed481c8346decf99f"}, + {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:039a261af4f38f949095e1e780bae84a25ffe3e370175193174eb08d3cecab23"}, + {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6feaf2d8f9138d190e5ec4390c1715c3e87b37715cd69b2c3dfca616134efd2b"}, + {file = "asyncpg-0.29.0-cp311-cp311-win32.whl", hash = "sha256:1e186427c88225ef730555f5fdda6c1812daa884064bfe6bc462fd3a71c4b675"}, + {file = "asyncpg-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfe73ffae35f518cfd6e4e5f5abb2618ceb5ef02a2365ce64f132601000587d3"}, + {file = "asyncpg-0.29.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6011b0dc29886ab424dc042bf9eeb507670a3b40aece3439944006aafe023178"}, + {file = "asyncpg-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b544ffc66b039d5ec5a7454667f855f7fec08e0dfaf5a5490dfafbb7abbd2cfb"}, + {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d84156d5fb530b06c493f9e7635aa18f518fa1d1395ef240d211cb563c4e2364"}, + {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54858bc25b49d1114178d65a88e48ad50cb2b6f3e475caa0f0c092d5f527c106"}, + {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bde17a1861cf10d5afce80a36fca736a86769ab3579532c03e45f83ba8a09c59"}, + {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:37a2ec1b9ff88d8773d3eb6d3784dc7e3fee7756a5317b67f923172a4748a175"}, + {file = "asyncpg-0.29.0-cp312-cp312-win32.whl", hash = "sha256:bb1292d9fad43112a85e98ecdc2e051602bce97c199920586be83254d9dafc02"}, + {file = "asyncpg-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:2245be8ec5047a605e0b454c894e54bf2ec787ac04b1cb7e0d3c67aa1e32f0fe"}, + {file = "asyncpg-0.29.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0009a300cae37b8c525e5b449233d59cd9868fd35431abc470a3e364d2b85cb9"}, + {file = "asyncpg-0.29.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cad1324dbb33f3ca0cd2074d5114354ed3be2b94d48ddfd88af75ebda7c43cc"}, + {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:012d01df61e009015944ac7543d6ee30c2dc1eb2f6b10b62a3f598beb6531548"}, + {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000c996c53c04770798053e1730d34e30cb645ad95a63265aec82da9093d88e7"}, + {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e0bfe9c4d3429706cf70d3249089de14d6a01192d617e9093a8e941fea8ee775"}, + {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:642a36eb41b6313ffa328e8a5c5c2b5bea6ee138546c9c3cf1bffaad8ee36dd9"}, + {file = "asyncpg-0.29.0-cp38-cp38-win32.whl", hash = "sha256:a921372bbd0aa3a5822dd0409da61b4cd50df89ae85150149f8c119f23e8c408"}, + {file = "asyncpg-0.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:103aad2b92d1506700cbf51cd8bb5441e7e72e87a7b3a2ca4e32c840f051a6a3"}, + {file = "asyncpg-0.29.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5340dd515d7e52f4c11ada32171d87c05570479dc01dc66d03ee3e150fb695da"}, + {file = "asyncpg-0.29.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e17b52c6cf83e170d3d865571ba574577ab8e533e7361a2b8ce6157d02c665d3"}, + {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f100d23f273555f4b19b74a96840aa27b85e99ba4b1f18d4ebff0734e78dc090"}, + {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48e7c58b516057126b363cec8ca02b804644fd012ef8e6c7e23386b7d5e6ce83"}, + {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f9ea3f24eb4c49a615573724d88a48bd1b7821c890c2effe04f05382ed9e8810"}, + {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8d36c7f14a22ec9e928f15f92a48207546ffe68bc412f3be718eedccdf10dc5c"}, + {file = "asyncpg-0.29.0-cp39-cp39-win32.whl", hash = "sha256:797ab8123ebaed304a1fad4d7576d5376c3a006a4100380fb9d517f0b59c1ab2"}, + {file = "asyncpg-0.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:cce08a178858b426ae1aa8409b5cc171def45d4293626e7aa6510696d46decd8"}, + {file = "asyncpg-0.29.0.tar.gz", hash = "sha256:d1c49e1f44fffafd9a55e1a9b101590859d881d639ea2922516f5d9c512d354e"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.12.0\""} + +[package.extras] +docs = ["Sphinx (>=5.3.0,<5.4.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["flake8 (>=6.1,<7.0)", "uvloop (>=0.15.3)"] + [[package]] name = "atomicwrites" version = "1.4.1" @@ -1446,4 +1514,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = "3.9.*" -content-hash = "6e1901355e4625895b36af41fe1bee8073274eeaf8920ce9d951d0e89d93e948" +content-hash = "3e45ad2463a580453c01263b47601520d905b6c70796d4de9a9d2e067460a883" diff --git a/pyproject.toml b/pyproject.toml index d274f237..b6f87c45 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,7 @@ psycopg2 = "^2.7" sqlalchemy = "^1.4.0" sqlalchemy-utils = "^0.37.3" PyYAML = "^5.4" +asyncpg = "^0.29.0" [tool.poetry.dev-dependencies] From 7ecffd303c9b4d20bc449e8774ef3091cbd220a4 Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 28 May 2024 13:35:13 -0500 Subject: [PATCH 13/47] fix async --- bin/migrate_with_asyncio.py | 41 +++++++++++++++++++++++-------------- 1 file changed, 26 insertions(+), 15 deletions(-) diff --git a/bin/migrate_with_asyncio.py b/bin/migrate_with_asyncio.py index 8692e6ae..49ebf90a 100644 --- a/bin/migrate_with_asyncio.py +++ b/bin/migrate_with_asyncio.py @@ -2,9 +2,19 @@ import json import config_helper 
from cdislogging import get_logger -from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String, DateTime +from sqlalchemy import ( + create_engine, + MetaData, + Table, + Column, + Integer, + String, + DateTime, + func, +) from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession +from sqlalchemy.future import select from sqlalchemy.orm import sessionmaker import time import random @@ -64,6 +74,7 @@ def __init__(self, conf_data=None): self.buffer_size = 10 self.batch_size = 1000 self.n_workers = self.thread_pool_size + self.concurrency + self.counter = 0 self.engine = create_async_engine( f"postgresql+asyncpg://{usr}:{psw}@{pghost}:{pgport}/{db}", echo=True @@ -92,21 +103,20 @@ async def migrate_tables(self): collector_queue = asyncio.Queue(maxsize=self.n_workers) inserter_queue = asyncio.Queue(maxsize=self.buffer_size) - # loop = asyncio.get_event_loop() self.logger.info("Collecting Data from old IndexD Table...") offset = 0 - collecters = loop.create_task( + collecters = asyncio.create_task( self.collect(collector_queue, self.batch_size, offset) ) self.logger.info("Initializing workers...") workers = [ - loop.create_task(self.worker(j, inserter_queue, collector_queue)) + asyncio.create_task(self.worker(j, inserter_queue, collector_queue)) for j in range(self.n_workers) ] self.logger.info("Inserting Data to new table") inserters = [ - loop.create_task(self.insert_to_db(i, inserter_queue)) + asyncio.create_task(self.insert_to_db(i, inserter_queue)) for i in range(self.concurrency) ] @@ -125,7 +135,8 @@ async def migrate_tables(self): async def collect(self, collector_queue, batch_size, offset): """ """ - while True: + while True or self.counter <= 30: + records_to_insert = None self.logger.info( f"Collecting {offset} - {offset+batch_size} records with collector" ) @@ -134,7 +145,9 @@ async def collect(self, collector_queue, batch_size, offset): offset, batch_size ) except Exception as e: - self.logger.error(f"Failed to query old table for offset {offset}") + self.logger.error( + f"Failed to query old table for offset {offset} with {e}" + ) if not records_to_insert: break @@ -145,6 +158,7 @@ async def collect(self, collector_queue, batch_size, offset): break offset += batch_size + self.counter += 1 self.logger.info(f"Added {offset} records into the collector queue") @@ -175,7 +189,7 @@ async def insert_to_db(self, name, inserter_queue): await session.commit() # self.session.bulk_save_objects(bulk_rows) except Exception as e: - self.session.rollback() + session.rollback() if "duplicate key value violates unique constraint" in str(e): self.logger.error(f"Errored at {offset}: {e}") else: @@ -187,14 +201,11 @@ async def insert_to_db(self, name, inserter_queue): async def query_record_with_offset(self, offset, batch_size, retry_limit=4): async with self.async_session() as session: - stmt = ( - self.session.query(IndexRecord) - .offset(offset) - .limit(batch_size) - .yield_per(batch_size) - ) + stmt = select(IndexRecord).offset(offset).limit(batch_size) + results = await session.execute(stmt) + records = results.scalars().all() records_to_insert = [] - for row in stmt: + for row in records: tasks = [ self.get_index_record_hash(row.did), self.get_urls_record(row.did), From 6fa45b42dc76f4020bfb781dace2c4000354a077 Mon Sep 17 00:00:00 2001 From: BinamB Date: Wed, 29 May 2024 10:14:01 -0500 Subject: [PATCH 14/47] remove worker --- bin/migrate_with_asyncio.py | 29 ++--------------------------- 1 file changed, 2 
insertions(+), 27 deletions(-) diff --git a/bin/migrate_with_asyncio.py b/bin/migrate_with_asyncio.py index 49ebf90a..05337c26 100644 --- a/bin/migrate_with_asyncio.py +++ b/bin/migrate_with_asyncio.py @@ -109,24 +109,15 @@ async def migrate_tables(self): collecters = asyncio.create_task( self.collect(collector_queue, self.batch_size, offset) ) - self.logger.info("Initializing workers...") - workers = [ - asyncio.create_task(self.worker(j, inserter_queue, collector_queue)) - for j in range(self.n_workers) - ] self.logger.info("Inserting Data to new table") inserters = [ - asyncio.create_task(self.insert_to_db(i, inserter_queue)) + asyncio.create_task(self.insert_to_db(i, collector_queue)) for i in range(self.concurrency) ] await asyncio.gather(collecters) await collector_queue.join() - for w in workers: - w.cancel() - await asyncio.gather(*workers, return_exceptions=True) - await inserter_queue.join() for i in inserters: @@ -135,7 +126,7 @@ async def migrate_tables(self): async def collect(self, collector_queue, batch_size, offset): """ """ - while True or self.counter <= 30: + while self.counter <= 3: records_to_insert = None self.logger.info( f"Collecting {offset} - {offset+batch_size} records with collector" @@ -162,22 +153,6 @@ async def collect(self, collector_queue, batch_size, offset): self.logger.info(f"Added {offset} records into the collector queue") - async def worker(self, name, collector_queue, inserter_queue): - # Handles the semaphore - # while not collector_queue.empty(): - # self.logger.info(f"Worker {name} adding records to insert queue") - # bulk_rows = await collector_queue.get() - # print(bulk_rows) - # await inserter_queue.put(bulk_rows) - # collector_queue.task_done() - while True: - bulk_rows = await collector_queue.get() - if bulk_rows is None: - break - self.logger.info(f"Worker {name} adding records to insert queue") - await inserter_queue.put(bulk_rows) - collector_queue.task_done() - async def insert_to_db(self, name, inserter_queue): async with self.async_session() as session: while True: From a3dccb516292233defb0247a6a3aaec89a5cbe36 Mon Sep 17 00:00:00 2001 From: BinamB Date: Mon, 10 Jun 2024 12:37:01 -0500 Subject: [PATCH 15/47] Async migration --- bin/migrate_with_asyncio.py | 148 ++++++++++++++++++------------------ 1 file changed, 75 insertions(+), 73 deletions(-) diff --git a/bin/migrate_with_asyncio.py b/bin/migrate_with_asyncio.py index 05337c26..914478b3 100644 --- a/bin/migrate_with_asyncio.py +++ b/bin/migrate_with_asyncio.py @@ -1,32 +1,18 @@ +import gc import argparse import json import config_helper from cdislogging import get_logger -from sqlalchemy import ( - create_engine, - MetaData, - Table, - Column, - Integer, - String, - DateTime, - func, -) -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy import create_engine, func from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession from sqlalchemy.future import select from sqlalchemy.orm import sessionmaker import time -import random -import re import asyncio -import cProfile - from indexd.index.drivers.alchemy import ( IndexRecord, IndexRecordAuthz, - BaseVersion, IndexRecordAlias, IndexRecordUrl, IndexRecordACE, @@ -45,13 +31,6 @@ def load_json(file_name): return config_helper.load_json(file_name, APP_NAME) -# @profile -# def main(): -# migrator = IndexRecordMigrator() -# asyncio.run(migrator.migrate_tables()) -# return - - class IndexRecordMigrator: def __init__(self, conf_data=None): self.logger = get_logger("migrate_single_table", log_level="debug") @@ 
-68,30 +47,27 @@ def __init__(self, conf_data=None): pghost = conf_data.get("db_host", "{{db_host}}") pgport = 5432 - self.chunk_size = 10 - self.concurrency = 5 - self.thread_pool_size = 3 - self.buffer_size = 10 - self.batch_size = 1000 - self.n_workers = self.thread_pool_size + self.concurrency + self.auto_job_config = True + self.insertion_workers = 10 + self.batch_size = 200 + self.collection_workers = 100 self.counter = 0 + self.psql_pool_size = 50 + self.max_overflow = 10 self.engine = create_async_engine( - f"postgresql+asyncpg://{usr}:{psw}@{pghost}:{pgport}/{db}", echo=True + f"postgresql+asyncpg://{usr}:{psw}@{pghost}:{pgport}/{db}", + echo=False, + pool_size=self.psql_pool_size, + max_overflow=self.max_overflow, ) self.async_session = sessionmaker( self.engine, expire_on_commit=False, class_=AsyncSession ) - # Base = declarative_base() - # Base.metadata.create_all(self.engine) - # Session = sessionmaker(bind=self.engine) - - # self.session = Session() - async def init(self): async with self.async_session() as session: - await session.run_sync(Base.metadata.create_all) + await session.run_sync(Record.metadata.create_all) async def migrate_tables(self): self.logger.info("Starting migration job...") @@ -101,78 +77,106 @@ async def migrate_tables(self): ) self.logger.info(f"Total records to copy: {self.total_records}") - collector_queue = asyncio.Queue(maxsize=self.n_workers) - inserter_queue = asyncio.Queue(maxsize=self.buffer_size) + if ( + self.total_records - self.batch_size * self.collection_workers + ) < 0 or self.auto_job_config: + self.collection_workers = int(self.total_records / self.batch_size) + # TODO: Change this log later + self.logger.info( + f"Batch size and number of workers exceeds total records to be copied. Changing number of collector workers to {self.batch_size}" + ) + + if self.auto_job_config: + self.insertion_workers = int(self.collection_workers / 2) + self.logger.info( + f"Setting number of insertion workers to {self.insertion_workers}." + ) + + collector_queue = asyncio.Queue(maxsize=self.collection_workers) + loop = asyncio.get_event_loop() self.logger.info("Collecting Data from old IndexD Table...") - offset = 0 - collecters = asyncio.create_task( - self.collect(collector_queue, self.batch_size, offset) - ) + collect_tasks = [ + loop.create_task( + self.collect(collector_queue, self.batch_size, i * self.batch_size) + ) + for i in range(self.collection_workers) + ] + self.logger.info("Inserting Data to new table") - inserters = [ - asyncio.create_task(self.insert_to_db(i, collector_queue)) - for i in range(self.concurrency) + insert_tasks = [ + loop.create_task(self.insert_to_db(i, collector_queue)) + for i in range(self.insertion_workers) ] - await asyncio.gather(collecters) + await asyncio.gather(*collect_tasks) + await collector_queue.join() - await inserter_queue.join() + for task in insert_tasks: + task.cancel() - for i in inserters: - i.cancel() - await asyncio.gather(*inserters, return_exceptions=True) + await asyncio.gather(*insert_tasks, return_exceptions=True) + + self.logger.info( + f"Migration job completed. {self.counter} records were considered duplicates." 
+ ) async def collect(self, collector_queue, batch_size, offset): - """ """ - while self.counter <= 3: - records_to_insert = None + while offset < self.total_records: self.logger.info( - f"Collecting {offset} - {offset+batch_size} records with collector" + f"Collecting {offset} - {offset + batch_size} records with collector" ) try: records_to_insert = await self.query_record_with_offset( offset, batch_size ) + if not records_to_insert: + self.logger.info(f"No more records to collect at offset {offset}") + break except Exception as e: self.logger.error( f"Failed to query old table for offset {offset} with {e}" ) - - if not records_to_insert: break + self.logger.info(f"Adding records to collector queue at offset {offset}") await collector_queue.put(records_to_insert) - if len(records_to_insert) < batch_size: - break - offset += batch_size - self.counter += 1 - self.logger.info(f"Added {offset} records into the collector queue") + await collector_queue.put(None) + self.logger.info(f"Collector finished collecting records.") - async def insert_to_db(self, name, inserter_queue): + async def insert_to_db(self, name, collector_queue): async with self.async_session() as session: while True: + self.logger.info(f"Inserter {name} waiting for records") + bulk_rows = await collector_queue.get() + + if bulk_rows is None: + self.logger.info( + f"Inserter {name} didn't receive any records to insert. Killing worker..." + ) + break + self.logger.info(f"Inserter {name} bulk inserting records") - bulk_rows = await inserter_queue.get() try: async with session.begin(): session.add_all(bulk_rows) await session.commit() - # self.session.bulk_save_objects(bulk_rows) except Exception as e: - session.rollback() + await session.rollback() if "duplicate key value violates unique constraint" in str(e): - self.logger.error(f"Errored at {offset}: {e}") + self.counter += 1 + self.logger.error(f"Duplicate key error: {e}") else: - self.logger.error(f"Ran into error at {offset}: {e}") - break + self.logger.error(f"Error inserting records: {e}") finally: - inserter_queue.task_done() - self.logger.info("Successfully inserted to new table!") + gc.collect() + collector_queue.task_done() + + self.logger.info(f"Inserter {name} finished") async def query_record_with_offset(self, offset, batch_size, retry_limit=4): async with self.async_session() as session: @@ -283,7 +287,6 @@ async def get_index_record_metadata(self, did): return {key: value for key, value in results} def remove_duplicate_records(self, records, error): - # Extract the key value from the error message key_value = re.search(r"\(guid\)=\((.*?)\)", str(error)).group(1) self.logger.info(f"Removing duplicate record {key_value}") for record in records: @@ -304,7 +307,6 @@ def remove_duplicate_records(self, records, error): args = parser.parse_args() migrator = IndexRecordMigrator(conf_data=args.creds_path) asyncio.run(migrator.migrate_tables()) - # cProfile.run("asyncio.run(migrator.index_record_to_new_table())", filename="profile_results.txt") end_time = time.time() print("Total Time: {}".format(end_time - start_time)) From a76fc530205fbf0c6b527b240235bf9e25e2c6b3 Mon Sep 17 00:00:00 2001 From: BinamB Date: Wed, 12 Jun 2024 12:07:11 -0500 Subject: [PATCH 16/47] migrate no bulk inserts --- bin/migrate_with_asyncio.py | 30 +++++++++++++++++++----------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/bin/migrate_with_asyncio.py b/bin/migrate_with_asyncio.py index 914478b3..bf744f86 100644 --- a/bin/migrate_with_asyncio.py +++ b/bin/migrate_with_asyncio.py @@ 
-47,13 +47,13 @@ def __init__(self, conf_data=None): pghost = conf_data.get("db_host", "{{db_host}}") pgport = 5432 - self.auto_job_config = True + self.auto_job_config = False self.insertion_workers = 10 self.batch_size = 200 self.collection_workers = 100 self.counter = 0 - self.psql_pool_size = 50 - self.max_overflow = 10 + self.psql_pool_size = self.collection_workers + self.insertion_workers + self.max_overflow = self.insertion_workers self.engine = create_async_engine( f"postgresql+asyncpg://{usr}:{psw}@{pghost}:{pgport}/{db}", @@ -97,9 +97,7 @@ async def migrate_tables(self): self.logger.info("Collecting Data from old IndexD Table...") collect_tasks = [ - loop.create_task( - self.collect(collector_queue, self.batch_size, i * self.batch_size) - ) + loop.create_task(self.collect(collector_queue, self.batch_size, i)) for i in range(self.collection_workers) ] @@ -122,7 +120,8 @@ async def migrate_tables(self): f"Migration job completed. {self.counter} records were considered duplicates." ) - async def collect(self, collector_queue, batch_size, offset): + async def collect(self, collector_queue, batch_size, worker_id): + offset = worker_id * batch_size while offset < self.total_records: self.logger.info( f"Collecting {offset} - {offset + batch_size} records with collector" @@ -143,10 +142,10 @@ async def collect(self, collector_queue, batch_size, offset): self.logger.info(f"Adding records to collector queue at offset {offset}") await collector_queue.put(records_to_insert) - offset += batch_size + offset += self.collection_workers * batch_size - await collector_queue.put(None) self.logger.info(f"Collector finished collecting records.") + await collector_queue.put(None) async def insert_to_db(self, name, collector_queue): async with self.async_session() as session: @@ -160,10 +159,19 @@ async def insert_to_db(self, name, collector_queue): ) break + if not bulk_rows: + continue + self.logger.info(f"Inserter {name} bulk inserting records") try: async with session.begin(): - session.add_all(bulk_rows) + for record in bulk_rows: + exists = await session.execute( + select(Record).filter_by(guid=record.guid) + ) + if not exists.scalar(): + session.add(record) + # session.add_all(bulk_rows) await session.commit() except Exception as e: await session.rollback() @@ -176,7 +184,7 @@ async def insert_to_db(self, name, collector_queue): gc.collect() collector_queue.task_done() - self.logger.info(f"Inserter {name} finished") + self.logger.info(f"Inserter {name} finished. 
Killing Inserter...") async def query_record_with_offset(self, offset, batch_size, retry_limit=4): async with self.async_session() as session: From 1b7219a8220352570df71f47ebce10c16b20edae Mon Sep 17 00:00:00 2001 From: BinamB Date: Mon, 17 Jun 2024 09:20:59 -0500 Subject: [PATCH 17/47] async migration remove + PR comments --- bin/migrate_to_single_table.py | 111 +++++---- bin/migrate_with_asyncio.py | 320 -------------------------- bin/migration_with_copy.py | 0 deployment/Secrets/indexd_settings.py | 1 - indexd/index/blueprint.py | 1 + tests/test_single_table_migration.py | 5 - 6 files changed, 65 insertions(+), 373 deletions(-) delete mode 100644 bin/migrate_with_asyncio.py delete mode 100644 bin/migration_with_copy.py delete mode 100644 tests/test_single_table_migration.py diff --git a/bin/migrate_to_single_table.py b/bin/migrate_to_single_table.py index 3384f1cd..00412fd7 100644 --- a/bin/migrate_to_single_table.py +++ b/bin/migrate_to_single_table.py @@ -68,7 +68,6 @@ def __init__(self, conf_data=None): self.session = Session() - @profile # for memory-profiler def index_record_to_new_table(self, batch_size=1000, retry_limit=4): try: total_records = self.session.query(IndexRecord).count() @@ -92,29 +91,32 @@ def index_record_to_new_table(self, batch_size=1000, retry_limit=4): alias = self.get_index_record_alias(row.did) metadata = self.get_index_record_metadata(row.did) - records_to_insert.append( - Record( - guid=row.did, - baseid=row.baseid, - rev=row.rev, - form=row.form, - size=row.size, - created_date=row.created_date, - updated_date=row.updated_date, - content_created_date=row.content_created_date, - content_updated_date=row.content_updated_date, - file_name=row.file_name, - version=row.version, - uploader=row.uploader, - hashes=hashes, - urls=urls, - url_metadata=url_metadata, - acl=acl, - authz=authz, - alias=alias, - record_metadata=metadata, + try: + records_to_insert.append( + Record( + guid=row.did, + baseid=row.baseid, + rev=row.rev, + form=row.form, + size=row.size, + created_date=row.created_date, + updated_date=row.updated_date, + content_created_date=row.content_created_date, + content_updated_date=row.content_updated_date, + file_name=row.file_name, + version=row.version, + uploader=row.uploader, + hashes=hashes, + urls=urls, + url_metadata=url_metadata, + acl=acl, + authz=authz, + alias=alias, + record_metadata=metadata, + ) ) - ) + except Exception as e: + print(e) while len(records_to_insert) > 0: try: @@ -143,14 +145,15 @@ def index_record_to_new_table(self, batch_size=1000, retry_limit=4): self.session.close() self.logger.info("Finished migrating :D") - def get_record_info(self, did): - pass - def get_index_record_hash(self, did): try: - stmt = self.session.query( - IndexRecordHash.hash_type, IndexRecordHash.hash_value - ).filter(IndexRecordHash.did == did) + stmt = ( + self.session.query( + IndexRecordHash.hash_type, IndexRecordHash.hash_value + ) + .filter(IndexRecordHash.did == did) + .all() + ) res = {hash_type: hash_value for hash_type, hash_value in stmt} return res @@ -159,10 +162,12 @@ def get_index_record_hash(self, did): def get_urls_record(self, did): try: - stmt = self.session.query(IndexRecordUrl.url).filter( - IndexRecordUrl.did == did + stmt = ( + self.session.query(IndexRecordUrl.url) + .filter(IndexRecordUrl.did == did) + .all() ) - res = [url for url in stmt] + res = [u.url for u in stmt] return res except Exception as e: @@ -170,11 +175,15 @@ def get_urls_record(self, did): def get_urls_metadata(self, did): try: - stmt = self.session.query( - 
IndexRecordUrlMetadata.url, - IndexRecordUrlMetadata.key, - IndexRecordUrlMetadata.value, - ).filter(IndexRecordUrlMetadata.did == did) + stmt = ( + self.session.query( + IndexRecordUrlMetadata.url, + IndexRecordUrlMetadata.key, + IndexRecordUrlMetadata.value, + ) + .filter(IndexRecordUrlMetadata.did == did) + .all() + ) res = {url: {key: value} for url, key, value in stmt} return res except Exception as e: @@ -182,28 +191,34 @@ def get_urls_metadata(self, did): def get_index_record_ace(self, did): try: - stmt = self.session.query(IndexRecordACE.ace).filter( - IndexRecordACE.did == did + stmt = ( + self.session.query(IndexRecordACE.ace) + .filter(IndexRecordACE.did == did) + .all() ) - res = [ace for ace in stmt] + res = [a.ace for a in stmt] return res except Exception as e: self.logger.error(f"Error with ace for {did}: {e}") def get_index_record_authz(self, did): try: - stmt = self.session.query(IndexRecordAuthz.resource).filter( - IndexRecordAuthz.did == did + stmt = ( + self.session.query(IndexRecordAuthz.resource) + .filter(IndexRecordAuthz.did == did) + .all() ) - res = [resource for resource in stmt] + res = [r.resource for r in stmt] return res except Exception as e: self.logger.error(f"Error with authz for {did}: {e}") def get_index_record_alias(self, did): try: - stmt = self.session.query(IndexRecordAlias.name).filter( - IndexRecordAlias.did == did + stmt = ( + self.session.query(IndexRecordAlias.name) + .filter(IndexRecordAlias.did == did) + .all() ) res = [row.name for row in stmt] return res @@ -212,8 +227,10 @@ def get_index_record_alias(self, did): def get_index_record_metadata(self, did): try: - stmt = self.session.query(IndexRecordMetadata).filter( - IndexRecordMetadata.did == did + stmt = ( + self.session.query(IndexRecordMetadata) + .filter(IndexRecordMetadata.did == did) + .all() ) res = {row.key: row.value for row in stmt} return res diff --git a/bin/migrate_with_asyncio.py b/bin/migrate_with_asyncio.py deleted file mode 100644 index bf744f86..00000000 --- a/bin/migrate_with_asyncio.py +++ /dev/null @@ -1,320 +0,0 @@ -import gc -import argparse -import json -import config_helper -from cdislogging import get_logger -from sqlalchemy import create_engine, func -from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession -from sqlalchemy.future import select -from sqlalchemy.orm import sessionmaker -import time -import asyncio - -from indexd.index.drivers.alchemy import ( - IndexRecord, - IndexRecordAuthz, - IndexRecordAlias, - IndexRecordUrl, - IndexRecordACE, - IndexRecordMetadata, - IndexRecordUrlMetadata, - IndexRecordHash, -) -from indexd.index.drivers.single_table_alchemy import Record - -APP_NAME = "indexd" - -logger = get_logger("migrate_single_table", log_level="debug") - - -def load_json(file_name): - return config_helper.load_json(file_name, APP_NAME) - - -class IndexRecordMigrator: - def __init__(self, conf_data=None): - self.logger = get_logger("migrate_single_table", log_level="debug") - - if conf_data: - with open(conf_data, "r") as reader: - conf_data = json.load(reader) - else: - conf_data = load_json("creds.json") - - usr = conf_data.get("db_username", "{{db_username}}") - db = conf_data.get("db_database", "{{db_database}}") - psw = conf_data.get("db_password", "{{db_password}}") - pghost = conf_data.get("db_host", "{{db_host}}") - pgport = 5432 - - self.auto_job_config = False - self.insertion_workers = 10 - self.batch_size = 200 - self.collection_workers = 100 - self.counter = 0 - self.psql_pool_size = self.collection_workers + 
self.insertion_workers - self.max_overflow = self.insertion_workers - - self.engine = create_async_engine( - f"postgresql+asyncpg://{usr}:{psw}@{pghost}:{pgport}/{db}", - echo=False, - pool_size=self.psql_pool_size, - max_overflow=self.max_overflow, - ) - self.async_session = sessionmaker( - self.engine, expire_on_commit=False, class_=AsyncSession - ) - - async def init(self): - async with self.async_session() as session: - await session.run_sync(Record.metadata.create_all) - - async def migrate_tables(self): - self.logger.info("Starting migration job...") - async with self.async_session() as session: - self.total_records = await session.scalar( - select(func.count(IndexRecord.did)) - ) - self.logger.info(f"Total records to copy: {self.total_records}") - - if ( - self.total_records - self.batch_size * self.collection_workers - ) < 0 or self.auto_job_config: - self.collection_workers = int(self.total_records / self.batch_size) - # TODO: Change this log later - self.logger.info( - f"Batch size and number of workers exceeds total records to be copied. Changing number of collector workers to {self.batch_size}" - ) - - if self.auto_job_config: - self.insertion_workers = int(self.collection_workers / 2) - self.logger.info( - f"Setting number of insertion workers to {self.insertion_workers}." - ) - - collector_queue = asyncio.Queue(maxsize=self.collection_workers) - loop = asyncio.get_event_loop() - - self.logger.info("Collecting Data from old IndexD Table...") - collect_tasks = [ - loop.create_task(self.collect(collector_queue, self.batch_size, i)) - for i in range(self.collection_workers) - ] - - self.logger.info("Inserting Data to new table") - insert_tasks = [ - loop.create_task(self.insert_to_db(i, collector_queue)) - for i in range(self.insertion_workers) - ] - - await asyncio.gather(*collect_tasks) - - await collector_queue.join() - - for task in insert_tasks: - task.cancel() - - await asyncio.gather(*insert_tasks, return_exceptions=True) - - self.logger.info( - f"Migration job completed. {self.counter} records were considered duplicates." - ) - - async def collect(self, collector_queue, batch_size, worker_id): - offset = worker_id * batch_size - while offset < self.total_records: - self.logger.info( - f"Collecting {offset} - {offset + batch_size} records with collector" - ) - try: - records_to_insert = await self.query_record_with_offset( - offset, batch_size - ) - if not records_to_insert: - self.logger.info(f"No more records to collect at offset {offset}") - break - except Exception as e: - self.logger.error( - f"Failed to query old table for offset {offset} with {e}" - ) - break - - self.logger.info(f"Adding records to collector queue at offset {offset}") - await collector_queue.put(records_to_insert) - - offset += self.collection_workers * batch_size - - self.logger.info(f"Collector finished collecting records.") - await collector_queue.put(None) - - async def insert_to_db(self, name, collector_queue): - async with self.async_session() as session: - while True: - self.logger.info(f"Inserter {name} waiting for records") - bulk_rows = await collector_queue.get() - - if bulk_rows is None: - self.logger.info( - f"Inserter {name} didn't receive any records to insert. Killing worker..." 
- ) - break - - if not bulk_rows: - continue - - self.logger.info(f"Inserter {name} bulk inserting records") - try: - async with session.begin(): - for record in bulk_rows: - exists = await session.execute( - select(Record).filter_by(guid=record.guid) - ) - if not exists.scalar(): - session.add(record) - # session.add_all(bulk_rows) - await session.commit() - except Exception as e: - await session.rollback() - if "duplicate key value violates unique constraint" in str(e): - self.counter += 1 - self.logger.error(f"Duplicate key error: {e}") - else: - self.logger.error(f"Error inserting records: {e}") - finally: - gc.collect() - collector_queue.task_done() - - self.logger.info(f"Inserter {name} finished. Killing Inserter...") - - async def query_record_with_offset(self, offset, batch_size, retry_limit=4): - async with self.async_session() as session: - stmt = select(IndexRecord).offset(offset).limit(batch_size) - results = await session.execute(stmt) - records = results.scalars().all() - records_to_insert = [] - for row in records: - tasks = [ - self.get_index_record_hash(row.did), - self.get_urls_record(row.did), - self.get_urls_metadata(row.did), - self.get_index_record_ace(row.did), - self.get_index_record_authz(row.did), - self.get_index_record_alias(row.did), - self.get_index_record_metadata(row.did), - ] - results = await asyncio.gather(*tasks) - - ( - hashes, - urls, - url_metadata, - acl, - authz, - alias, - metadata, - ) = results - - records_to_insert.append( - Record( - guid=row.did, - baseid=row.baseid, - rev=row.rev, - form=row.form, - size=row.size, - created_date=row.created_date, - updated_date=row.updated_date, - content_created_date=row.content_created_date, - content_updated_date=row.content_updated_date, - file_name=row.file_name, - version=row.version, - uploader=row.uploader, - hashes=hashes, - urls=urls, - url_metadata=url_metadata, - acl=acl, - authz=authz, - alias=alias, - record_metadata=metadata, - ) - ) - return records_to_insert - - async def get_index_record_hash(self, did): - async with self.async_session() as session: - stmt = select(IndexRecordHash.hash_type, IndexRecordHash.hash_value).where( - IndexRecordHash.did == did - ) - results = await session.execute(stmt) - return {hash_type: hash_value for hash_type, hash_value in results} - - async def get_urls_record(self, did): - async with self.async_session() as session: - stmt = select(IndexRecordUrl.url).where(IndexRecordUrl.did == did) - results = await session.execute(stmt) - return [url for url, in results] - - async def get_urls_metadata(self, did): - async with self.async_session() as session: - stmt = select( - IndexRecordUrlMetadata.url, - IndexRecordUrlMetadata.key, - IndexRecordUrlMetadata.value, - ).where(IndexRecordUrlMetadata.did == did) - results = await session.execute(stmt) - url_metadata = {} - for url, key, value in results: - if url not in url_metadata: - url_metadata[url] = {} - url_metadata[url][key] = value - return url_metadata - - async def get_index_record_ace(self, did): - async with self.async_session() as session: - stmt = select(IndexRecordACE.ace).where(IndexRecordACE.did == did) - results = await session.execute(stmt) - return [ace for ace, in results] - - async def get_index_record_authz(self, did): - async with self.async_session() as session: - stmt = select(IndexRecordAuthz.resource).where(IndexRecordAuthz.did == did) - results = await session.execute(stmt) - return [resource for resource, in results] - - async def get_index_record_alias(self, did): - async with 
self.async_session() as session: - stmt = select(IndexRecordAlias.name).where(IndexRecordAlias.did == did) - results = await session.execute(stmt) - return [name for name, in results] - - async def get_index_record_metadata(self, did): - async with self.async_session() as session: - stmt = select(IndexRecordMetadata.key, IndexRecordMetadata.value).where( - IndexRecordMetadata.did == did - ) - results = await session.execute(stmt) - return {key: value for key, value in results} - - def remove_duplicate_records(self, records, error): - key_value = re.search(r"\(guid\)=\((.*?)\)", str(error)).group(1) - self.logger.info(f"Removing duplicate record {key_value}") - for record in records: - if key_value == str(record.guid): - records.remove(record) - break - - -if __name__ == "__main__": - start_time = time.time() - parser = argparse.ArgumentParser( - description="Migrate data from old indexd database to new single table database" - ) - parser.add_argument( - "creds_path", - help="Path to the creds file for the database you're trying to copy data from multi-table to single records table. Defaults to original indexd database creds from the indexd block in the creds.json file.", - ) - args = parser.parse_args() - migrator = IndexRecordMigrator(conf_data=args.creds_path) - asyncio.run(migrator.migrate_tables()) - end_time = time.time() - - print("Total Time: {}".format(end_time - start_time)) diff --git a/bin/migration_with_copy.py b/bin/migration_with_copy.py deleted file mode 100644 index e69de29b..00000000 diff --git a/deployment/Secrets/indexd_settings.py b/deployment/Secrets/indexd_settings.py index ca09f10a..4b45badf 100644 --- a/deployment/Secrets/indexd_settings.py +++ b/deployment/Secrets/indexd_settings.py @@ -1,7 +1,6 @@ from os import environ import json import config_helper -from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver from indexd.alias.drivers.alchemy import SQLAlchemyAliasDriver from indexd.auth.drivers.alchemy import SQLAlchemyAuthDriver from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver diff --git a/indexd/index/blueprint.py b/indexd/index/blueprint.py index 7c79513f..b15a328e 100644 --- a/indexd/index/blueprint.py +++ b/indexd/index/blueprint.py @@ -104,6 +104,7 @@ def get_index(form=None): validate_hashes(**hashes) hashes = hashes if hashes else None + metadata = flask.request.args.getlist("metadata") metadata = {k: v for k, v in (x.split(":", 1) for x in metadata)} acl = flask.request.args.get("acl") diff --git a/tests/test_single_table_migration.py b/tests/test_single_table_migration.py deleted file mode 100644 index e4fc95c1..00000000 --- a/tests/test_single_table_migration.py +++ /dev/null @@ -1,5 +0,0 @@ -# from migrate_with_asyncio import IndexRecordMigrator - - -# def test_migrate_tables(): -# migrator = IndexRecordMigrator(conf_data="/bin/creds.json") From 71cac1a1d2c81a760b35e00e3515c207e6a75159 Mon Sep 17 00:00:00 2001 From: BinamB Date: Thu, 20 Jun 2024 12:20:53 -0500 Subject: [PATCH 18/47] Updated sync migration --- bin/migrate_to_single_table.py | 93 +++++++++++++++++++--------------- 1 file changed, 51 insertions(+), 42 deletions(-) diff --git a/bin/migrate_to_single_table.py b/bin/migrate_to_single_table.py index 00412fd7..e8a3e4b4 100644 --- a/bin/migrate_to_single_table.py +++ b/bin/migrate_to_single_table.py @@ -71,14 +71,28 @@ def __init__(self, conf_data=None): def index_record_to_new_table(self, batch_size=1000, retry_limit=4): try: total_records = self.session.query(IndexRecord).count() - - for offset in 
range(0, total_records, batch_size): - stmt = ( - self.session.query(IndexRecord) - .offset(offset) - .limit(batch_size) - .yield_per(batch_size) - ) + last_seen_guid = None + count = 0 + + while True: + if last_seen_guid == None: + stmt = ( + self.session.query(IndexRecord) + .order_by(IndexRecord.did) + .limit(batch_size) + .all() + ) + else: + stmt = ( + self.session.query(IndexRecord) + .order_by(IndexRecord.did) + .filter(IndexRecord.did > last_seen_guid) + .limit(batch_size) + .all() + ) + + if stmt == None: + break records_to_insert = [] @@ -91,55 +105,50 @@ def index_record_to_new_table(self, batch_size=1000, retry_limit=4): alias = self.get_index_record_alias(row.did) metadata = self.get_index_record_metadata(row.did) - try: - records_to_insert.append( - Record( - guid=row.did, - baseid=row.baseid, - rev=row.rev, - form=row.form, - size=row.size, - created_date=row.created_date, - updated_date=row.updated_date, - content_created_date=row.content_created_date, - content_updated_date=row.content_updated_date, - file_name=row.file_name, - version=row.version, - uploader=row.uploader, - hashes=hashes, - urls=urls, - url_metadata=url_metadata, - acl=acl, - authz=authz, - alias=alias, - record_metadata=metadata, - ) + records_to_insert.append( + Record( + guid=row.did, + baseid=row.baseid, + rev=row.rev, + form=row.form, + size=row.size, + created_date=row.created_date, + updated_date=row.updated_date, + content_created_date=row.content_created_date, + content_updated_date=row.content_updated_date, + file_name=row.file_name, + version=row.version, + uploader=row.uploader, + hashes=hashes, + urls=urls, + url_metadata=url_metadata, + acl=acl, + authz=authz, + alias=alias, + record_metadata=metadata, ) - except Exception as e: - print(e) + ) + + last_seen_guid = stmt[-1].did while len(records_to_insert) > 0: try: self.session.bulk_save_objects(records_to_insert) self.session.commit() + count += len(records_to_insert) + self.logger.info( + f"Done processing {count}/{total_records} records. {(count * 100)/total_records}%" + ) break except Exception as e: self.session.rollback() if "duplicate key value violates unique constraint" in str(e): - self.logger.error(f"Errored at {offset}: {e}") records_to_insert = self.remove_duplicate_records( records_to_insert, e ) - else: - self.logger.error(f"Ran into error at {offset}: {e}") - break - self.logger.info( - f"Inserted {offset} records out of {total_records}. 
Progress: {(offset*100)/total_records}%" - ) - except Exception as e: self.session.rollback() - self.logger.error(f"Errored at {offset}: {e}") + self.logger.error(f"Error in migration: {e}") finally: self.session.close() From 0e977b38028cc79b971a579d15a4faa4ba6a03cb Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 2 Jul 2024 13:36:25 -0500 Subject: [PATCH 19/47] pr review changes --- bin/migrate_to_single_table.py | 210 +++++++++++------- indexd/index/blueprint.py | 1 - indexd/index/drivers/single_table_alchemy.py | 45 ++-- .../bb3d7586a096_createsingletable.py | 8 +- tests/conftest.py | 8 +- .../test_bb3d7586a096_createsingletable.py | 6 + 6 files changed, 162 insertions(+), 116 deletions(-) diff --git a/bin/migrate_to_single_table.py b/bin/migrate_to_single_table.py index e8a3e4b4..47b41ab0 100644 --- a/bin/migrate_to_single_table.py +++ b/bin/migrate_to_single_table.py @@ -1,5 +1,5 @@ """ - +to run: python migrate_to_single_table.py "/dir/containing/db_creds" """ import argparse import json @@ -37,11 +37,31 @@ def load_json(file_name): def main(): - migrator = IndexRecordMigrator() + args = parse_args() + try: + migrator = IndexRecordMigrator(conf_data=args.creds_path) + except Exception as e: + logger.error(f"Set up failed: {e}") migrator.index_record_to_new_table() return +def parse_args(): + parser = argparse.ArgumentParser( + description="Migrate data from old indexd database to new single table database" + ) + parser.add_argument( + "--creds-path", + help="Path to the creds file for the database you're trying to copy data from multi-table to single records table. Defaults to original indexd database creds from the indexd block in the creds.json file.", + ) + parser.add_argument( + "--start-did", + help="did to start at", + default=False, + ) + return parser.parse_args() + + class IndexRecordMigrator: def __init__(self, conf_data=None): self.logger = get_logger("migrate_single_table", log_level="debug") @@ -75,15 +95,15 @@ def index_record_to_new_table(self, batch_size=1000, retry_limit=4): count = 0 while True: - if last_seen_guid == None: - stmt = ( + if last_seen_guid is None: + records = ( self.session.query(IndexRecord) .order_by(IndexRecord.did) .limit(batch_size) .all() ) else: - stmt = ( + records = ( self.session.query(IndexRecord) .order_by(IndexRecord.did) .filter(IndexRecord.did > last_seen_guid) @@ -91,34 +111,45 @@ def index_record_to_new_table(self, batch_size=1000, retry_limit=4): .all() ) - if stmt == None: + if not records: break + # Collect all dids in the current batch + dids = [record.did for record in records] + # Fetch related data for all dids in the current batch + hashes = self.get_index_record_hash(dids) + urls = self.get_urls_record(dids) + url_metadata = self.get_urls_metadata(dids) + acl = self.get_index_record_ace(dids) + authz = self.get_index_record_authz(dids) + alias = self.get_index_record_alias(dids) + metadata = self.get_index_record_metadata(dids) + records_to_insert = [] - for row in stmt: - hashes = self.get_index_record_hash(row.did) - urls = self.get_urls_record(row.did) - url_metadata = self.get_urls_metadata(row.did) - acl = self.get_index_record_ace(row.did) - authz = self.get_index_record_authz(row.did) - alias = self.get_index_record_alias(row.did) - metadata = self.get_index_record_metadata(row.did) + for record in records: + record_hashes = hashes.get(record.did, {}) + record_urls = urls.get(record.did, []) + record_url_metadata = url_metadata.get(record.did, {}) + record_acl = acl.get(record.did, []) + record_authz = 
authz.get(record.did, []) + record_alias = alias.get(record.did, []) + record_metadata = metadata.get(record.did, {}) records_to_insert.append( Record( - guid=row.did, - baseid=row.baseid, - rev=row.rev, - form=row.form, - size=row.size, - created_date=row.created_date, - updated_date=row.updated_date, - content_created_date=row.content_created_date, - content_updated_date=row.content_updated_date, - file_name=row.file_name, - version=row.version, - uploader=row.uploader, + guid=record.did, + baseid=record.baseid, + rev=record.rev, + form=record.form, + size=record.size, + created_date=record.created_date, + updated_date=record.updated_date, + content_created_date=record.content_created_date, + content_updated_date=record.content_updated_date, + file_name=record.file_name, + version=record.version, + uploader=record.uploader, hashes=hashes, urls=urls, url_metadata=url_metadata, @@ -129,9 +160,9 @@ def index_record_to_new_table(self, batch_size=1000, retry_limit=4): ) ) - last_seen_guid = stmt[-1].did + last_seen_guid = records[-1].did - while len(records_to_insert) > 0: + while records_to_insert: try: self.session.bulk_save_objects(records_to_insert) self.session.commit() @@ -154,97 +185,132 @@ def index_record_to_new_table(self, batch_size=1000, retry_limit=4): self.session.close() self.logger.info("Finished migrating :D") - def get_index_record_hash(self, did): + def get_index_record_hash(self, dids): try: stmt = ( self.session.query( - IndexRecordHash.hash_type, IndexRecordHash.hash_value + IndexRecordHash.did, + IndexRecordHash.hash_type, + IndexRecordHash.hash_value, ) - .filter(IndexRecordHash.did == did) + .filter(IndexRecordHash.did.in_(dids)) .all() ) - res = {hash_type: hash_value for hash_type, hash_value in stmt} + res = {} + for did, hash_type, hash_value in stmt: + if did not in res: + res[did] = {} + res[did][hash_type] = hash_value return res - except Exception as e: - self.logger.error(f"Error with hash for {did}: {e}") + self.logger.error(f"Error with hashes: {e}") - def get_urls_record(self, did): + def get_urls_record(self, dids): try: stmt = ( - self.session.query(IndexRecordUrl.url) - .filter(IndexRecordUrl.did == did) + self.session.query(IndexRecordUrl.did, IndexRecordUrl.url) + .filter(IndexRecordUrl.did.in_(dids)) .all() ) - res = [u.url for u in stmt] + res = {} + for did, url in stmt: + if did not in res: + res[did] = [] + res[did].append(url) return res - except Exception as e: - self.logger.error(f"Error with urls for {did}: {e}") + self.logger.error(f"Error with urls: {e}") - def get_urls_metadata(self, did): + def get_urls_metadata(self, dids): try: stmt = ( self.session.query( + IndexRecordUrlMetadata.did, IndexRecordUrlMetadata.url, IndexRecordUrlMetadata.key, IndexRecordUrlMetadata.value, ) - .filter(IndexRecordUrlMetadata.did == did) + .filter(IndexRecordUrlMetadata.did.in_(dids)) .all() ) - res = {url: {key: value} for url, key, value in stmt} + res = {} + for did, url, key, value in stmt: + if did not in res: + res[did] = {} + if url not in res[did]: + res[did][url] = {} + res[did][url][key] = value return res except Exception as e: - self.logger.error(f"Error with url metadata for {did}: {e}") + self.logger.error(f"Error with url metadata: {e}") - def get_index_record_ace(self, did): + def get_index_record_ace(self, dids): try: stmt = ( - self.session.query(IndexRecordACE.ace) - .filter(IndexRecordACE.did == did) + self.session.query(IndexRecordACE.did, IndexRecordACE.ace) + .filter(IndexRecordACE.did.in_(dids)) .all() ) - res = [a.ace for a in stmt] 
+ res = {} + for did, ace in stmt: + if did not in res: + res[did] = [] + res[did].append(ace) return res except Exception as e: - self.logger.error(f"Error with ace for {did}: {e}") + self.logger.error(f"Error with ace: {e}") - def get_index_record_authz(self, did): + def get_index_record_authz(self, dids): try: stmt = ( - self.session.query(IndexRecordAuthz.resource) - .filter(IndexRecordAuthz.did == did) + self.session.query(IndexRecordAuthz.did, IndexRecordAuthz.resource) + .filter(IndexRecordAuthz.did.in_(dids)) .all() ) - res = [r.resource for r in stmt] + res = {} + for did, resource in stmt: + if did not in res: + res[did] = [] + res[did].append(resource) return res except Exception as e: - self.logger.error(f"Error with authz for {did}: {e}") + self.logger.error(f"Error with authz: {e}") - def get_index_record_alias(self, did): + def get_index_record_alias(self, dids): try: stmt = ( - self.session.query(IndexRecordAlias.name) - .filter(IndexRecordAlias.did == did) + self.session.query(IndexRecordAlias.did, IndexRecordAlias.name) + .filter(IndexRecordAlias.did.in_(dids)) .all() ) - res = [row.name for row in stmt] + res = {} + for did, name in stmt: + if did not in res: + res[did] = [] + res[did].append(name) return res except Exception as e: - self.logger.error(f"Error with alias for {did}: {e}") + self.logger.error(f"Error with alias: {e}") - def get_index_record_metadata(self, did): + def get_index_record_metadata(self, dids): try: stmt = ( - self.session.query(IndexRecordMetadata) - .filter(IndexRecordMetadata.did == did) + self.session.query( + IndexRecordMetadata.did, + IndexRecordMetadata.key, + IndexRecordMetadata.value, + ) + .filter(IndexRecordMetadata.did.in_(dids)) .all() ) - res = {row.key: row.value for row in stmt} + res = {} + for did, key, value in stmt: + if did not in res: + res[did] = {} + res[did][key] = value return res except Exception as e: - self.logger.error(f"Error with alias for {did}: {e}") + self.logger.error(f"Error with alias: {e}") def remove_duplicate_records(self, records, error): # Extract the key value from the error message @@ -260,18 +326,4 @@ def remove_duplicate_records(self, records, error): if __name__ == "__main__": - start_time = time.time() - parser = argparse.ArgumentParser( - description="Migrate data from old indexd database to new single table database" - ) - parser.add_argument( - "creds_path", - help="Path to the creds file for the database you're trying to copy data from multi-table to single records table. 
Defaults to original indexd database creds from the indexd block in the creds.json file.", - ) - args = parser.parse_args() - migrator = IndexRecordMigrator(conf_data=args.creds_path) - migrator.index_record_to_new_table() - # cProfile.run("migrator.index_record_to_new_table()", filename="profile_results.txt") - end_time = time.time() - - print("Total Time: {}".format(end_time - start_time)) + main() diff --git a/indexd/index/blueprint.py b/indexd/index/blueprint.py index b15a328e..7c79513f 100644 --- a/indexd/index/blueprint.py +++ b/indexd/index/blueprint.py @@ -104,7 +104,6 @@ def get_index(form=None): validate_hashes(**hashes) hashes = hashes if hashes else None - metadata = flask.request.args.getlist("metadata") metadata = {k: v for k, v in (x.split(":", 1) for x in metadata)} acl = flask.request.args.get("acl") diff --git a/indexd/index/drivers/single_table_alchemy.py b/indexd/index/drivers/single_table_alchemy.py index 07294fce..4d47f7d1 100644 --- a/indexd/index/drivers/single_table_alchemy.py +++ b/indexd/index/drivers/single_table_alchemy.py @@ -52,12 +52,12 @@ class Record(Base): baseid = Column(String, index=True) rev = Column(String) form = Column(String) - size = Column(BigInteger, index=True) + size = Column(BigInteger) created_date = Column(DateTime, default=datetime.datetime.utcnow) updated_date = Column(DateTime, default=datetime.datetime.utcnow) - file_name = Column(String, index=True) - version = Column(String, index=True) - uploader = Column(String, index=True) + file_name = Column(String) + version = Column(String) + uploader = Column(String) description = Column(String) content_created_date = Column(DateTime) content_updated_date = Column(DateTime) @@ -73,15 +73,8 @@ def to_document_dict(self): """ Get the full index document """ - # TODO: some of these fields may not need to be a variable and could directly go to the return object -Binam - urls = self.urls acl = self.acl or [] authz = self.authz or [] - hashes = self.hashes - record_metadata = self.record_metadata - url_metadata = self.url_metadata - created_date = self.created_date.isoformat() - updated_date = self.updated_date.isoformat() content_created_date = ( self.content_created_date.isoformat() if self.content_created_date is not None @@ -101,15 +94,15 @@ def to_document_dict(self): "file_name": self.file_name, "version": self.version, "uploader": self.uploader, - "urls": urls, - "urls_metadata": url_metadata, + "urls": self.urls, + "urls_metadata": self.url_metadata, "acl": acl, "authz": authz, - "hashes": hashes, - "metadata": record_metadata, + "hashes": self.hashes, + "metadata": self.record_metadata, "form": self.form, - "created_date": created_date, - "updated_date": updated_date, + "created_date": self.created_date.isoformat(), + "updated_date": self.updated_date.isoformat(), "description": self.description, "content_created_date": content_created_date, "content_updated_date": content_updated_date, @@ -124,20 +117,6 @@ def __init__(self, conn, logger=None, index_config=None, **config): Base.metadata.bind = self.engine self.Session = sessionmaker(bind=self.engine) - def migrate_index_database(self): - """ - This migration logic is DEPRECATED. It is still supported for backwards compatibility, - but any new migration should be added using Alembic. 
-
-        migrate index database to match CURRENT_SCHEMA_VERSION
-        """
-        migrate_database(
-            driver=self,
-            migrate_functions=SCHEMA_MIGRATION_FUNCTIONS,
-            current_schema_version=CURRENT_SCHEMA_VERSION,
-            model=IndexSchemaVersion,
-        )
-
     @property
     @contextmanager
     def session(self):
@@ -145,6 +124,7 @@ def session(self):
         Provide a transactional scope around a series of operations.
         """
         session = self.Session()
+        # return session.begin()

         try:
             yield session
@@ -173,6 +153,9 @@ def ids(
         negate_params=None,
         page=None,
     ):
+        """
+        Returns a list of records stored by the backend.
+        """
         with self.session as session:
             query = session.query(Record)

diff --git a/migrations/versions/bb3d7586a096_createsingletable.py b/migrations/versions/bb3d7586a096_createsingletable.py
index d4f51f87..fa186620 100644
--- a/migrations/versions/bb3d7586a096_createsingletable.py
+++ b/migrations/versions/bb3d7586a096_createsingletable.py
@@ -24,12 +24,12 @@ def upgrade() -> None:
         sa.Column("baseid", sa.VARCHAR(), index=True),
         sa.Column("rev", sa.VARCHAR()),
         sa.Column("form", sa.VARCHAR()),
-        sa.Column("size", sa.BIGINT(), index=True),
+        sa.Column("size", sa.BIGINT()),
         sa.Column("created_date", sa.DateTime, nullable=True),
         sa.Column("updated_date", sa.DateTime, nullable=True),
-        sa.Column("file_name", sa.VARCHAR(), index=True),
-        sa.Column("version", sa.VARCHAR(), index=True),
-        sa.Column("uploader", sa.VARCHAR(), index=True),
+        sa.Column("file_name", sa.VARCHAR()),
+        sa.Column("version", sa.VARCHAR()),
+        sa.Column("uploader", sa.VARCHAR()),
         sa.Column("description", sa.VARCHAR()),
         sa.Column("content_created_date", sa.DateTime),
         sa.Column("content_updated_date", sa.DateTime),
diff --git a/tests/conftest.py b/tests/conftest.py
index 6e3afb7f..6c110e93 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -24,11 +24,14 @@


 def clear_database():
+    """
+    Clean up test data from unit tests
+    """
     engine = create_engine(POSTGRES_CONNECTION)

     with engine.connect() as conn:
-        # Clear the Index records
         index_driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
+        # IndexD tables need to be deleted in this order to avoid foreign key constraint errors
         table_delete_order = [
             "index_record_url_metadata",
             "index_record_url",
@@ -67,6 +70,9 @@


 @pytest.fixture(scope="function", params=["default_settings", "single_table_settings"])
 def combined_default_and_single_table_settings(request):
+    """
+    Fixture to run a unit test with both the multi-table and single-table drivers
+    """
     from indexd import default_settings
     from tests import default_test_settings

diff --git a/tests/postgres/migrations/test_bb3d7586a096_createsingletable.py b/tests/postgres/migrations/test_bb3d7586a096_createsingletable.py
index 52cd138c..7035ee28 100644
--- a/tests/postgres/migrations/test_bb3d7586a096_createsingletable.py
+++ b/tests/postgres/migrations/test_bb3d7586a096_createsingletable.py
@@ -2,6 +2,9 @@


 def test_upgrade(postgres_driver):
+    """
+    Make sure the single-table migration creates the record table with the correct schema.
+    """
     conn = postgres_driver.engine.connect()

     # state before migration
@@ -41,6 +44,9 @@


 def test_downgrade(postgres_driver):
+    """
+    Test the downgrade to before the single-table migration; the record table should not exist prior to this upgrade.
+    """
     conn = postgres_driver.engine.connect()

     alembic_main(["--raiseerr", "downgrade", "a72f117515c5"])
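Patches 18 and 19 above settle how the migrator walks the old table: batches are read in did order, and each pass resumes after the last did seen instead of paging with OFFSET. A minimal standalone sketch of that keyset-pagination pattern, assuming a SQLAlchemy session bound to the multi-table models (the helper name iter_record_batches is illustrative, not code from these patches):

    from indexd.index.drivers.alchemy import IndexRecord

    def iter_record_batches(session, batch_size=1000):
        """Yield batches of IndexRecord rows in did order without OFFSET scans."""
        last_seen_guid = None
        while True:
            query = session.query(IndexRecord).order_by(IndexRecord.did)
            if last_seen_guid is not None:
                # Seek straight past the last row handled; this is an index
                # seek on the primary key, not a rescan of skipped rows.
                query = query.filter(IndexRecord.did > last_seen_guid)
            batch = query.limit(batch_size).all()
            if not batch:
                break
            yield batch
            last_seen_guid = batch[-1].did

An OFFSET of n forces Postgres to walk and discard n rows for every batch, so a full-table copy slows down as it advances; the seek above keeps the per-batch cost roughly flat.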
From de64991b21d81a0524e21ee4e63098581df807cb Mon Sep 17 00:00:00 2001
From: BinamB
Date: Tue, 2 Jul 2024 14:48:56 -0500
Subject: [PATCH 20/47] fix migration

---
 bin/migrate_to_single_table.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/bin/migrate_to_single_table.py b/bin/migrate_to_single_table.py
index 47b41ab0..b2e5fd2a 100644
--- a/bin/migrate_to_single_table.py
+++ b/bin/migrate_to_single_table.py
@@ -38,10 +38,7 @@ def load_json(file_name):

 def main():
     args = parse_args()
-    try:
-        migrator = IndexRecordMigrator(conf_data=args.creds_path)
-    except Exception as e:
-        logger.error(f"Set up failed: {e}")
+    migrator = IndexRecordMigrator(conf_data=args.creds_path)
     migrator.index_record_to_new_table()
     return
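The next patch backs out the batched IN (...) lookups introduced in patch 19 and returns to one query per did. For contrast, these are the two query shapes being traded off, sketched against the multi-table models (illustrative helpers, not code from either patch):

    from indexd.index.drivers.alchemy import IndexRecordHash

    def hashes_per_did(session, did):
        # One round trip per record: N+1 queries per batch, but trivial to verify.
        rows = (
            session.query(IndexRecordHash.hash_type, IndexRecordHash.hash_value)
            .filter(IndexRecordHash.did == did)
            .all()
        )
        return {hash_type: hash_value for hash_type, hash_value in rows}

    def hashes_per_batch(session, dids):
        # One round trip per batch: fewer queries, but the caller must regroup
        # the rows by did for every related table.
        rows = (
            session.query(
                IndexRecordHash.did,
                IndexRecordHash.hash_type,
                IndexRecordHash.hash_value,
            )
            .filter(IndexRecordHash.did.in_(dids))
            .all()
        )
        grouped = {}
        for did, hash_type, hash_value in rows:
            grouped.setdefault(did, {})[hash_type] = hash_value
        return grouped

The per-batch shape does fewer round trips, but the regrouping has to be right for all seven related tables, which is easy to get subtly wrong; the per-did form is slower and simpler, and that is the trade the revert below makes.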
self.session.query(IndexRecordUrl.did, IndexRecordUrl.url) - .filter(IndexRecordUrl.did.in_(dids)) + self.session.query(IndexRecordUrl.url) + .filter(IndexRecordUrl.did == did) .all() ) - res = {} - for did, url in stmt: - if did not in res: - res[did] = [] - res[did].append(url) + res = [u.url for u in stmt] return res except Exception as e: - self.logger.error(f"Error with urls: {e}") + self.logger.error(f"Error with urls for {did}: {e}") - def get_urls_metadata(self, dids): + def get_urls_metadata(self, did): try: stmt = ( self.session.query( - IndexRecordUrlMetadata.did, IndexRecordUrlMetadata.url, IndexRecordUrlMetadata.key, IndexRecordUrlMetadata.value, ) - .filter(IndexRecordUrlMetadata.did.in_(dids)) + .filter(IndexRecordUrlMetadata.did == did) .all() ) - res = {} - for did, url, key, value in stmt: - if did not in res: - res[did] = {} - if url not in res[did]: - res[did][url] = {} - res[did][url][key] = value + res = {url: {key: value} for url, key, value in stmt} return res except Exception as e: - self.logger.error(f"Error with url metadata: {e}") + self.logger.error(f"Error with url metadata for {did}: {e}") - def get_index_record_ace(self, dids): + def get_index_record_ace(self, did): try: stmt = ( - self.session.query(IndexRecordACE.did, IndexRecordACE.ace) - .filter(IndexRecordACE.did.in_(dids)) + self.session.query(IndexRecordACE.ace) + .filter(IndexRecordACE.did == did) .all() ) - res = {} - for did, ace in stmt: - if did not in res: - res[did] = [] - res[did].append(ace) + res = [a.ace for a in stmt] return res except Exception as e: - self.logger.error(f"Error with ace: {e}") + self.logger.error(f"Error with ace for did {did}: {e}") - def get_index_record_authz(self, dids): + def get_index_record_authz(self, did): try: stmt = ( - self.session.query(IndexRecordAuthz.did, IndexRecordAuthz.resource) - .filter(IndexRecordAuthz.did.in_(dids)) + self.session.query(IndexRecordAuthz.resource) + .filter(IndexRecordAuthz.did == did) .all() ) - res = {} - for did, resource in stmt: - if did not in res: - res[did] = [] - res[did].append(resource) + res = [r.resource for r in stmt] return res except Exception as e: self.logger.error(f"Error with authz: {e}") - def get_index_record_alias(self, dids): + def get_index_record_alias(self, did): try: stmt = ( - self.session.query(IndexRecordAlias.did, IndexRecordAlias.name) - .filter(IndexRecordAlias.did.in_(dids)) + self.session.query(IndexRecordAlias.name) + .filter(IndexRecordAlias.did == did) .all() ) res = {} @@ -289,25 +254,20 @@ def get_index_record_alias(self, dids): except Exception as e: self.logger.error(f"Error with alias: {e}") - def get_index_record_metadata(self, dids): + def get_index_record_metadata(self, did): try: stmt = ( self.session.query( - IndexRecordMetadata.did, IndexRecordMetadata.key, IndexRecordMetadata.value, ) - .filter(IndexRecordMetadata.did.in_(dids)) + .filter(IndexRecordMetadata.did == did) .all() ) - res = {} - for did, key, value in stmt: - if did not in res: - res[did] = {} - res[did][key] = value + res = [row.name for row in stmt] return res except Exception as e: - self.logger.error(f"Error with alias: {e}") + self.logger.error(f"Error with alias for did {did}: {e}") def remove_duplicate_records(self, records, error): # Extract the key value from the error message From 15166c95a30329daab3f74d312193d7c98285730 Mon Sep 17 00:00:00 2001 From: BinamB Date: Thu, 11 Jul 2024 12:57:26 -0500 Subject: [PATCH 22/47] Add unit tests for migration + fix alias --- bin/migrate_to_single_table.py | 44 +++--- 
indexd/index/drivers/single_table_alchemy.py | 13 +- tests/test_creds.json | 7 + tests/test_migration.py | 134 +++++++++++++++++++ 4 files changed, 169 insertions(+), 29 deletions(-) create mode 100644 tests/test_creds.json create mode 100644 tests/test_migration.py diff --git a/bin/migrate_to_single_table.py b/bin/migrate_to_single_table.py index 5109330b..bd835539 100644 --- a/bin/migrate_to_single_table.py +++ b/bin/migrate_to_single_table.py @@ -1,23 +1,18 @@ """ -to run: python migrate_to_single_table.py "/dir/containing/db_creds" +to run: python migrate_to_single_table.py --creds-path /dir/containing/db_creds --start-did """ import argparse import json -import config_helper +import bin.config_helper as config_helper from cdislogging import get_logger -from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String, DateTime +from sqlalchemy import create_engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker -import time -import random import re -import cProfile - from indexd.index.drivers.alchemy import ( IndexRecord, IndexRecordAuthz, - BaseVersion, IndexRecordAlias, IndexRecordUrl, IndexRecordACE, @@ -75,10 +70,12 @@ def __init__(self, conf_data=None): pghost = conf_data.get("db_host", "{{db_host}}") pgport = 5432 - engine = create_engine( - f"postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}" - ) - + try: + engine = create_engine( + f"postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}" + ) + except Exception as e: + print(e) Base = declarative_base() Base.metadata.create_all(engine) Session = sessionmaker(bind=engine) @@ -121,7 +118,6 @@ def index_record_to_new_table(self, batch_size=1000, retry_limit=4): authz = self.get_index_record_authz(record.did) alias = self.get_index_record_alias(record.did) metadata = self.get_index_record_metadata(record.did) - records_to_insert.append( Record( guid=record.did, @@ -145,9 +141,6 @@ def index_record_to_new_table(self, batch_size=1000, retry_limit=4): record_metadata=metadata, ) ) - - last_seen_guid = records[-1].did - while records_to_insert: try: self.session.bulk_save_objects(records_to_insert) @@ -163,12 +156,25 @@ def index_record_to_new_table(self, batch_size=1000, retry_limit=4): records_to_insert = self.remove_duplicate_records( records_to_insert, e ) + last_seen_guid = records[-1].did except Exception as e: self.session.rollback() - self.logger.error(f"Error in migration: {e}") - + self.logger.error( + f"Error in migration: {e}. Last seen guid: {last_seen_guid}. Please " + ) finally: self.session.close() + new_total_records = self.session.query(Record).count() + self.logger.info(f"Number of records in old table: {total_records}") + self.logger.info(f"Number of records in new table: {new_total_records}") + if total_records == new_total_records: + self.logger.info( + "Number of records in the new table matches the number of records in old table" + ) + else: + self.logger.info( + "Number of records in the new table DOES NOT MATCH the number of records in old table. 
Check logs to see if there are records that were not migrated" + ) self.logger.info("Finished migrating :D") def get_index_record_hash(self, did): @@ -264,7 +270,7 @@ def get_index_record_metadata(self, did): .filter(IndexRecordMetadata.did == did) .all() ) - res = [row.name for row in stmt] + res = {key: value for key, value in stmt} return res except Exception as e: self.logger.error(f"Error with alias for did {did}: {e}") diff --git a/indexd/index/drivers/single_table_alchemy.py b/indexd/index/drivers/single_table_alchemy.py index 4d47f7d1..cb1a4c10 100644 --- a/indexd/index/drivers/single_table_alchemy.py +++ b/indexd/index/drivers/single_table_alchemy.py @@ -464,9 +464,9 @@ def add( try: checked_url_metadata = check_url_metadata(url_metadata, record) record.url_metadata = checked_url_metadata - - if self.config.get("Add_PREFIX_ALIAS"): - self.add_prefix_alias(record, session) + if self.config.get("ADD_PREFIX_ALIAS"): + prefix = self.config["DEFAULT_PREFIX"] + record.alias = list(set([prefix + record.guid])) session.add(record) session.commit() except IntegrityError: @@ -593,13 +593,6 @@ def update_blank_record(self, did, rev, size, hashes, urls, authz=None): return record.guid, record.rev, record.baseid - def add_prefix_alias(self, record, session): - """ - Create a index alias with the alias as {prefix:did} - """ - prefix = self.config["DEFAULT_PREFIX"] - session.add(Record().alias.append(prefix + record.guid)) - def get_by_alias(self, alias): """ Gets a record given a record alias diff --git a/tests/test_creds.json b/tests/test_creds.json new file mode 100644 index 00000000..1be446c8 --- /dev/null +++ b/tests/test_creds.json @@ -0,0 +1,7 @@ +{ + "db_host": "localhost", + "db_username": "postgres", + "db_password": "postgres", // pragma: allowlist secret + "db_database": "indexd_tests", + "fence_database": "fence_db" + } diff --git a/tests/test_migration.py b/tests/test_migration.py new file mode 100644 index 00000000..7c9183f9 --- /dev/null +++ b/tests/test_migration.py @@ -0,0 +1,134 @@ +import datetime +import pytest +import random +import json +import uuid + +from sqlalchemy import create_engine +from unittest.mock import patch, MagicMock, mock_open + +from bin.migrate_to_single_table import IndexRecordMigrator, parse_args, main +from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver, IndexRecord + + +POSTGRES_CONNECTION = "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret + + +def create_record(n_records=1): + """ + Create n_records number of records in multitable + """ + + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + did_list = [] + for _ in range(n_records): + did = str(uuid.uuid4()) + baseid = str(uuid.uuid4()) + size = random.randint(0, 1024) + file_name = f"file_{random.randint(0, 1024)}" + index_metadata = { + "metadata_key": "metadata_value", + "some_other_key": "some_other_value", + } + hashes = {"md5": "some_md5", "sha1": "some_sha1"} + urls = ["s3://bucket/data.json", "gs://bucket/data.txt"] + urls_metadata = { + "s3://bucket/data.json": {"metadata_key": "metadata_value"}, + "gs://bucket/data.txt": {"metadata_key": "metadata_value"}, + } + version = str(uuid.uuid4())[:5] + acl = random.choice(["*", "phs00001", "phs00002", "phs00003"]) + authz = random.choice(["/open", "phs00001", "phs00002"]) + rev = str(uuid.uuid4())[:8] + uploader = "uploader" + description = "this is a test file" + + driver.add( + "object", + did=did, + size=size, + file_name=file_name, + 
metadata=index_metadata,
+            urls_metadata=urls_metadata,
+            version=version,
+            urls=urls,
+            acl=acl,
+            authz=authz,
+            hashes=hashes,
+            baseid=baseid,
+            uploader=uploader,
+            description=description,
+        )
+        did_list.append(did)
+
+    with engine.connect() as conn:
+        result = conn.execute("SELECT COUNT(*) FROM index_record")
+        count = result.scalar()
+        assert count == n_records
+
+    return did_list
+
+
+def test_index_record_to_new_table():
+    """
+    Test index_record_to_new_table copies records from old tables to new record table.
+    """
+    index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json")
+    n_records = 100
+    create_record(n_records)
+    index_record_migrator.index_record_to_new_table(batch_size=10)
+
+    engine = create_engine(POSTGRES_CONNECTION)
+    with engine.connect() as conn:
+        result = conn.execute("SELECT COUNT(*) FROM record")
+        count = result.scalar()
+        assert count == n_records
+
+
+def test_get_index_record_hash():
+    index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json")
+    did = create_record()[0]
+    result = index_record_migrator.get_index_record_hash(did)
+    assert result == {"md5": "some_md5", "sha1": "some_sha1"}
+
+
+def test_get_urls_record():
+    index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json")
+    did = create_record()[0]
+    result = index_record_migrator.get_urls_record(did)
+    assert result == ["s3://bucket/data.json", "gs://bucket/data.txt"]
+
+
+def test_get_urls_metadata():
+    index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json")
+    did = create_record()[0]
+    result = index_record_migrator.get_urls_metadata(did)
+    assert result == {
+        "s3://bucket/data.json": {"metadata_key": "metadata_value"},
+        "gs://bucket/data.txt": {"metadata_key": "metadata_value"},
+    }
+
+
+def test_get_index_record_ace():
+    index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json")
+    did = create_record()[0]
+    result = index_record_migrator.get_index_record_ace(did)
+    assert type(result) == list
+
+
+def test_get_index_record_authz():
+    index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json")
+    did = create_record()[0]
+    result = index_record_migrator.get_index_record_authz(did)
+    assert type(result) == list
+
+
+def test_get_index_record_metadata():
+    index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json")
+    did = create_record()[0]
+    result = index_record_migrator.get_index_record_metadata(did)
+    assert result == {
+        "metadata_key": "metadata_value",
+        "some_other_key": "some_other_value",
+    }

From bd607efe4b8d50164e34fb325e7602df348bd5d0 Mon Sep 17 00:00:00 2001
From: BinamB
Date: Thu, 11 Jul 2024 13:06:35 -0500
Subject: [PATCH 23/47] remove comment + password

---
 tests/test_creds.json | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/tests/test_creds.json b/tests/test_creds.json
index 1be446c8..784abb7d 100644
--- a/tests/test_creds.json
+++ b/tests/test_creds.json
@@ -1,7 +1,6 @@
 {
     "db_host": "localhost",
     "db_username": "postgres",
-    "db_password": "postgres", // pragma: allowlist secret
     "db_database": "indexd_tests",
     "fence_database": "fence_db"
- }
+}
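Aside on driving the migrator with the trimmed credentials file:
IndexRecordMigrator and index_record_to_new_table are the names used in
bin/migrate_to_single_table.py above; the path below is illustrative, and the
file is assumed to be JSON shaped like tests/test_creds.json:

    from bin.migrate_to_single_table import IndexRecordMigrator

    # conf_data points at a JSON creds file (db_host, db_username, db_database, ...)
    migrator = IndexRecordMigrator(conf_data="tests/test_creds.json")

    # Copy index_record rows, together with their hashes, urls, acl, authz,
    # alias and metadata, into the new single-table "record" table in batches.
    migrator.index_record_to_new_table(batch_size=1000)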
From 2959513213cf509ef61c9d57d65ade89ca4e3413 Mon Sep 17 00:00:00 2001
From: BinamB
Date: Thu, 11 Jul 2024 13:18:36 -0500
Subject: [PATCH 24/47] Add doc string

---
 bin/migrate_to_single_table.py | 29 ++++++++++++++++++++++++++++-
 tests/test_migration.py        | 18 ++++++++++++++++++
 2 files changed, 46 insertions(+), 1 deletion(-)

diff --git a/bin/migrate_to_single_table.py b/bin/migrate_to_single_table.py
index bd835539..a423be64 100644
--- a/bin/migrate_to_single_table.py
+++ b/bin/migrate_to_single_table.py
@@ -82,7 +82,10 @@ def __init__(self, conf_data=None):
 
         self.session = Session()
 
-    def index_record_to_new_table(self, batch_size=1000, retry_limit=4):
+    def index_record_to_new_table(self, batch_size=1000):
+        """
+        Collect records from index_record table, collect additional info from multiple tables and bulk insert to new record table.
+        """
         try:
             total_records = self.session.query(IndexRecord).count()
             last_seen_guid = None
@@ -178,6 +181,9 @@ def index_record_to_new_table(self, batch_size=1000):
         self.logger.info("Finished migrating :D")
 
     def get_index_record_hash(self, did):
+        """
+        Get the index record hash for the given did and return correctly formatted value
+        """
         try:
             stmt = (
                 self.session.query(
@@ -199,6 +205,9 @@ def get_index_record_hash(self, did):
             self.logger.error(f"Error with hash for {did}: {e}")
 
     def get_urls_record(self, did):
+        """
+        Get the urls record for the given did and return correctly formatted value
+        """
         try:
             stmt = (
                 self.session.query(IndexRecordUrl.url)
@@ -205,6 +214,9 @@ def get_urls_record(self, did):
             self.logger.error(f"Error with urls for {did}: {e}")
 
     def get_urls_metadata(self, did):
+        """
+        Get the urls metadata for the given did and return correctly formatted value
+        """
         try:
             stmt = (
                 self.session.query(
@@ -221,6 +233,9 @@ def get_urls_metadata(self, did):
             self.logger.error(f"Error with url metadata for {did}: {e}")
 
     def get_index_record_ace(self, did):
+        """
+        Get the index record ace for the given did and return correctly formatted value
+        """
         try:
             stmt = (
                 self.session.query(IndexRecordACE.ace)
@@ -233,6 +248,9 @@ def get_index_record_ace(self, did):
             self.logger.error(f"Error with ace for did {did}: {e}")
 
     def get_index_record_authz(self, did):
+        """
+        Get the index record authz for the given did and return the correctly formatted value
+        """
         try:
             stmt = (
                 self.session.query(IndexRecordAuthz.resource)
@@ -245,6 +263,9 @@ def get_index_record_authz(self, did):
             self.logger.error(f"Error with authz: {e}")
 
     def get_index_record_alias(self, did):
+        """
+        Get the index record alias for the given did and return the correctly formatted value
+        """
         try:
             stmt = (
                 self.session.query(IndexRecordAlias.name)
@@ -261,6 +282,9 @@ def get_index_record_alias(self, did):
             self.logger.error(f"Error with alias: {e}")
 
     def get_index_record_metadata(self, did):
+        """
+        Get the index record metadata for the given did and return the correctly formatted value
+        """
         try:
             stmt = (
                 self.session.query(
@@ -276,6 +300,9 @@ def get_index_record_metadata(self, did):
             self.logger.error(f"Error with alias for did {did}: {e}")
 
     def remove_duplicate_records(self, records, error):
+        """
+        Remove duplicate records from the bulk insert records list
+        """
         # Extract the key value from the error message
         key_value = re.search(r"\(guid\)=\((.*?)\)", str(error))
         key_value = key_value.group(1)
diff --git a/tests/test_migration.py b/tests/test_migration.py
index 7c9183f9..a65b8ef8 100644
--- a/tests/test_migration.py
+++ b/tests/test_migration.py
@@ -87,6 +87,9 @@ def test_index_record_to_new_table():
 
 
 def test_get_index_record_hash():
+    """
+    Test get_index_record_hash from IndexRecordMigrator returns the correct format
+    """
     index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json")
     did = create_record()[0]
     result = index_record_migrator.get_index_record_hash(did)
@@ -94,6 +97,9 @@ def test_get_index_record_hash():
 
 
 def 
test_get_urls_record(): + """ + Test get_urls_record from IndexRecordMigrator returns the correct format + """ index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json") did = create_record()[0] result = index_record_migrator.get_urls_record(did) @@ -101,6 +107,9 @@ def test_get_urls_record(): def test_get_urls_metadata(): + """ + Test get_urls_metadata from IndexRecordMigrator returns the correct format + """ index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json") did = create_record()[0] result = index_record_migrator.get_urls_metadata(did) @@ -111,6 +120,9 @@ def test_get_urls_metadata(): def test_get_index_record_ace(): + """ + Test get_index_record_ace from IndexRecordMigrator returns the correct format + """ index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json") did = create_record()[0] result = index_record_migrator.get_index_record_ace(did) @@ -118,6 +130,9 @@ def test_get_index_record_ace(): def test_get_index_record_authz(): + """ + Test get_index_record_authz from IndexRecordMigrator returns the correct format + """ index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json") did = create_record()[0] result = index_record_migrator.get_index_record_authz(did) @@ -125,6 +140,9 @@ def test_get_index_record_authz(): def test_get_index_record_metadata(): + """ + Test get_index_record_metadata from IndexRecordMigrator returns the correct format + """ index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json") did = create_record()[0] result = index_record_migrator.get_index_record_metadata(did) From cd7d5f5e642b0ca3b2c56860eac1fb7f2e73742d Mon Sep 17 00:00:00 2001 From: BinamB Date: Fri, 12 Jul 2024 11:56:50 -0500 Subject: [PATCH 25/47] remove unused imports --- tests/test_migration.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/tests/test_migration.py b/tests/test_migration.py index a65b8ef8..5f0f4538 100644 --- a/tests/test_migration.py +++ b/tests/test_migration.py @@ -1,14 +1,10 @@ -import datetime -import pytest import random -import json import uuid from sqlalchemy import create_engine -from unittest.mock import patch, MagicMock, mock_open -from bin.migrate_to_single_table import IndexRecordMigrator, parse_args, main -from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver, IndexRecord +from bin.migrate_to_single_table import IndexRecordMigrator +from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver POSTGRES_CONNECTION = "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret From f8a65b4c968840664ee245f4bdb1d5bf8c522636 Mon Sep 17 00:00:00 2001 From: BinamB Date: Mon, 19 Aug 2024 16:46:24 -0500 Subject: [PATCH 26/47] Resolve --- bin/migrate_to_single_table.py | 159 +-- indexd/index/drivers/single_table_alchemy.py | 3 +- poetry.lock | 992 ++++++++++--------- tests/conftest.py | 1 - 4 files changed, 623 insertions(+), 532 deletions(-) diff --git a/bin/migrate_to_single_table.py b/bin/migrate_to_single_table.py index a423be64..d6680c16 100644 --- a/bin/migrate_to_single_table.py +++ b/bin/migrate_to_single_table.py @@ -2,11 +2,13 @@ to run: python migrate_to_single_table.py --creds-path /dir/containing/db_creds --start-did """ import argparse +import backoff import json import bin.config_helper as config_helper from cdislogging import get_logger from sqlalchemy import create_engine from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.exc import IntegrityError from sqlalchemy.orm 
import sessionmaker import re @@ -21,6 +23,7 @@ IndexRecordHash, ) from indexd.index.drivers.single_table_alchemy import Record +from indexd.index.errors import MultipleRecordsFound APP_NAME = "indexd" @@ -34,7 +37,9 @@ def load_json(file_name): def main(): args = parse_args() migrator = IndexRecordMigrator(conf_data=args.creds_path) - migrator.index_record_to_new_table() + migrator.index_record_to_new_table( + offset=args.offset, last_seen_guid=args.start_did + ) return @@ -48,8 +53,15 @@ def parse_args(): ) parser.add_argument( "--start-did", + dest="start_did", help="did to start at", - default=False, + default=None, + ) + parser.add_argument( + "--start-offset", + dest="start_offset", + help="offset to start at", + default=None, ) return parser.parse_args() @@ -75,20 +87,21 @@ def __init__(self, conf_data=None): f"postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}" ) except Exception as e: - print(e) + self.logger.error(f"Failed to connect to postgres: {e}") Base = declarative_base() Base.metadata.create_all(engine) Session = sessionmaker(bind=engine) self.session = Session() - def index_record_to_new_table(self, batch_size=1000): + def index_record_to_new_table( + self, batch_size=1000, offset=None, last_seen_guid=None + ): """ Collect records from index_record table, collect additional info from multiple tables and bulk insert to new record table. """ try: - total_records = self.session.query(IndexRecord).count() - last_seen_guid = None + self.total_records = self.session.query(IndexRecord).count() count = 0 while True: @@ -99,6 +112,14 @@ def index_record_to_new_table(self, batch_size=1000): .limit(batch_size) .all() ) + elif offset is not None: + records = ( + self.session.query(IndexRecord) + .order_by(IndexRecord.did) + .offset(offset - 1) + .limit(batch_size) + .all() + ) else: records = ( self.session.query(IndexRecord) @@ -111,66 +132,23 @@ def index_record_to_new_table(self, batch_size=1000): if not records: break - records_to_insert = [] - - for record in records: - hashes = self.get_index_record_hash(record.did) - urls = self.get_urls_record(record.did) - url_metadata = self.get_urls_metadata(record.did) - acl = self.get_index_record_ace(record.did) - authz = self.get_index_record_authz(record.did) - alias = self.get_index_record_alias(record.did) - metadata = self.get_index_record_metadata(record.did) - records_to_insert.append( - Record( - guid=record.did, - baseid=record.baseid, - rev=record.rev, - form=record.form, - size=record.size, - created_date=record.created_date, - updated_date=record.updated_date, - content_created_date=record.content_created_date, - content_updated_date=record.content_updated_date, - file_name=record.file_name, - version=record.version, - uploader=record.uploader, - hashes=hashes, - urls=urls, - url_metadata=url_metadata, - acl=acl, - authz=authz, - alias=alias, - record_metadata=metadata, - ) - ) - while records_to_insert: - try: - self.session.bulk_save_objects(records_to_insert) - self.session.commit() - count += len(records_to_insert) - self.logger.info( - f"Done processing {count}/{total_records} records. 
{(count * 100)/total_records}%" - ) - break - except Exception as e: - self.session.rollback() - if "duplicate key value violates unique constraint" in str(e): - records_to_insert = self.remove_duplicate_records( - records_to_insert, e - ) + records_to_insert = self.get_info_from_mult_tables(records) + + self.bulk_insert_records(records_to_insert) + last_seen_guid = records[-1].did + except Exception as e: self.session.rollback() self.logger.error( - f"Error in migration: {e}. Last seen guid: {last_seen_guid}. Please " + f"Error in migration: {e}. Last seen guid: {last_seen_guid} at position: {count}." ) finally: self.session.close() new_total_records = self.session.query(Record).count() - self.logger.info(f"Number of records in old table: {total_records}") + self.logger.info(f"Number of records in old table: {self.total_records}") self.logger.info(f"Number of records in new table: {new_total_records}") - if total_records == new_total_records: + if self.total_records == new_total_records: self.logger.info( "Number of records in the new table matches the number of records in old table" ) @@ -180,6 +158,73 @@ def index_record_to_new_table(self, batch_size=1000): ) self.logger.info("Finished migrating :D") + @backoff.on_exception( + backoff.expo, Exception, max_tries=5, max_time=60, jitter=backoff.full_jitter + ) + def bulk_insert_records(self, records_to_insert): + """ + bulk insert records into the new Record table + Args: + records_to_insert (list): List of Record objects + """ + try: + self.session.bulk_save_objects(records_to_insert) + self.session.commit() + count += len(records_to_insert) + self.logger.info( + f"Done processing {count}/{self.total_records} records. {(count * 100)/self.total_records}%" + ) + except IntegrityError: + self.session.rollback() + self.logger.error(f"Duplicate record found for records {records_to_insert}") + except Exception as e: + self.session.rollback() + self.logger.error(f"Error bulk insert for records at {count} records") + + def get_info_from_mult_tables(self, records): + """ + Collect records from multiple tables from old multi table infrastructure and create a list of records to insert into the new single table infrastructure + + Args: + records (list): list of IndexRecord objects + + Returns: + records_to_insert (list): List of Record objects + """ + records_to_insert = [] + for record in records: + hashes = self.get_index_record_hash(record.did) + urls = self.get_urls_record(record.did) + url_metadata = self.get_urls_metadata(record.did) + acl = self.get_index_record_ace(record.did) + authz = self.get_index_record_authz(record.did) + alias = self.get_index_record_alias(record.did) + metadata = self.get_index_record_metadata(record.did) + records_to_insert.append( + Record( + guid=record.did, + baseid=record.baseid, + rev=record.rev, + form=record.form, + size=record.size, + created_date=record.created_date, + updated_date=record.updated_date, + content_created_date=record.content_created_date, + content_updated_date=record.content_updated_date, + file_name=record.file_name, + version=record.version, + uploader=record.uploader, + hashes=hashes, + urls=urls, + url_metadata=url_metadata, + acl=acl, + authz=authz, + alias=alias, + record_metadata=metadata, + ) + ) + return records_to_insert + def get_index_record_hash(self, did): """ Get the index record hash for the given did and return correctly formatted value diff --git a/indexd/index/drivers/single_table_alchemy.py b/indexd/index/drivers/single_table_alchemy.py index cb1a4c10..e369efa7 100644 --- 
a/indexd/index/drivers/single_table_alchemy.py +++ b/indexd/index/drivers/single_table_alchemy.py @@ -124,8 +124,7 @@ def session(self): Provide a transactional scope around a series of operations. """ session = self.Session() - # return session.begin() - + session.begin() try: yield session session.commit() diff --git a/poetry.lock b/poetry.lock index 433169ab..76b6cb81 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,41 +2,44 @@ [[package]] name = "alembic" -version = "1.9.4" +version = "1.13.2" description = "A database migration tool for SQLAlchemy." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "alembic-1.9.4-py3-none-any.whl", hash = "sha256:6f1c2207369bf4f49f952057a33bb017fbe5c148c2a773b46906b806ea6e825f"}, - {file = "alembic-1.9.4.tar.gz", hash = "sha256:4d3bd32ecdbb7bbfb48a9fe9e6d6fd6a831a1b59d03e26e292210237373e7db5"}, + {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, + {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, ] [package.dependencies] Mako = "*" SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" [package.extras] -tz = ["python-dateutil"] +tz = ["backports.zoneinfo"] [[package]] name = "anyio" -version = "3.6.2" +version = "4.4.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.8" files = [ - {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, - {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, ] [package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] -doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] -trio = ["trio (>=0.16,<0.22)"] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] [[package]] name = "async-timeout" @@ -118,21 +121,22 @@ files = [ [[package]] name = "attrs" -version = "22.2.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, - {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = 
"sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] -tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "authlib" @@ -183,6 +187,17 @@ files = [ {file = "backoff-1.11.1.tar.gz", hash = "sha256:ccb962a2378418c667b3c979b504fdeb7d9e0d29c0579e3b13b86467177728cb"}, ] +[[package]] +name = "blinker" +version = "1.8.2" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, +] + [[package]] name = "cached-property" version = "1.5.2" @@ -238,86 +253,89 @@ resolved_reference = "bdfdeb05e45407e839fd954ce6d195d847cd8024" [[package]] name = "certifi" -version = "2022.12.7" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] name = "cffi" -version = "1.15.1" +version = "1.17.0" description = "Foreign Function Interface for Python calling C code." 
optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, + {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, + {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, + {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, + {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, + {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, + {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, + {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, + {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, + {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, + {file = 
"cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, + {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, + {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, + {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, + {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, + {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, ] [package.dependencies] @@ -325,110 +343,112 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.0.1" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false -python-versions = "*" +python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, - {file = 
"charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, - {file = 
"charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, - {file = 
"charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, - {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] name = "click" -version = "8.1.3" +version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -528,48 +548,52 @@ yaml = ["PyYAML (>=3.10)"] [[package]] name = "cryptography" -version = "39.0.1" +version = "43.0.0" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965"}, - {file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106"}, - {file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c"}, - {file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4"}, - {file = "cryptography-39.0.1-cp36-abi3-win32.whl", hash = "sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8"}, - {file = "cryptography-39.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5caeb8188c24888c90b5108a441c106f7faa4c4c075a2bcae438c6e8ca73cef"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4789d1e3e257965e960232345002262ede4d094d1a19f4d3b52e48d4d8f3b885"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a"}, - {file = "cryptography-39.0.1.tar.gz", hash = "sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695"}, + {file = "cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf"}, + {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55"}, + {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431"}, + {file = "cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc"}, + {file = "cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778"}, + {file = "cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f"}, + {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0"}, + {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b"}, + {file = "cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf"}, + {file = "cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1"}, + {file = "cryptography-43.0.0.tar.gz", hash = "sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "check-manifest", "mypy", "ruff", "types-pytz", "types-requests"] -sdist = ["setuptools-rust (>=0.11.4)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist", "pytz"] +test = ["certifi", "cryptography-vectors (==43.0.0)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] -tox = ["tox"] [[package]] name = "docopt" @@ -617,23 +641,38 @@ url = "https://github.com/uc-cdis/dosclient" reference = "1.1.0" resolved_reference = "38c0f1ab42edf3efb1ad6348d7dbdff81b131360" +[[package]] +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[package.extras] +test = ["pytest (>=6)"] + [[package]] name = "flask" -version = "2.2.3" +version = "2.3.3" description = "A simple framework for building complex web applications." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Flask-2.2.3-py3-none-any.whl", hash = "sha256:c0bec9477df1cb867e5a67c9e1ab758de9cb4a3e52dd70681f59fa40a62b3f2d"}, - {file = "Flask-2.2.3.tar.gz", hash = "sha256:7eb373984bf1c770023fce9db164ed0c3353cd0b53f130f4693da0ca756a2e6d"}, + {file = "flask-2.3.3-py3-none-any.whl", hash = "sha256:f69fcd559dc907ed196ab9df0e48471709175e696d6e698dd4dbe940f96ce66b"}, + {file = "flask-2.3.3.tar.gz", hash = "sha256:09c347a92aa7ff4a8e7f3206795f30d826654baf38b873d0744cd571ca609efc"}, ] [package.dependencies] -click = ">=8.0" +blinker = ">=1.6.2" +click = ">=8.1.3" importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} -itsdangerous = ">=2.0" -Jinja2 = ">=3.0" -Werkzeug = ">=2.2.2" +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=2.3.7" [package.extras] async = ["asgiref (>=3.2)"] @@ -758,86 +797,87 @@ resolved_reference = "f122072ee245216da5e4260f718d6f886db81773" [[package]] name = "httpcore" -version = "0.16.3" +version = "1.0.5" description = "A minimal low-level HTTP client." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, - {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, ] [package.dependencies] -anyio = ">=3.0,<5.0" certifi = "*" h11 = ">=0.13,<0.15" -sniffio = "==1.*" [package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" -version = "0.23.3" +version = "0.27.0" description = "The next generation HTTP client." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"}, - {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"}, + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, ] [package.dependencies] +anyio = "*" certifi = "*" -httpcore = ">=0.15.0,<0.17.0" -rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +httpcore = "==1.*" +idna = "*" sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<13)"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] [[package]] name = "idna" -version = "3.4" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] [[package]] name = "importlib-metadata" -version = "6.0.0" +version = "8.3.0" description = "Read metadata from Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"}, - {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"}, + {file = "importlib_metadata-8.3.0-py3-none-any.whl", hash = "sha256:42817a4a0be5845d22c6e212db66a94ad261e2318d80b3e0d363894a79df2b67"}, + {file = "importlib_metadata-8.3.0.tar.gz", hash = "sha256:9c8fa6e8ea0f9516ad5c8db9246a731c948193c7754d3babb0114a05b27dd364"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "indexclient" -version = "2.1.1" +version = "2.3.1" description = "" optional = false python-versions = "*" files = [ - {file = "indexclient-2.1.1.tar.gz", hash = "sha256:03f0ef104d14e0e4117063892da0747d5db3c0e952143d3408f611797d909b2e"}, + {file = "indexclient-2.3.1.tar.gz", hash = 
"sha256:0beaf865aab58112961092aa58d06e31ca1cc8da26e9cd5cf84430d2f6567a0d"}, ] [package.dependencies] @@ -856,24 +896,24 @@ files = [ [[package]] name = "itsdangerous" -version = "2.1.2" +version = "2.2.0" description = "Safely pass data to untrusted environments and back." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, - {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, + {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, + {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, ] [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.4" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] [package.dependencies] @@ -905,13 +945,13 @@ format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-va [[package]] name = "mako" -version = "1.2.4" +version = "1.3.5" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"}, - {file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"}, + {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, + {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, ] [package.dependencies] @@ -924,61 +964,71 @@ testing = ["pytest"] [[package]] name = "markupsafe" -version = "2.1.2" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, - {file = 
"MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, - {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -999,24 +1049,24 @@ test = ["pytest (<5.4)", "pytest-cov"] [[package]] name = "packaging" -version = "23.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, - {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, + {file = 
"packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -1025,24 +1075,24 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "psycopg2" -version = "2.9.5" +version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "psycopg2-2.9.5-cp310-cp310-win32.whl", hash = "sha256:d3ef67e630b0de0779c42912fe2cbae3805ebaba30cda27fea2a3de650a9414f"}, - {file = "psycopg2-2.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:4cb9936316d88bfab614666eb9e32995e794ed0f8f6b3b718666c22819c1d7ee"}, - {file = "psycopg2-2.9.5-cp311-cp311-win32.whl", hash = "sha256:093e3894d2d3c592ab0945d9eba9d139c139664dcf83a1c440b8a7aa9bb21955"}, - {file = "psycopg2-2.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:920bf418000dd17669d2904472efeab2b20546efd0548139618f8fa305d1d7ad"}, - {file = "psycopg2-2.9.5-cp36-cp36m-win32.whl", hash = "sha256:b9ac1b0d8ecc49e05e4e182694f418d27f3aedcfca854ebd6c05bb1cffa10d6d"}, - {file = "psycopg2-2.9.5-cp36-cp36m-win_amd64.whl", hash = "sha256:fc04dd5189b90d825509caa510f20d1d504761e78b8dfb95a0ede180f71d50e5"}, - {file = "psycopg2-2.9.5-cp37-cp37m-win32.whl", hash = "sha256:922cc5f0b98a5f2b1ff481f5551b95cd04580fd6f0c72d9b22e6c0145a4840e0"}, - {file = "psycopg2-2.9.5-cp37-cp37m-win_amd64.whl", hash = "sha256:1e5a38aa85bd660c53947bd28aeaafb6a97d70423606f1ccb044a03a1203fe4a"}, - {file = "psycopg2-2.9.5-cp38-cp38-win32.whl", hash = "sha256:f5b6320dbc3cf6cfb9f25308286f9f7ab464e65cfb105b64cc9c52831748ced2"}, - {file = "psycopg2-2.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:1a5c7d7d577e0eabfcf15eb87d1e19314c8c4f0e722a301f98e0e3a65e238b4e"}, - {file = "psycopg2-2.9.5-cp39-cp39-win32.whl", hash = "sha256:322fd5fca0b1113677089d4ebd5222c964b1760e361f151cbb2706c4912112c5"}, - {file = "psycopg2-2.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:190d51e8c1b25a47484e52a79638a8182451d6f6dff99f26ad9bd81e5359a0fa"}, - {file = "psycopg2-2.9.5.tar.gz", hash = "sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a"}, + {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, + {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, + {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, + {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = 
"sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, + {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, + {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, + {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, + {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, + {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, + {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, ] [[package]] @@ -1058,13 +1108,13 @@ files = [ [[package]] name = "pycparser" -version = "2.21" +version = "2.22" description = "C parser in Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] @@ -1088,38 +1138,43 @@ test = ["pytest (>=4.0.1,<5.0.0)", "pytest-cov (>=2.6.0,<3.0.0)", "pytest-runner [[package]] name = "pyrsistent" -version = "0.19.3" +version = "0.20.0" description = "Persistent/Functional/Immutable data structures" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, - {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, - {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, - {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, - {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, - {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, - {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, - {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, - {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = 
"pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + 
{file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, ] [[package]] @@ -1167,19 +1222,19 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale [[package]] name = "pytest-flask" -version = "1.2.0" +version = "1.3.0" description = "A set of py.test fixtures to test Flask applications." optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" files = [ - {file = "pytest-flask-1.2.0.tar.gz", hash = "sha256:46fde652f77777bf02dc91205aec4ce20cdf2acbbbd66a918ab91f5c14693d3d"}, - {file = "pytest_flask-1.2.0-py3-none-any.whl", hash = "sha256:fe25b39ad0db09c3d1fe728edecf97ced85e774c775db259a6d25f0270a4e7c9"}, + {file = "pytest-flask-1.3.0.tar.gz", hash = "sha256:58be1c97b21ba3c4d47e0a7691eb41007748506c36bf51004f78df10691fa95e"}, + {file = "pytest_flask-1.3.0-py3-none-any.whl", hash = "sha256:c0e36e6b0fddc3b91c4362661db83fa694d1feb91fa505475be6732b5bc8c253"}, ] [package.dependencies] Flask = "*" pytest = ">=5.2" -Werkzeug = ">=0.7" +Werkzeug = "*" [package.extras] docs = ["Sphinx", "sphinx-rtd-theme"] @@ -1224,20 +1279,20 @@ files = [ [[package]] name = "requests" -version = "2.28.2" +version = "2.32.3" description = "Python HTTP for Humans." 
optional = false -python-versions = ">=3.7, <4" +python-versions = ">=3.8" files = [ - {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, - {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] @@ -1262,38 +1317,21 @@ urllib3 = ">=1.25.10" [package.extras] tests = ["coverage (>=3.7.1,<6.0.0)", "flake8", "mypy", "pytest (>=4.6)", "pytest (>=4.6,<5.0)", "pytest-cov", "pytest-localserver", "types-mock", "types-requests", "types-six"] -[[package]] -name = "rfc3986" -version = "1.5.0" -description = "Validating URI References per RFC 3986" -optional = false -python-versions = "*" -files = [ - {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, - {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, -] - -[package.dependencies] -idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} - -[package.extras] -idna2008 = ["idna"] - [[package]] name = "setuptools" -version = "67.4.0" +version = "73.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "setuptools-67.4.0-py3-none-any.whl", hash = "sha256:f106dee1b506dee5102cc3f3e9e68137bbad6d47b616be7991714b0c62204251"}, - {file = "setuptools-67.4.0.tar.gz", hash = "sha256:e5fd0a713141a4a105412233c63dc4e17ba0090c8e8334594ac790ec97792330"}, + {file = "setuptools-73.0.0-py3-none-any.whl", hash = "sha256:f2bfcce7ae1784d90b04c57c2802e8649e1976530bb25dc72c2b078d3ecf4864"}, + {file = "setuptools-73.0.0.tar.gz", hash = "sha256:3c08705fadfc8c7c445cf4d98078f0fafb9225775b2b4e8447e40348f82597c0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +doc = ["furo", "jaraco.packaging (>=9.3)", 
"jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] [[package]] name = "six" @@ -1308,68 +1346,66 @@ files = [ [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] name = "sqlalchemy" -version = "1.4.52" +version = "1.4.53" description = "Database Abstraction Library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "SQLAlchemy-1.4.52-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f68016f9a5713684c1507cc37133c28035f29925c75c0df2f9d0f7571e23720a"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24bb0f81fbbb13d737b7f76d1821ec0b117ce8cbb8ee5e8641ad2de41aa916d3"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e93983cc0d2edae253b3f2141b0a3fb07e41c76cd79c2ad743fc27eb79c3f6db"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:84e10772cfc333eb08d0b7ef808cd76e4a9a30a725fb62a0495877a57ee41d81"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:427988398d2902de042093d17f2b9619a5ebc605bf6372f7d70e29bde6736842"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-win32.whl", hash = "sha256:1296f2cdd6db09b98ceb3c93025f0da4835303b8ac46c15c2136e27ee4d18d94"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-win_amd64.whl", hash = "sha256:80e7f697bccc56ac6eac9e2df5c98b47de57e7006d2e46e1a3c17c546254f6ef"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2f251af4c75a675ea42766880ff430ac33291c8d0057acca79710f9e5a77383d"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8f9e4c4718f111d7b530c4e6fb4d28f9f110eb82e7961412955b3875b66de0"}, - {file = 
"SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afb1672b57f58c0318ad2cff80b384e816735ffc7e848d8aa51e0b0fc2f4b7bb"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-win32.whl", hash = "sha256:6e41cb5cda641f3754568d2ed8962f772a7f2b59403b95c60c89f3e0bd25f15e"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-win_amd64.whl", hash = "sha256:5bed4f8c3b69779de9d99eb03fd9ab67a850d74ab0243d1be9d4080e77b6af12"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:49e3772eb3380ac88d35495843daf3c03f094b713e66c7d017e322144a5c6b7c"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:618827c1a1c243d2540314c6e100aee7af09a709bd005bae971686fab6723554"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9acf369aaadb71a725b7e83a5ef40ca3de1cf4cdc93fa847df6b12d3cd924b"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-win32.whl", hash = "sha256:763bd97c4ebc74136ecf3526b34808c58945023a59927b416acebcd68d1fc126"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-win_amd64.whl", hash = "sha256:f12aaf94f4d9679ca475975578739e12cc5b461172e04d66f7a3c39dd14ffc64"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:853fcfd1f54224ea7aabcf34b227d2b64a08cbac116ecf376907968b29b8e763"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f98dbb8fcc6d1c03ae8ec735d3c62110949a3b8bc6e215053aa27096857afb45"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e135fff2e84103bc15c07edd8569612ce317d64bdb391f49ce57124a73f45c5"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b5de6af8852500d01398f5047d62ca3431d1e29a331d0b56c3e14cb03f8094c"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3491c85df263a5c2157c594f54a1a9c72265b75d3777e61ee13c556d9e43ffc9"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-win32.whl", hash = "sha256:427c282dd0deba1f07bcbf499cbcc9fe9a626743f5d4989bfdfd3ed3513003dd"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-win_amd64.whl", hash = "sha256:ca5ce82b11731492204cff8845c5e8ca1a4bd1ade85e3b8fcf86e7601bfc6a39"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:29d4247313abb2015f8979137fe65f4eaceead5247d39603cc4b4a610936cd2b"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a752bff4796bf22803d052d4841ebc3c55c26fb65551f2c96e90ac7c62be763a"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7ea11727feb2861deaa293c7971a4df57ef1c90e42cb53f0da40c3468388000"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d913f8953e098ca931ad7f58797f91deed26b435ec3756478b75c608aa80d139"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a251146b921725547ea1735b060a11e1be705017b568c9f8067ca61e6ef85f20"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-win32.whl", hash = "sha256:1f8e1c6a6b7f8e9407ad9afc0ea41c1f65225ce505b79bc0342159de9c890782"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-win_amd64.whl", hash = "sha256:346ed50cb2c30f5d7a03d888e25744154ceac6f0e6e1ab3bc7b5b77138d37710"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4dae6001457d4497736e3bc422165f107ecdd70b0d651fab7f731276e8b9e12d"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5d2e08d79f5bf250afb4a61426b41026e448da446b55e4770c2afdc1e200fce"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbce5dd7c7735e01d24f5a60177f3e589078f83c8a29e124a6521b76d825b85"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bdb7b4d889631a3b2a81a3347c4c3f031812eb4adeaa3ee4e6b0d028ad1852b5"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c294ae4e6bbd060dd79e2bd5bba8b6274d08ffd65b58d106394cb6abbf35cf45"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-win32.whl", hash = "sha256:bcdfb4b47fe04967669874fb1ce782a006756fdbebe7263f6a000e1db969120e"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-win_amd64.whl", hash = "sha256:7d0dbc56cb6af5088f3658982d3d8c1d6a82691f31f7b0da682c7b98fa914e91"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a551d5f3dc63f096ed41775ceec72fdf91462bb95abdc179010dc95a93957800"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ab773f9ad848118df7a9bbabca53e3f1002387cdbb6ee81693db808b82aaab0"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2de46f5d5396d5331127cfa71f837cca945f9a2b04f7cb5a01949cf676db7d1"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7027be7930a90d18a386b25ee8af30514c61f3852c7268899f23fdfbd3107181"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99224d621affbb3c1a4f72b631f8393045f4ce647dd3262f12fe3576918f8bf3"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-win32.whl", hash = "sha256:c124912fd4e1bb9d1e7dc193ed482a9f812769cb1e69363ab68e01801e859821"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-win_amd64.whl", hash = "sha256:2c286fab42e49db23c46ab02479f328b8bdb837d3e281cae546cc4085c83b680"}, - {file = "SQLAlchemy-1.4.52.tar.gz", hash = "sha256:80e63bbdc5217dad3485059bdf6f65a7d43f33c8bde619df5c220edf03d87296"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b61ac5457d91b5629a3dea2b258deb4cdd35ac8f6fa2031d2b9b2fff5b3396da"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a96aa8d425047551676b0e178ddb0683421e78eda879ab55775128b2e612cae"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4e10ac36f0b994235c13388b39598bf27219ec8bdea5be99bdac612b01cbe525"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:437592b341a3229dd0443c9c803b0bf0a466f8f539014fef6cdb9c06b7edb7f9"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:784272ceb5eb71421fea9568749bcbe8bd019261a0e2e710a7efa76057af2499"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-win32.whl", hash = "sha256:122d7b5722df1a24402c6748bbb04687ef981493bb559d0cc0beffe722e0e6ed"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-win_amd64.whl", hash = "sha256:4604d42b2abccba266d3f5bbe883684b5df93e74054024c70d3fbb5eea45e530"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fb8e15dfa47f5de11ab073e12aadd6b502cfb7ac4bafd18bd18cfd1c7d13dbbc"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc8be4df55e8fde3006d9cb1f6b3df2ba26db613855dc4df2c0fcd5ec15cb3b7"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b11640251f9a9789fd96cd6e5d176b1c230230c70ad40299bcbcc568451b4c"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-win32.whl", hash = "sha256:cd534c716f86bdf95b7b984a34ee278c91d1b1d7d183e7e5ff878600b1696046"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-win_amd64.whl", hash = "sha256:6dd06572872ca13ef5a90306a3e5af787498ddaa17fb00109b1243642646cd69"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2774c24c405136c3ef472e2352bdca7330659d481fbf2283f996c0ef9eb90f22"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68a614765197b3d13a730d631a78c3bb9b3b72ba58ed7ab295d58d517464e315"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d13d4dfbc6e52363886b47cf02cf68c5d2a37c468626694dc210d7e97d4ad330"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-win32.whl", hash = "sha256:197065b91456574d70b6459bfa62bc0b52a4960a29ef923c375ec427274a3e05"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-win_amd64.whl", hash = "sha256:421306c4b936b0271a3ce2dc074928d5ece4a36f9c482daa5770f44ecfc3a883"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:13fc34b35d8ddb3fbe3f8fcfdf6c2546e676187f0fb20f5774da362ddaf8fa2d"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626be971ff89541cfd3e70b54be00b57a7f8557204decb6223ce0428fec058f3"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:991e42fdfec561ebc6a4fae7161a86d129d6069fa14210b96b8dd752afa7059c"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:95123f3a1e0e8020848fd32ba751db889a01a44e4e4fef7e58c87ddd0b2fca59"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c58e011e9e6373b3a091d83f20601fb335a3b4bace80bfcb914ac168aad3b70d"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:670c7769bf5dcae9aff331247b5d82fe635c63731088a46ce68ba2ba519ef36e"}, 
+ {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07ba54f09033d387ae9df8d62cbe211ed7304e0bfbece1f8c55e21db9fae5c11"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a38834b4c183c33daf58544281395aad2e985f0b47cca1e88ea5ada88344e63"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:616492f5315128a847f293a7c552f3561ac7e996d2aa5dc46bef4fb0d3781f1d"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0cf8c0af9563892c6632f7343bc393dfce6eeef8e4d10c5fadba9c0390520bd"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-win32.whl", hash = "sha256:c05fe05941424c2f3747a8952381b7725e24cba2ca00141380e54789d5b616b6"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-win_amd64.whl", hash = "sha256:93e90aa3e3b2f8e8cbae4d5509f8e0cf82972378d323c740a8df1c1e9f484172"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:9d7368df54d3ed45a18955f6cec38ebe075290594ac0d5c87a8ddaff7e10de27"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89d8ac4158ef68eea8bb0f6dd0583127d9aa8720606964ba8eee20b254f9c83a"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16bb9fa4d00b4581b14d9f0e2224dc7745b854aa4687738279af0f48f7056c98"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4fe5168d0249c23f537950b6d75935ff2709365a113e29938a979aec36668ecf"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b8608d162d3bd29d807aab32c3fb6e2f8e225a43d1c54c917fed38513785380"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-win32.whl", hash = "sha256:a9d4d132198844bd6828047135ce7b887687c92925049a2468a605fc775c7a1a"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-win_amd64.whl", hash = "sha256:c15d1f1fcf1f9bec0499ae1d9132b950fcc7730f2d26d10484c8808b4e077816"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:edf094a20a386ff2ec73de65ef18014b250259cb860edc61741e240ca22d6981"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a9c3514ff19d9d30d8a8d378b24cd1dfa5528d20891481cb5f196117db6a48"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaaeedbceb4dfd688fff2faf25a9a87a391f548811494f7bff7fa701b639abc3"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d021699b9007deb7aa715629078830c99a5fec2753d9bdd5ff33290d363ef755"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0465b8a68f8f4de754c1966c45b187ac784ad97bc9747736f913130f0e1adea0"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-win32.whl", hash = "sha256:5f67b9e9dcac3241781e96575468d55a42332157dee04bdbf781df573dff5f85"}, + {file = 
"SQLAlchemy-1.4.53-cp39-cp39-win_amd64.whl", hash = "sha256:a8c2f2a0b2c4e3b86eb58c9b6bb98548205eea2fba9dae4edfd29dc6aebbe95a"}, + {file = "SQLAlchemy-1.4.53.tar.gz", hash = "sha256:5e6ab710c4c064755fd92d1a417bef360228a19bdf0eee32b03aa0f5f8e9fe0d"}, ] [package.dependencies] @@ -1380,17 +1416,17 @@ aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)", "mariadb (>=1.0.1,!=1.1.2)"] mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] +mssql-pymssql = ["pymssql", "pymssql"] +mssql-pyodbc = ["pyodbc", "pyodbc"] mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] -mysql-connector = ["mysql-connector-python"] +mysql-connector = ["mysql-connector-python", "mysql-connector-python"] oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-asyncpg = ["asyncpg", "asyncpg", "greenlet (!=0.4.17)", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)", "pg8000 (>=1.16.6,!=1.29.0)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] @@ -1452,38 +1488,50 @@ files = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + [[package]] name = "urllib3" -version = "1.26.14" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.8" files = [ - {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, - {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "werkzeug" -version = "2.2.3" +version = "3.0.3" description = "The comprehensive WSGI web application library." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Werkzeug-2.2.3-py3-none-any.whl", hash = "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612"}, - {file = "Werkzeug-2.2.3.tar.gz", hash = "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe"}, + {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, + {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, ] [package.dependencies] MarkupSafe = ">=2.1.1" [package.extras] -watchdog = ["watchdog"] +watchdog = ["watchdog (>=2.3)"] [[package]] name = "xmltodict" @@ -1498,18 +1546,18 @@ files = [ [[package]] name = "zipp" -version = "3.15.0" +version = "3.20.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, - {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, + {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, + {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" diff --git a/tests/conftest.py b/tests/conftest.py index 6c110e93..b901132c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,7 +7,6 @@ from unittest.mock import patch # indexd_server and indexd_client is needed as fixtures -# from cdisutilstest.code.indexd_fixture import clear_database from gen3authz.client.arborist.client import ArboristClient from indexd import get_app From 8ad95d6bb1a311f2b4aedaf47b5a3733fb4025e4 Mon Sep 17 00:00:00 2001 From: BinamB Date: Wed, 21 Aug 2024 10:16:13 -0500 Subject: [PATCH 27/47] fix args --- bin/migrate_to_single_table.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/migrate_to_single_table.py b/bin/migrate_to_single_table.py index d6680c16..818f9507 100644 --- a/bin/migrate_to_single_table.py +++ b/bin/migrate_to_single_table.py @@ -38,7 +38,7 @@ def main(): args = parse_args() migrator = IndexRecordMigrator(conf_data=args.creds_path) migrator.index_record_to_new_table( - offset=args.offset, last_seen_guid=args.start_did + offset=args.start_offset, last_seen_guid=args.start_did ) return From 278f56d739e306517db02c3f23d7ac3b8188f668 Mon Sep 17 00:00:00 2001 From: BinamB Date: Wed, 21 Aug 2024 11:59:03 -0500 Subject: [PATCH 28/47] fix count --- bin/migrate_to_single_table.py | 14 
+++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/bin/migrate_to_single_table.py b/bin/migrate_to_single_table.py
index 818f9507..2d6c6cfd 100644
--- a/bin/migrate_to_single_table.py
+++ b/bin/migrate_to_single_table.py
@@ -102,7 +102,7 @@ def index_record_to_new_table(
         """
         try:
             self.total_records = self.session.query(IndexRecord).count()
-            count = 0
+            self.count = 0

             while True:
                 if last_seen_guid is None:
@@ -141,7 +141,7 @@ def index_record_to_new_table(
         except Exception as e:
             self.session.rollback()
             self.logger.error(
-                f"Error in migration: {e}. Last seen guid: {last_seen_guid} at position: {count}."
+                f"Error in migration: {e}. Last seen guid: {last_seen_guid} at position: {self.count}."
             )
         finally:
             self.session.close()
@@ -170,16 +170,16 @@ def bulk_insert_records(self, records_to_insert):
         try:
             self.session.bulk_save_objects(records_to_insert)
             self.session.commit()
-            count += len(records_to_insert)
+            self.count += len(records_to_insert)
             self.logger.info(
-                f"Done processing {count}/{self.total_records} records. {(count * 100)/self.total_records}%"
+                f"Done processing {self.count}/{self.total_records} records. {(self.count * 100)/self.total_records}%"
             )
-        except IntegrityError:
+        except IntegrityError as e:
             self.session.rollback()
-            self.logger.error(f"Duplicate record found for records {records_to_insert}")
+            self.logger.error(f"Duplicate record found: {e}")
         except Exception as e:
             self.session.rollback()
-            self.logger.error(f"Error bulk insert for records at {count} records")
+            self.logger.error(f"Error bulk inserting records at {self.count} records: {e}")

From c4dcdcb54e068ac7b3fef81830f7dfafe50873c7 Mon Sep 17 00:00:00 2001
From: BinamB
Date: Thu, 22 Aug 2024 15:11:57 -0500
Subject: [PATCH 29/47] add raise and error log

---
 bin/migrate_to_single_table.py | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/bin/migrate_to_single_table.py b/bin/migrate_to_single_table.py
index 2d6c6cfd..7763abc4 100644
--- a/bin/migrate_to_single_table.py
+++ b/bin/migrate_to_single_table.py
@@ -132,7 +132,15 @@ def index_record_to_new_table(
                 if not records:
                     break

-                records_to_insert = self.get_info_from_mult_tables(records)
+                try:
+                    records_to_insert = self.get_info_from_mult_tables(records)
+                except Exception as e:
+                    raise Exception(
+                        f"""
+                        Could not insert records with {e} at offset {offset} with the last seen guid {last_seen_guid}. 
Please re-run the job with the following command
+                        gen3 job run indexd-single-table-migration-job --start-did {last_seen_guid}
+                        """
+                    )

                 self.bulk_insert_records(records_to_insert)

                 last_seen_guid = records[-1].did

         except Exception as e:

From 6d1ee7b25c1bc4e7205596c4fa9d935532d8f0c5 Mon Sep 17 00:00:00 2001
From: BinamB
Date: Mon, 26 Aug 2024 09:52:18 -0500
Subject: [PATCH 30/47] Change error message + add inserts to try block

---
 bin/migrate_to_single_table.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/bin/migrate_to_single_table.py b/bin/migrate_to_single_table.py
index 7763abc4..9904b0e6 100644
--- a/bin/migrate_to_single_table.py
+++ b/bin/migrate_to_single_table.py
@@ -134,16 +134,16 @@ def index_record_to_new_table(

                 try:
                     records_to_insert = self.get_info_from_mult_tables(records)
+                    self.bulk_insert_records(records_to_insert)
                 except Exception as e:
                     raise Exception(
                         f"""
-                        Could not insert records with {e} at offset {offset} with the last seen guid {last_seen_guid}. Please re-run the job with the following command
+                        Failed with error {e}
+                        Please re-run the job with the following command
                         gen3 job run indexd-single-table-migration-job --start-did {last_seen_guid}
                         """
                     )
-
-                self.bulk_insert_records(records_to_insert)

                 last_seen_guid = records[-1].did

From e8dd282e4eb19a7185afc7b34ce0d642beb25440 Mon Sep 17 00:00:00 2001
From: BinamB
Date: Mon, 26 Aug 2024 09:55:42 -0500
Subject: [PATCH 31/47] fix error message

---
 bin/migrate_to_single_table.py | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/bin/migrate_to_single_table.py b/bin/migrate_to_single_table.py
index 9904b0e6..d7c8785f 100644
--- a/bin/migrate_to_single_table.py
+++ b/bin/migrate_to_single_table.py
@@ -137,11 +137,7 @@ def index_record_to_new_table(
                     self.bulk_insert_records(records_to_insert)
                 except Exception as e:
                     raise Exception(
-                        f"""
-                        Failed with error {e}
-                        Please re-run the job with the following command
-                        gen3 job run indexd-single-table-migration-job --start-did {last_seen_guid}
-                        """
+                        f"Could not insert records with {e} at offset {offset} with the last seen guid {last_seen_guid}. Please re-run the job with --start-did {last_seen_guid}"
                     )

From 4618e676494b97b7041b3c460158448f11a431cc Mon Sep 17 00:00:00 2001
From: BinamB
Date: Mon, 26 Aug 2024 14:21:28 -0500
Subject: [PATCH 32/47] add backoff + raise error

---
 bin/migrate_to_single_table.py | 50 +++++++++++++++++++---------------
 1 file changed, 28 insertions(+), 22 deletions(-)

diff --git a/bin/migrate_to_single_table.py b/bin/migrate_to_single_table.py
index d7c8785f..db1d4d56 100644
--- a/bin/migrate_to_single_table.py
+++ b/bin/migrate_to_single_table.py
@@ -229,6 +229,9 @@ def get_info_from_mult_tables(self, records):
         )
         return records_to_insert

+    @backoff.on_exception(
+        backoff.expo, Exception, max_tries=5, max_time=10, jitter=backoff.full_jitter
+    )
     def get_index_record_hash(self, did):
         """
         Get the index record hash for the given did and return correctly formatted value
@@ -245,8 +248,11 @@ def get_index_record_hash(self, did):
             res = {hash_type: hash_value for hash_type, hash_value in stmt}
             return res
         except Exception as e:
-            self.logger.error(f"Error with hash for {did}: {e}")
+            raise Exception(f"Error with hash for {did}: {e}")

+    @backoff.on_exception(
+        backoff.expo, Exception, max_tries=5, max_time=10, jitter=backoff.full_jitter
+    )
     def get_urls_record(self, did):
         """
         Get the urls record for the given did and return correctly formatted value
@@ -260,8 +266,11 @@ def get_urls_record(self, did):
             res = [u.url for u in stmt]
             return res
         except Exception as e:
-            self.logger.error(f"Error with urls for {did}: {e}")
+            raise Exception(f"Error with urls for {did}: {e}")

+    @backoff.on_exception(
+        backoff.expo, Exception, max_tries=5, max_time=10, jitter=backoff.full_jitter
+    )
     def get_urls_metadata(self, did):
         """
         Get the urls metadata for the given did and return correctly formatted value
@@ -279,8 +288,11 @@ def get_urls_metadata(self, did):
             res = {url: {key: value} for url, key, value in stmt}
             return res
         except Exception as e:
-            self.logger.error(f"Error with url metadata for {did}: {e}")
+            raise Exception(f"Error with url metadata for {did}: {e}")

+    @backoff.on_exception(
+        backoff.expo, Exception, max_tries=5, max_time=10, jitter=backoff.full_jitter
+    )
     def get_index_record_ace(self, did):
        """
        Get the index record ace for the given did and return correctly formatted value
@@ -294,8 +306,11 @@ def get_index_record_ace(self, did):
            res = 
[a.ace for a in stmt]
             return res
         except Exception as e:
-            self.logger.error(f"Error with ace for did {did}: {e}")
+            raise Exception(f"Error with ace for did {did}: {e}")

+    @backoff.on_exception(
+        backoff.expo, Exception, max_tries=5, max_time=10, jitter=backoff.full_jitter
+    )
     def get_index_record_authz(self, did):
         """
         Get the index record authz for the given did and return the correctly formatted value
@@ -309,8 +324,11 @@ def get_index_record_authz(self, did):
             res = [r.resource for r in stmt]
             return res
         except Exception as e:
-            self.logger.error(f"Error with authz: {e}")
+            raise Exception(f"Error with authz: {e}")

+    @backoff.on_exception(
+        backoff.expo, Exception, max_tries=5, max_time=10, jitter=backoff.full_jitter
+    )
     def get_index_record_alias(self, did):
         """
         Get the index record alias for the given did and return the correctly formatted
@@ -328,8 +346,11 @@ def get_index_record_alias(self, did):
                 res[did].append(name)
             return res
         except Exception as e:
-            self.logger.error(f"Error with alias: {e}")
+            raise Exception(f"Error with alias: {e}")

+    @backoff.on_exception(
+        backoff.expo, Exception, max_tries=5, max_time=10, jitter=backoff.full_jitter
+    )
     def get_index_record_metadata(self, did):
         """
         Get the index record metadata for the given did and return the correctly fortmatted value
@@ -346,22 +367,7 @@ def get_index_record_metadata(self, did):
             res = {key: value for key, value in stmt}
             return res
         except Exception as e:
-            self.logger.error(f"Error with alias for did {did}: {e}")
-
-    def remove_duplicate_records(self, records, error):
-        """
-        Remove duplicate records from the bulk insert records list
-        """
-        # Extract the key value from the error message
-        key_value = re.search(r"\(guid\)=\((.*?)\)", str(error))
-        key_value = key_value.group(1)
-        self.logger.info(f"Removing duplicate record {key_value}")
-        for record in records:
-            if key_value == str(record.guid):
-                records.remove(record)
-                break
-
-        return records
+            raise Exception(f"Error with metadata for did {did}: {e}")


 if __name__ == "__main__":

From 072b33982479e56bdee0c922d6e62446c01d00be Mon Sep 17 00:00:00 2001
From: BinamB
Date: Mon, 26 Aug 2024 15:16:14 -0500
Subject: [PATCH 33/47] poetry + lock

---
 poetry.lock    | 38 +++++++++++++++++++++-----------------
 pyproject.toml |  2 +-
 2 files changed, 22 insertions(+), 18 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index 76b6cb81..de710f78 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -842,24 +842,24 @@ socks = ["socksio (==1.*)"]

 [[package]]
 name = "idna"
-version = "3.7"
+version = "3.8"
 description = "Internationalized Domain Names in Applications (IDNA)"
 optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.6"
 files = [
-    {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
-    {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
+    {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"},
+    {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"},
 ]

 [[package]]
 name = "importlib-metadata"
-version = "8.3.0"
+version = "8.4.0"
 description = "Read metadata from Python packages"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "importlib_metadata-8.3.0-py3-none-any.whl", hash = "sha256:42817a4a0be5845d22c6e212db66a94ad261e2318d80b3e0d363894a79df2b67"},
-    {file = "importlib_metadata-8.3.0.tar.gz", hash = 
"sha256:9c8fa6e8ea0f9516ad5c8db9246a731c948193c7754d3babb0114a05b27dd364"}, + {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, + {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, ] [package.dependencies] @@ -1319,13 +1319,13 @@ tests = ["coverage (>=3.7.1,<6.0.0)", "flake8", "mypy", "pytest (>=4.6)", "pytes [[package]] name = "setuptools" -version = "73.0.0" +version = "73.0.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-73.0.0-py3-none-any.whl", hash = "sha256:f2bfcce7ae1784d90b04c57c2802e8649e1976530bb25dc72c2b078d3ecf4864"}, - {file = "setuptools-73.0.0.tar.gz", hash = "sha256:3c08705fadfc8c7c445cf4d98078f0fafb9225775b2b4e8447e40348f82597c0"}, + {file = "setuptools-73.0.1-py3-none-any.whl", hash = "sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e"}, + {file = "setuptools-73.0.1.tar.gz", hash = "sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193"}, ] [package.extras] @@ -1518,13 +1518,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "werkzeug" -version = "3.0.3" +version = "3.0.4" description = "The comprehensive WSGI web application library." optional = false python-versions = ">=3.8" files = [ - {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, - {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, + {file = "werkzeug-3.0.4-py3-none-any.whl", hash = "sha256:02c9eb92b7d6c06f31a782811505d2157837cea66aaede3e217c7c27c039476c"}, + {file = "werkzeug-3.0.4.tar.gz", hash = "sha256:34f2371506b250df4d4f84bfe7b0921e4762525762bbd936614909fe25cd7306"}, ] [package.dependencies] @@ -1546,18 +1546,22 @@ files = [ [[package]] name = "zipp" -version = "3.20.0" +version = "3.20.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, - {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, + {file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"}, + {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [metadata] lock-version = "2.0" diff --git a/pyproject.toml b/pyproject.toml index 5395a833..81fb0df9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ 
-12,6 +12,7 @@ include = [ [tool.poetry.dependencies] python = "3.9.*" alembic = "^1.9.4" +asyncpg = "^0.29.0" authutils = "^6.0.0" cdislogging = "^1.0.0" cdiserrors = "^1.0.0" @@ -26,7 +27,6 @@ psycopg2 = "^2.7" sqlalchemy = "^1.4.0" sqlalchemy-utils = "^0.37.3" PyYAML = "^5.4" -asyncpg = "^0.29.0" [tool.poetry.dev-dependencies] From 69b661380f349b61d7ee9c8c5303e190066cbdb8 Mon Sep 17 00:00:00 2001 From: BinamB Date: Mon, 26 Aug 2024 15:37:26 -0500 Subject: [PATCH 34/47] lock --- poetry.lock | 153 +++++++++++++++++++++++++++------------------------- 1 file changed, 79 insertions(+), 74 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9e0363e3..c4ab104c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -140,13 +140,13 @@ tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "authlib" -version = "0.11" -description = "The ultimate Python library in building OAuth and OpenID Connect servers." +version = "1.3.2" +description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." optional = false python-versions = ">=3.8" files = [ - {file = "Authlib-1.3.0-py2.py3-none-any.whl", hash = "sha256:9637e4de1fb498310a56900b3e2043a206b03cb11c05422014b0302cbc814be3"}, - {file = "Authlib-1.3.0.tar.gz", hash = "sha256:959ea62a5b7b5123c5059758296122b57cd2585ae2ed1c0622c21b371ffdae06"}, + {file = "Authlib-1.3.2-py2.py3-none-any.whl", hash = "sha256:ede026a95e9f5cdc2d4364a52103f5405e75aa156357e831ef2bfd0bc5094dfc"}, + {file = "authlib-1.3.2.tar.gz", hash = "sha256:4b16130117f9eb82aa6eec97f6dd4673c3f960ac0283ccdae2897ee4bc030ba2"}, ] [package.dependencies] @@ -154,26 +154,27 @@ cryptography = "*" [[package]] name = "authutils" -version = "6.1.2" +version = "6.2.5" description = "Gen3 auth utility functions" optional = false -python-versions = ">=3.6,<4.0" +python-versions = "<4.0,>=3.9" files = [ - {file = "authutils-6.1.2-py3-none-any.whl", hash = "sha256:5e45b7098a40ee9650326d3f9488f867a538d53d1e03304b59634d5e77a3a258"}, - {file = "authutils-6.1.2.tar.gz", hash = "sha256:b029daffcc8d1bca481e7ba0528c8982d05c8b8dc7eee72831d37ddc08a36842"}, + {file = "authutils-6.2.5-py3-none-any.whl", hash = "sha256:ef91c9c7c750123c28b7376be9ca00b4e89b2d52fa183dec9bfe681d8eac6227"}, + {file = "authutils-6.2.5.tar.gz", hash = "sha256:0d496721e9f0d8c69b34aff8f6fccdc7768ca4f104504d68e70fd647d4c23b19"}, ] [package.dependencies] authlib = ">=1.1.0" cached-property = ">=1.4,<2.0" cdiserrors = "<2.0.0" -httpx = ">=0.12.1,<1.0.0" -pyjwt = {version = ">=1.5,<2.0", extras = ["crypto"]} +cryptography = ">=41.0.6" +httpx = ">=0.23.0,<1.0.0" +pyjwt = {version = ">=2.4.0,<3.0", extras = ["crypto"]} xmltodict = ">=0.9,<1.0" [package.extras] -fastapi = ["fastapi (>=0.54.1,<0.55.0)"] -flask = ["Flask (>=0.10.1)"] +fastapi = ["fastapi (>=0.65.2,<0.66.0)"] +flask = ["Flask (<=2.3.3)"] [[package]] name = "backoff" @@ -1118,22 +1119,23 @@ files = [ [[package]] name = "pyjwt" -version = "1.7.1" +version = "2.9.0" description = "JSON Web Token implementation in Python" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "PyJWT-1.7.1-py2.py3-none-any.whl", hash = "sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e"}, - {file = "PyJWT-1.7.1.tar.gz", hash = "sha256:8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96"}, + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = 
"sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, ] [package.dependencies] -cryptography = {version = ">=1.4", optional = true, markers = "extra == \"crypto\""} +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} [package.extras] -crypto = ["cryptography (>=1.4)"] -flake8 = ["flake8", "flake8-import-order", "pep8-naming"] -test = ["pytest (>=4.0.1,<5.0.0)", "pytest-cov (>=2.6.0,<3.0.0)", "pytest-runner (>=4.2,<5.0.0)"] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pyrsistent" @@ -1240,61 +1242,64 @@ docs = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", 
hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = 
"PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = 
"PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] @@ -1585,5 +1590,5 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" -python-versions = "3.9.*" -content-hash = "3e45ad2463a580453c01263b47601520d905b6c70796d4de9a9d2e067460a883" +python-versions = ">=3.9,<4.0" +content-hash = "dc32e3f22ed435e056c4787dd2e7d48e9defc8e94040ab29f0684df640620bc2" From f77f12e02bf6b5f3ab4844ebf000130674bbb054 Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 27 Aug 2024 09:47:42 -0500 Subject: [PATCH 35/47] Bumb version --- poetry.lock | 7 ++++--- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index c4ab104c..83e9d674 100644 --- a/poetry.lock +++ b/poetry.lock @@ -818,13 +818,13 @@ trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" -version = "0.27.0" +version = "0.27.2" description = "The next generation HTTP client." 
optional = false
python-versions = ">=3.8"
files = [
-    {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"},
-    {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"},
+    {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"},
+    {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"},
 ]

 [package.dependencies]
@@ -839,6 +839,7 @@ brotli = ["brotli", "brotlicffi"]
 cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
 http2 = ["h2 (>=3,<5)"]
 socks = ["socksio (==1.*)"]
+zstd = ["zstandard (>=0.18.0)"]

 [[package]]
 name = "idna"
diff --git a/pyproject.toml b/pyproject.toml
index de6a6e74..1808df31 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "indexd"
-version = "5.0.4"
+version = "5.1.0"
 description = "Gen3 Indexing Service"
 authors = ["CTDS UChicago "]
 license = "Apache-2.0"

From 7bee78886090134fc6751a32d6e980d1a0abfc02 Mon Sep 17 00:00:00 2001
From: BinamB
Date: Wed, 28 Aug 2024 15:09:56 -0500
Subject: [PATCH 36/47] Fix snyk

---
 bin/migrate_to_single_table.py | 15 ++++++---------
 1 file changed, 6 insertions(+), 9 deletions(-)

diff --git a/bin/migrate_to_single_table.py b/bin/migrate_to_single_table.py
index db1d4d56..7a1d30d0 100644
--- a/bin/migrate_to_single_table.py
+++ b/bin/migrate_to_single_table.py
@@ -36,7 +36,7 @@ def load_json(file_name):

 def main():
     args = parse_args()
-    migrator = IndexRecordMigrator(conf_data=args.creds_path)
+    migrator = IndexRecordMigrator(creds_file=args.creds_file)
     migrator.index_record_to_new_table(
         offset=args.start_offset, last_seen_guid=args.start_did
     )
@@ -48,8 +48,9 @@ def parse_args():
         description="Migrate data from old indexd database to new single table database"
     )
     parser.add_argument(
-        "--creds-path",
-        help="Path to the creds file for the database you're trying to copy data from multi-table to single records table. Defaults to original indexd database creds from the indexd block in the creds.json file.",
+        "--creds-file",
+        dest="creds_file",
+        help="Path to the creds file for the database you're trying to copy data from multi-table to single records table. Defaults to original indexd database creds from the indexd block in the creds.json file.",
     )
     parser.add_argument(
         "--start-did",
@@ -67,14 +68,10 @@


 class IndexRecordMigrator:
-    def __init__(self, conf_data=None):
+    def __init__(self, creds_file=None):
         self.logger = get_logger("migrate_single_table", log_level="debug")

-        if conf_data:
-            with open(conf_data, "r") as reader:
-                conf_data = json.load(reader)
-        else:
-            conf_data = load_json("creds.json")
+        conf_data = load_json(creds_file) if creds_file else load_json("creds.json")

         usr = conf_data.get("db_username", "{{db_username}}")
         db = conf_data.get("db_database", "{{db_database}}")
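
For reference, with the arguments defined above a run of the migration script looks like the following; the creds path and GUID are illustrative placeholders:

    python bin/migrate_to_single_table.py --creds-file Secrets/creds.json
    python bin/migrate_to_single_table.py --creds-file Secrets/creds.json --start-did <last-seen-guid>

The second form resumes a failed run, matching the gen3 job invocation quoted in the PATCH 31 error message (gen3 job run indexd-single-table-migration-job --start-did <guid>).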
From 9f28cdcb751693cf525e3f8d6397f04e3a08a6c2 Mon Sep 17 00:00:00 2001
From: BinamB
Date: Wed, 28 Aug 2024 16:22:56 -0500
Subject: [PATCH 37/47] append test res

---
 tests/ci_commands_script.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/ci_commands_script.sh b/tests/ci_commands_script.sh
index e82905b1..fbc382e7 100644
--- a/tests/ci_commands_script.sh
+++ b/tests/ci_commands_script.sh
@@ -1,3 +1,3 @@
 #!/usr/bin/env bash

-poetry run pytest -vv --cov=indexd --cov-report xml tests
+poetry run pytest -vv --cov=indexd --cov-report --cov-append xml tests

From eae539f75ce1a9b347b9d4a2dccd9712941e18bd Mon Sep 17 00:00:00 2001
From: BinamB
Date: Wed, 28 Aug 2024 16:26:13 -0500
Subject: [PATCH 38/47] update test

---
 tests/test_migration.py | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/tests/test_migration.py b/tests/test_migration.py
index 5f0f4538..37dbd1ff 100644
--- a/tests/test_migration.py
+++ b/tests/test_migration.py
@@ -70,7 +70,7 @@ def test_index_record_to_new_table():
     """
     Test index_record_to_new_table copies records from old tables to new record table.
     """
-    index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json")
+    index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json")
     n_records = 100
     create_record(n_records)
     index_record_migrator.index_record_to_new_table(batch_size=10)
@@ -86,7 +86,7 @@ def test_get_index_record_hash():
     """
     Test get_index_record_hash from IndexRecordMigrator returns the correct format
     """
-    index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json")
+    index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json")
     did = create_record()[0]
     result = index_record_migrator.get_index_record_hash(did)
     assert result == {"md5": "some_md5", "sha1": "some_sha1"}
@@ -96,7 +96,7 @@ def test_get_urls_record():
     """
     Test get_urls_record from IndexRecordMigrator returns the correct format
     """
-    index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json")
+    index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json")
     did = create_record()[0]
     result = index_record_migrator.get_urls_record(did)
     assert result == ["s3://bucket/data.json", "gs://bucket/data.txt"]
@@ -106,7 +106,7 @@ def test_get_urls_metadata():
     """
     Test get_urls_metadata from IndexRecordMigrator returns the correct format
     """
-    index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json")
+    index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json")
     did = create_record()[0]
    result = index_record_migrator.get_urls_metadata(did)
     assert result == {
@@ -119,7 +119,7 @@ def test_get_index_record_ace():
     """
     Test get_index_record_ace from IndexRecordMigrator returns the correct format
     """
-    index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json")
+    index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json")
     did = create_record()[0]
     result = index_record_migrator.get_index_record_ace(did)
     assert type(result) == list
@@ -129,7 +129,7 @@ def test_get_index_record_authz():
     """
     Test get_index_record_authz from IndexRecordMigrator returns the correct format
     """
-    index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json")
+    index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json")
     did = create_record()[0]
     result = index_record_migrator.get_index_record_authz(did)
     assert type(result) == list
@@ -139,7 +139,7 @@ def test_get_index_record_metadata():
     """
     Test get_index_record_metadata from IndexRecordMigrator returns the correct format
     """
-    index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json")
+    index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json")
     did = create_record()[0]
     result = index_record_migrator.get_index_record_metadata(did)
     assert result == {
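
A note on the CI command churn in patches 37 through 40: --cov-report expects a format value (xml here), so inserting --cov-append between them, as PATCH 37 does, likely breaks parsing and leaves xml to be collected as a test path, while --cov-append itself is a standalone flag that can sit anywhere. The ordering the series eventually settles on in PATCH 40 keeps the pair together:

    poetry run pytest -vv --cov=indexd --cov-append --cov-report xml tests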
""" - index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json") + index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json") n_records = 100 create_record(n_records) index_record_migrator.index_record_to_new_table(batch_size=10) @@ -86,7 +86,7 @@ def test_get_index_record_hash(): """ Test get_index_record_hash from IndexRecordMigrator returns the correct format """ - index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json") + index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json") did = create_record()[0] result = index_record_migrator.get_index_record_hash(did) assert result == {"md5": "some_md5", "sha1": "some_sha1"} @@ -96,7 +96,7 @@ def test_get_urls_record(): """ Test get_urls_record from IndexRecordMigrator returns the correct format """ - index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json") + index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json") did = create_record()[0] result = index_record_migrator.get_urls_record(did) assert result == ["s3://bucket/data.json", "gs://bucket/data.txt"] @@ -106,7 +106,7 @@ def test_get_urls_metadata(): """ Test get_urls_metadata from IndexRecordMigrator returns the correct format """ - index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json") + index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json") did = create_record()[0] result = index_record_migrator.get_urls_metadata(did) assert result == { @@ -119,7 +119,7 @@ def test_get_index_record_ace(): """ Test get_index_record_ace from IndexRecordMigrator returns the correct format """ - index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json") + index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json") did = create_record()[0] result = index_record_migrator.get_index_record_ace(did) assert type(result) == list @@ -129,7 +129,7 @@ def test_get_index_record_authz(): """ Test get_index_record_authz from IndexRecordMigrator returns the correct format """ - index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json") + index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json") did = create_record()[0] result = index_record_migrator.get_index_record_authz(did) assert type(result) == list @@ -139,7 +139,7 @@ def test_get_index_record_metadata(): """ Test get_index_record_metadata from IndexRecordMigrator returns the correct format """ - index_record_migrator = IndexRecordMigrator(conf_data="tests/test_creds.json") + index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json") did = create_record()[0] result = index_record_migrator.get_index_record_metadata(did) assert result == { From 1c209d00765aae34effc8f482d9078ecd071b3c3 Mon Sep 17 00:00:00 2001 From: BinamB Date: Wed, 28 Aug 2024 16:29:45 -0500 Subject: [PATCH 39/47] fix cmd --- tests/ci_commands_script.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ci_commands_script.sh b/tests/ci_commands_script.sh index fbc382e7..2f6e56b2 100644 --- a/tests/ci_commands_script.sh +++ b/tests/ci_commands_script.sh @@ -1,3 +1,3 @@ #!/usr/bin/env bash -poetry run pytest -vv --cov=indexd --cov-report --cov-append xml tests +poetry run pytest -vv --cov=indexd --cov-report xml tests --cov-append From 424725415a5064c4ffa383664954333f283d6067 Mon Sep 17 00:00:00 2001 From: BinamB Date: Thu, 29 Aug 2024 10:56:42 -0500 Subject: [PATCH 40/47] append test --- 
tests/ci_commands_script.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ci_commands_script.sh b/tests/ci_commands_script.sh index 2f6e56b2..8fe8c315 100644 --- a/tests/ci_commands_script.sh +++ b/tests/ci_commands_script.sh @@ -1,3 +1,3 @@ #!/usr/bin/env bash -poetry run pytest -vv --cov=indexd --cov-report xml tests --cov-append +poetry run pytest -vv --cov=indexd --cov-append --cov-report xml tests From bad7c84aa036d2471bb9be99a5c049521be595ab Mon Sep 17 00:00:00 2001 From: BinamB Date: Wed, 4 Sep 2024 15:41:50 -0500 Subject: [PATCH 41/47] Fix coverage --- .secrets.baseline | 4 +- indexd/index/drivers/single_table_alchemy.py | 1 - indexd/single_table_settings.py | 54 ++++++++++++++++++-- tests/conftest.py | 6 +-- tests/test_drs.py | 27 +++++++--- 5 files changed, 77 insertions(+), 15 deletions(-) diff --git a/.secrets.baseline b/.secrets.baseline index 85543f2b..9b21844b 100644 --- a/.secrets.baseline +++ b/.secrets.baseline @@ -409,9 +409,9 @@ "filename": "tests/test_drs.py", "hashed_secret": "5666c088b494f26cd8f63ace013992f5fc391ce0", "is_verified": false, - "line_number": 38 + "line_number": 39 } ] }, - "generated_at": "2024-08-26T20:06:17Z" + "generated_at": "2024-09-04T20:41:41Z" } diff --git a/indexd/index/drivers/single_table_alchemy.py b/indexd/index/drivers/single_table_alchemy.py index e369efa7..b6e13db8 100644 --- a/indexd/index/drivers/single_table_alchemy.py +++ b/indexd/index/drivers/single_table_alchemy.py @@ -35,7 +35,6 @@ RevisionMismatch, UnhealthyCheck, ) -from indexd.utils import migrate_database Base = declarative_base() diff --git a/indexd/single_table_settings.py b/indexd/single_table_settings.py index 79b8ce21..326358ef 100644 --- a/indexd/single_table_settings.py +++ b/indexd/single_table_settings.py @@ -1,5 +1,8 @@ -from indexd import default_settings +import os + from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver +from .alias.drivers.alchemy import SQLAlchemyAliasDriver +from .auth.drivers.alchemy import SQLAlchemyAuthDriver # - DEFAULT_PREFIX: prefix to be prepended. # - PREPEND_PREFIX: the prefix is preprended to the generated GUID when a @@ -7,7 +10,40 @@ # - ADD_PREFIX_ALIAS: aliases are created for new records - "". # Do NOT set both ADD_PREFIX_ALIAS and PREPEND_PREFIX to True, or aliases # will be created as "". -default_settings.settings["config"]["INDEX"] = { + +CONFIG = {} +CONFIG["DIST"] = [ + { + "name": "testStage", + "host": "https://fictitious-commons.io/index/", + "hints": [".*dg\\.4503.*"], + "type": "indexd", + }, +] + +CONFIG["DRS_SERVICE_INFO"] = { + "name": "DRS System", + "type": { + "group": "org.ga4gh", + "artifact": "drs", + "version": "1.0.3", + }, + "version": "1.0.3", + "organization": { + "name": "CTDS", + "url": "https://fictitious-commons.io", + }, +} + +os.environ["PRESIGNED_FENCE_URL"] = "https://fictitious-commons.io/" +os.environ["HOSTNAME"] = "fictitious-commons.io" + +# Set PSQL Port, see https://www.postgresql.org/docs/12/app-psql.html +# PSQL default port is 5432, but in some setups, can be 5433. 
+psql_port = os.environ["PGPORT"] if os.environ.get("PGPORT") else "5432" + + +CONFIG["INDEX"] = { "driver": SingleTableSQLAlchemyIndexDriver( "postgresql://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret echo=True, @@ -18,4 +54,16 @@ }, ) } -settings = default_settings.settings + +CONFIG["ALIAS"] = { + "driver": SQLAlchemyAliasDriver( + "postgresql://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret + echo=True, # pragma: allowlist secret + ) +} + +AUTH = SQLAlchemyAuthDriver( + "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret +) # pragma: allowlist secret + +settings = {"config": CONFIG, "auth": AUTH} diff --git a/tests/conftest.py b/tests/conftest.py index b901132c..f70c0f0f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -72,11 +72,12 @@ def combined_default_and_single_table_settings(request): """ Fixture to run a unit test with both multi-table and single-table driver """ - from indexd import default_settings - from tests import default_test_settings # Load the default settings if request.param == "default_settings": + from indexd import default_settings + from tests import default_test_settings + importlib.reload(default_settings) default_settings.settings = { **default_settings.settings, @@ -90,7 +91,6 @@ def combined_default_and_single_table_settings(request): importlib.reload(single_table_settings) single_table_settings.settings = { - **default_test_settings.settings, **single_table_settings.settings, } yield get_app(single_table_settings.settings) diff --git a/tests/test_drs.py b/tests/test_drs.py index 7d644dfb..d78417f6 100644 --- a/tests/test_drs.py +++ b/tests/test_drs.py @@ -1,5 +1,6 @@ import flask import json + import tests.conftest import requests import responses @@ -86,21 +87,35 @@ def test_drs_get(client, user, combined_default_and_single_table_settings): def test_drs_get_no_default(client, user, combined_default_and_single_table_settings): # Change default index driver settings to use no prefix - settings["config"]["INDEX"]["driver"].config["DEFAULT_PREFIX"] = None - settings["config"]["INDEX"]["driver"].config["ADD_PREFIX_ALIAS"] = False + combined_default_and_single_table_settings.config["INDEX"]["driver"].config[ + "DEFAULT_PREFIX" + ] = None + combined_default_and_single_table_settings.config["INDEX"]["driver"].config[ + "PREPEND_PREFIX" + ] = False + combined_default_and_single_table_settings.config["INDEX"]["driver"].config[ + "ADD_PREFIX_ALIAS" + ] = False data = get_doc() - did = "ad8f4658-6acd-4f96-0dd8-3709890c959f" - data["did"] = did res_1 = client.post("/index/", json=data, headers=user) assert res_1.status_code == 200 + did = res_1.json["did"] + assert "testprefix:" not in did res_2 = client.get("/ga4gh/drs/v1/objects/" + did) assert res_2.status_code == 200 rec_2 = res_2.json assert rec_2["self_uri"] == "drs://" + did - settings["config"]["INDEX"]["driver"].config["DEFAULT_PREFIX"] = "testprefix:" - settings["config"]["INDEX"]["driver"].config["ADD_PREFIX_ALIAS"] = True + combined_default_and_single_table_settings.config["INDEX"]["driver"].config[ + "DEFAULT_PREFIX" + ] = "testprefix:" + combined_default_and_single_table_settings.config["INDEX"]["driver"].config[ + "PREPEND_PREFIX" + ] = True + combined_default_and_single_table_settings.config["INDEX"]["driver"].config[ + "ADD_PREFIX_ALIAS" + ] = True def verify_timestamps(expected_doc, did, client, has_updated_date=True): From de50dbe5446b891e948ba21cfe46f52e61283fdd Mon Sep 17 00:00:00 2001 From: 
BinamB
Date: Mon, 9 Sep 2024 10:56:09 -0500
Subject: [PATCH 42/47] Address review comments

---
 bin/indexd_settings.py                        |  40 +-
 deployment/Secrets/indexd_settings.py         |  42 +-
 indexd/default_settings.py                    |  45 +-
 indexd/index/drivers/single_table_alchemy.py  |  73 ++--
 indexd/single_table_settings.py               |  69 ----
 .../bb3d7586a096_createsingletable.py         |   1 +
 poetry.lock                                   | 387 +++++++-----------
 pyproject.toml                                |   2 -
 tests/conftest.py                             |  65 ++-
 tests/test_driver_alchemy_crud.py             | 107 ++---
 ...ion.py => test_migrate_to_single_table.py} |   0
 11 files changed, 375 insertions(+), 456 deletions(-)
 delete mode 100644 indexd/single_table_settings.py
 rename tests/{test_migration.py => test_migrate_to_single_table.py} (100%)

diff --git a/bin/indexd_settings.py b/bin/indexd_settings.py
index bcc67985..cdcfa704 100644
--- a/bin/indexd_settings.py
+++ b/bin/indexd_settings.py
@@ -4,6 +4,7 @@
 from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver
 from indexd.alias.drivers.alchemy import SQLAlchemyAliasDriver
 from indexd.auth.drivers.alchemy import SQLAlchemyAuthDriver
+from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver

 APP_NAME = "indexd"
@@ -24,6 +25,8 @@ def load_json(file_name):

 CONFIG["JSONIFY_PRETTYPRINT_REGULAR"] = False

+DATABASE_FORMAT = ""
+
 dist = environ.get("DIST", None)
 if dist:
     CONFIG["DIST"] = json.loads(dist)
@@ -32,18 +35,33 @@ def load_json(file_name):
 if drs_service_info:
     CONFIG["DRS_SERVICE_INFO"] = json.loads(drs_service_info)

-CONFIG["INDEX"] = {
-    "driver": SQLAlchemyIndexDriver(
-        "postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}".format(
-            usr=usr,
-            psw=psw,
-            pghost=pghost,
-            pgport=pgport,
-            db=db,
+if DATABASE_FORMAT.lower() == "single_table":
+    CONFIG["INDEX"] = {
+        "driver": SingleTableSQLAlchemyIndexDriver(
+            "postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}".format(
+                usr=usr,
+                psw=psw,
+                pghost=pghost,
+                pgport=pgport,
+                db=db,
+            ),
+            index_config=index_config,
         ),
-        index_config=index_config,
-    ),
-}
+    }
+else:
+    CONFIG["INDEX"] = {
+        "driver": SQLAlchemyIndexDriver(
+            "postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}".format(
+                usr=usr,
+                psw=psw,
+                pghost=pghost,
+                pgport=pgport,
+                db=db,
+            ),
+            index_config=index_config,
+        ),
+    }
+

 CONFIG["ALIAS"] = {
     "driver": SQLAlchemyAliasDriver(
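
Note that DATABASE_FORMAT above is hardcoded to an empty string and never read from the environment in this patch (it was also compared via the unbound .lower method, corrected to a call here), so the single-table branch cannot be taken as written. A minimal sketch of the presumably intended wiring; the DATABASE_FORMAT environment variable name is an assumption, only the driver classes come from this file:

    from os import environ

    # Assumed wiring: choose the index driver class from an environment
    # variable. This patch does not actually perform the environ lookup yet.
    DATABASE_FORMAT = environ.get("DATABASE_FORMAT", "")

    if DATABASE_FORMAT.lower() == "single_table":
        index_driver_class = SingleTableSQLAlchemyIndexDriver
    else:
        index_driver_class = SQLAlchemyIndexDriver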
diff --git a/deployment/Secrets/indexd_settings.py b/deployment/Secrets/indexd_settings.py
index 4b45badf..5baab416 100644
--- a/deployment/Secrets/indexd_settings.py
+++ b/deployment/Secrets/indexd_settings.py
@@ -4,6 +4,7 @@
 from indexd.alias.drivers.alchemy import SQLAlchemyAliasDriver
 from indexd.auth.drivers.alchemy import SQLAlchemyAuthDriver
 from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver
+from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver

 APP_NAME = "indexd"
@@ -22,6 +23,8 @@ def load_json(file_name):
     index_config = conf_data.get("index_config")

 CONFIG = {}
+USE_SINGLE_TABLE = False
+
 CONFIG["JSONIFY_PRETTYPRINT_REGULAR"] = False

 dist = environ.get("DIST", None)
@@ -32,18 +35,33 @@ def load_json(file_name):
 if drs_service_info:
     CONFIG["DRS_SERVICE_INFO"] = json.loads(drs_service_info)

-CONFIG["INDEX"] = {
-    "driver": SingleTableSQLAlchemyIndexDriver(
-        "postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}".format(
-            usr=usr,
-            psw=psw,
-            pghost=pghost,
-            pgport=pgport,
-            db=db,
+if USE_SINGLE_TABLE is True:
+    CONFIG["INDEX"] = {
+        "driver": SingleTableSQLAlchemyIndexDriver(
+            "postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}".format(
+                usr=usr,
+                psw=psw,
+                pghost=pghost,
+                pgport=pgport,
+                db=db,
+            ),
+            index_config=index_config,
         ),
-        index_config=index_config,
-    ),
-}
+    }
+else:
+    CONFIG["INDEX"] = {
+        "driver": SQLAlchemyIndexDriver(
+            "postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}".format(
+                usr=usr,
+                psw=psw,
+                pghost=pghost,
+                pgport=pgport,
+                db=db,
+            ),
+            index_config=index_config,
+        ),
+    }
+

 CONFIG["ALIAS"] = {
     "driver": SQLAlchemyAliasDriver(
@@ -68,4 +86,4 @@ def load_json(file_name):
     arborist="http://arborist-service/",
 )

-settings = {"config": CONFIG, "auth": AUTH}
+settings = {"config": CONFIG, "auth": AUTH, "use_single_table": USE_SINGLE_TABLE}
diff --git a/indexd/default_settings.py b/indexd/default_settings.py
index b9b72c36..75d26271 100644
--- a/indexd/default_settings.py
+++ b/indexd/default_settings.py
@@ -1,6 +1,8 @@
 from .index.drivers.alchemy import SQLAlchemyIndexDriver
 from .alias.drivers.alchemy import SQLAlchemyAliasDriver
 from .auth.drivers.alchemy import SQLAlchemyAuthDriver
+from .index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver
+

 CONFIG = {}

@@ -9,28 +11,43 @@
 # Key to lock the database during migrations
 CONFIG["DB_MIGRATION_POSTGRES_LOCK_KEY"] = 100

+USE_SINGLE_TABLE = False
+
 # - DEFAULT_PREFIX: prefix to be prepended.
 # - PREPEND_PREFIX: the prefix is preprended to the generated GUID when a
 #   new record is created WITHOUT a provided GUID.
 # - ADD_PREFIX_ALIAS: aliases are created for new records - "".
 # Do NOT set both ADD_PREFIX_ALIAS and PREPEND_PREFIX to True, or aliases
 # will be created as "".
-CONFIG["INDEX"] = {
-    "driver": SQLAlchemyIndexDriver(
-        "postgresql://postgres:postgres@localhost:5432/indexd_tests",  # pragma: allowlist secret
-        echo=True,
-        index_config={
-            "DEFAULT_PREFIX": "testprefix:",
-            "PREPEND_PREFIX": True,
-            "ADD_PREFIX_ALIAS": False,
-        },
-    )
-}
+if USE_SINGLE_TABLE is True:
+    CONFIG["INDEX"] = {
+        "driver": SingleTableSQLAlchemyIndexDriver(
+            "postgresql://postgres:postgres@localhost:5432/indexd_tests",  # pragma: allowlist secret
+            echo=True,
+            index_config={
+                "DEFAULT_PREFIX": "testprefix:",
+                "PREPEND_PREFIX": True,
+                "ADD_PREFIX_ALIAS": False,
+            },
+        )
+    }
+else:
+    CONFIG["INDEX"] = {
+        "driver": SQLAlchemyIndexDriver(
+            "postgresql://postgres:postgres@localhost:5432/indexd_tests",  # pragma: allowlist secret
+            echo=True,
+            index_config={
+                "DEFAULT_PREFIX": "testprefix:",
+                "PREPEND_PREFIX": True,
+                "ADD_PREFIX_ALIAS": False,
+            },
+        )
+    }

 CONFIG["ALIAS"] = {
     "driver": SQLAlchemyAliasDriver(
         "postgresql://postgres:postgres@localhost:5432/indexd_tests",  # pragma: allowlist secret
-        echo=True,  # pragma: allowlist secret
+        echo=True,
     )
 }
@@ -68,6 +85,6 @@
 AUTH = SQLAlchemyAuthDriver(
     "postgresql://postgres:postgres@localhost:5432/indexd_tests"  # pragma: allowlist secret
-)  # pragma: allowlist secret
+)

-settings = {"config": CONFIG, "auth": AUTH}
+settings = {"config": CONFIG, "auth": AUTH, "use_single_table": USE_SINGLE_TABLE}
diff --git a/indexd/index/drivers/single_table_alchemy.py b/indexd/index/drivers/single_table_alchemy.py
index b6e13db8..458d61a0 100644
--- a/indexd/index/drivers/single_table_alchemy.py
+++ b/indexd/index/drivers/single_table_alchemy.py
@@ -212,6 +212,12 @@ def ids(
             query = self._negate_filter(session, query, **negate_params)

         if page is not None:
+            # Order by updated date so newly added records land at the end
+            # (reduces the risk that a new record ends up in an earlier page)
+            # and allows logic to check for newly added records
+            # (e.g. processing from beginning -> middle and from end -> middle
+            # in parallel, then as a final step checking "ending"+1 to see if
+            # there are new records).
             query = query.order_by(Record.updated_date)
         else:
             query = query.order_by(Record.guid)
@@ -380,6 +386,20 @@ def get_urls(self, size=None, hashes=None, ids=None, start=0, limit=100):

         return return_urls

+    def _validate_and_format_content_dates(
+        self, record, content_created_date, content_updated_date
+    ):
+        if content_created_date is not None:
+            record.content_created_date = datetime.datetime.fromisoformat(
+                content_created_date
+            )
+            # Users cannot set content_updated_date without a content_created_date
+            record.content_updated_date = (
+                datetime.datetime.fromisoformat(content_updated_date)
+                if content_updated_date is not None
+                else record.content_created_date  # Set updated to created if no updated is provided
+            )
+
     def add(
         self,
         form,
@@ -448,20 +468,14 @@

         record.description = description

-        if content_created_date is not None:
-            record.content_created_date = datetime.datetime.fromisoformat(
-                content_created_date
-            )
-            # Users cannot set content_updated_date without a content_created_date
-            record.content_updated_date = (
-                datetime.datetime.fromisoformat(content_updated_date)
-                if content_updated_date is not None
-                else record.content_created_date  # Set updated to created if no updated is provided
-            )
-
+        self._validate_and_format_content_dates(
+            record=record,
+            content_created_date=content_created_date,
+            content_updated_date=content_updated_date,
+        )
         try:
-            checked_url_metadata = check_url_metadata(url_metadata, record)
-            record.url_metadata = checked_url_metadata
+            check_url_metadata(url_metadata, record)
+            record.url_metadata = url_metadata
             if self.config.get("ADD_PREFIX_ALIAS"):
                 prefix = self.config["DEFAULT_PREFIX"]
                 record.alias = list(set([prefix + record.guid]))
@@ -471,6 +485,9 @@
             raise MultipleRecordsFound(
                 'guid "{guid}" already exists'.format(guid=record.guid)
             )
+        except Exception as e:
+            self.logger.error(e)
+            raise

         return record.guid, record.rev, record.baseid
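
As a hedged usage sketch of the date handling that _validate_and_format_content_dates centralizes (driver stands for a configured SingleTableSQLAlchemyIndexDriver; the form, URL, and hash values are placeholders):

    # Content dates arrive as ISO 8601 strings and are parsed with
    # datetime.datetime.fromisoformat(); an omitted content_updated_date
    # falls back to content_created_date, and both are skipped entirely
    # when content_created_date is None.
    did, rev, baseid = driver.add(
        "object",
        urls=["s3://bucket/key"],
        hashes={"md5": "8b1a9953c4611296a827abf8c47804d7"},
        content_created_date="2024-09-09T10:56:09",
        content_updated_date=None,  # falls back to the created date
    )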
@@ -568,7 +584,7 @@ def update_blank_record(self, did, rev, size, hashes, urls, authz=None):
         except AuthError as err:
             self.logger.error(
                 authz_err_msg.format("update", all_authz)
-                + " Falling back to 'file_uplaod' on '/data_file'."
+                + " Falling back to 'file_upload' on '/data_file'."
             )

         record.authz = set(authz)
@@ -687,7 +703,7 @@ def replace_aliases_for_did(self, aliases, did):
             try:
                 query = session.query(Record).filter(Record.guid == did)
                 record = query.one()
-                # delete this GUID's aliases and add new aliases
+                # delete this GUID's aliases
                 record.alias = aliases
                 session.commit()
                 self.logger.info(
@@ -864,10 +880,8 @@ def update(self, did, rev, changing_fields):
                 record.record_metadata = changing_fields["metadata"]

             if "urls_metadata" in changing_fields:
-                checked_url_metadata = check_url_metadata(
-                    changing_fields["urls_metadata"], record
-                )
-                record.url_metadata = checked_url_metadata
+                check_url_metadata(changing_fields["urls_metadata"], record)
+                record.url_metadata = changing_fields["urls_metadata"]

             if changing_fields.get("content_created_date") is not None:
                 record.content_created_date = datetime.datetime.fromisoformat(
@@ -988,7 +1002,9 @@ def add_version(
             record.authz = authz
             record.hashes = hashes
             record.record_metadata = metadata
-            record.url_metadata = check_url_metadata(urls_metadata, record)
+
+            check_url_metadata(urls_metadata, record)
+            record.url_metadata = urls_metadata

             try:
                 session.add(record)
@@ -996,6 +1012,12 @@
             except IntegrityError:
                 raise MultipleRecordsFound("{guid} already exists".format(guid=guid))

+            self._validate_and_format_content_dates(
+                record=record,
+                content_created_date=content_created_date,
+                content_updated_date=content_updated_date,
+            )
+
             return record.guid, record.baseid, record.rev

     def add_blank_version(
@@ -1045,7 +1067,7 @@
         new_record.guid = guid
         new_record.baseid = old_record.baseid
-        new_record.rev = str(uuid.uuid4())
+        new_record.rev = str(uuid.uuid4())[:8]
         new_record.file_name = old_record.file_name
         new_record.uploader = old_record.uploader
@@ -1504,10 +1526,9 @@ def check_url_metadata(url_metadata, record):
     create url metadata record in database
     """
     urls = {u for u in record.urls}
-    for url, metadata in url_metadata.items():
-        if url not in urls:
+    for url in url_metadata:
+        if url not in urls:
             raise UserError("url {} in url_metadata does not exist".format(url))
-    return url_metadata


 def get_record_if_exists(did, session):
@@ -1516,7 +1537,3 @@ def get_record_if_exists(did, session):
     If no record found, returns None.
     """
     return session.query(Record).filter(Record.guid == did).first()
-
-
-SCHEMA_MIGRATION_FUNCTIONS = []
-CURRENT_SCHEMA_VERSION = len(SCHEMA_MIGRATION_FUNCTIONS)
diff --git a/indexd/single_table_settings.py b/indexd/single_table_settings.py
deleted file mode 100644
index 326358ef..00000000
--- a/indexd/single_table_settings.py
+++ /dev/null
@@ -1,69 +0,0 @@
-import os
-
-from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver
-from .alias.drivers.alchemy import SQLAlchemyAliasDriver
-from .auth.drivers.alchemy import SQLAlchemyAuthDriver
-
-# - DEFAULT_PREFIX: prefix to be prepended.
-# - PREPEND_PREFIX: the prefix is preprended to the generated GUID when a
-#   new record is created WITHOUT a provided GUID.
-# - ADD_PREFIX_ALIAS: aliases are created for new records - "".
-# Do NOT set both ADD_PREFIX_ALIAS and PREPEND_PREFIX to True, or aliases
-# will be created as "".
- -CONFIG = {} -CONFIG["DIST"] = [ - { - "name": "testStage", - "host": "https://fictitious-commons.io/index/", - "hints": [".*dg\\.4503.*"], - "type": "indexd", - }, -] - -CONFIG["DRS_SERVICE_INFO"] = { - "name": "DRS System", - "type": { - "group": "org.ga4gh", - "artifact": "drs", - "version": "1.0.3", - }, - "version": "1.0.3", - "organization": { - "name": "CTDS", - "url": "https://fictitious-commons.io", - }, -} - -os.environ["PRESIGNED_FENCE_URL"] = "https://fictitious-commons.io/" -os.environ["HOSTNAME"] = "fictitious-commons.io" - -# Set PSQL Port, see https://www.postgresql.org/docs/12/app-psql.html -# PSQL default port is 5432, but in some setups, can be 5433. -psql_port = os.environ["PGPORT"] if os.environ.get("PGPORT") else "5432" - - -CONFIG["INDEX"] = { - "driver": SingleTableSQLAlchemyIndexDriver( - "postgresql://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret - echo=True, - index_config={ - "DEFAULT_PREFIX": "testprefix:", - "PREPEND_PREFIX": True, - "ADD_PREFIX_ALIAS": False, - }, - ) -} - -CONFIG["ALIAS"] = { - "driver": SQLAlchemyAliasDriver( - "postgresql://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret - echo=True, # pragma: allowlist secret - ) -} - -AUTH = SQLAlchemyAuthDriver( - "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret -) # pragma: allowlist secret - -settings = {"config": CONFIG, "auth": AUTH} diff --git a/migrations/versions/bb3d7586a096_createsingletable.py b/migrations/versions/bb3d7586a096_createsingletable.py index fa186620..b1798aa4 100644 --- a/migrations/versions/bb3d7586a096_createsingletable.py +++ b/migrations/versions/bb3d7586a096_createsingletable.py @@ -17,6 +17,7 @@ depends_on = None +# TODO: We need another migration that clears up old tables def upgrade() -> None: op.create_table( "record", diff --git a/poetry.lock b/poetry.lock index 83e9d674..8603d2e3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -41,74 +41,6 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (>=0.23)"] -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - -[[package]] -name = "asyncpg" -version = "0.29.0" -description = "An asyncio PostgreSQL driver" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "asyncpg-0.29.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72fd0ef9f00aeed37179c62282a3d14262dbbafb74ec0ba16e1b1864d8a12169"}, - {file = "asyncpg-0.29.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52e8f8f9ff6e21f9b39ca9f8e3e33a5fcdceaf5667a8c5c32bee158e313be385"}, - {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e6823a7012be8b68301342ba33b4740e5a166f6bbda0aee32bc01638491a22"}, - {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:746e80d83ad5d5464cfbf94315eb6744222ab00aa4e522b704322fb182b83610"}, - {file = 
"asyncpg-0.29.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ff8e8109cd6a46ff852a5e6bab8b0a047d7ea42fcb7ca5ae6eaae97d8eacf397"}, - {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97eb024685b1d7e72b1972863de527c11ff87960837919dac6e34754768098eb"}, - {file = "asyncpg-0.29.0-cp310-cp310-win32.whl", hash = "sha256:5bbb7f2cafd8d1fa3e65431833de2642f4b2124be61a449fa064e1a08d27e449"}, - {file = "asyncpg-0.29.0-cp310-cp310-win_amd64.whl", hash = "sha256:76c3ac6530904838a4b650b2880f8e7af938ee049e769ec2fba7cd66469d7772"}, - {file = "asyncpg-0.29.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4900ee08e85af01adb207519bb4e14b1cae8fd21e0ccf80fac6aa60b6da37b4"}, - {file = "asyncpg-0.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a65c1dcd820d5aea7c7d82a3fdcb70e096f8f70d1a8bf93eb458e49bfad036ac"}, - {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b52e46f165585fd6af4863f268566668407c76b2c72d366bb8b522fa66f1870"}, - {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc600ee8ef3dd38b8d67421359779f8ccec30b463e7aec7ed481c8346decf99f"}, - {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:039a261af4f38f949095e1e780bae84a25ffe3e370175193174eb08d3cecab23"}, - {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6feaf2d8f9138d190e5ec4390c1715c3e87b37715cd69b2c3dfca616134efd2b"}, - {file = "asyncpg-0.29.0-cp311-cp311-win32.whl", hash = "sha256:1e186427c88225ef730555f5fdda6c1812daa884064bfe6bc462fd3a71c4b675"}, - {file = "asyncpg-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfe73ffae35f518cfd6e4e5f5abb2618ceb5ef02a2365ce64f132601000587d3"}, - {file = "asyncpg-0.29.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6011b0dc29886ab424dc042bf9eeb507670a3b40aece3439944006aafe023178"}, - {file = "asyncpg-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b544ffc66b039d5ec5a7454667f855f7fec08e0dfaf5a5490dfafbb7abbd2cfb"}, - {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d84156d5fb530b06c493f9e7635aa18f518fa1d1395ef240d211cb563c4e2364"}, - {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54858bc25b49d1114178d65a88e48ad50cb2b6f3e475caa0f0c092d5f527c106"}, - {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bde17a1861cf10d5afce80a36fca736a86769ab3579532c03e45f83ba8a09c59"}, - {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:37a2ec1b9ff88d8773d3eb6d3784dc7e3fee7756a5317b67f923172a4748a175"}, - {file = "asyncpg-0.29.0-cp312-cp312-win32.whl", hash = "sha256:bb1292d9fad43112a85e98ecdc2e051602bce97c199920586be83254d9dafc02"}, - {file = "asyncpg-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:2245be8ec5047a605e0b454c894e54bf2ec787ac04b1cb7e0d3c67aa1e32f0fe"}, - {file = "asyncpg-0.29.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0009a300cae37b8c525e5b449233d59cd9868fd35431abc470a3e364d2b85cb9"}, - {file = "asyncpg-0.29.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cad1324dbb33f3ca0cd2074d5114354ed3be2b94d48ddfd88af75ebda7c43cc"}, - {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:012d01df61e009015944ac7543d6ee30c2dc1eb2f6b10b62a3f598beb6531548"}, - {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:000c996c53c04770798053e1730d34e30cb645ad95a63265aec82da9093d88e7"}, - {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e0bfe9c4d3429706cf70d3249089de14d6a01192d617e9093a8e941fea8ee775"}, - {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:642a36eb41b6313ffa328e8a5c5c2b5bea6ee138546c9c3cf1bffaad8ee36dd9"}, - {file = "asyncpg-0.29.0-cp38-cp38-win32.whl", hash = "sha256:a921372bbd0aa3a5822dd0409da61b4cd50df89ae85150149f8c119f23e8c408"}, - {file = "asyncpg-0.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:103aad2b92d1506700cbf51cd8bb5441e7e72e87a7b3a2ca4e32c840f051a6a3"}, - {file = "asyncpg-0.29.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5340dd515d7e52f4c11ada32171d87c05570479dc01dc66d03ee3e150fb695da"}, - {file = "asyncpg-0.29.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e17b52c6cf83e170d3d865571ba574577ab8e533e7361a2b8ce6157d02c665d3"}, - {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f100d23f273555f4b19b74a96840aa27b85e99ba4b1f18d4ebff0734e78dc090"}, - {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48e7c58b516057126b363cec8ca02b804644fd012ef8e6c7e23386b7d5e6ce83"}, - {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f9ea3f24eb4c49a615573724d88a48bd1b7821c890c2effe04f05382ed9e8810"}, - {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8d36c7f14a22ec9e928f15f92a48207546ffe68bc412f3be718eedccdf10dc5c"}, - {file = "asyncpg-0.29.0-cp39-cp39-win32.whl", hash = "sha256:797ab8123ebaed304a1fad4d7576d5376c3a006a4100380fb9d517f0b59c1ab2"}, - {file = "asyncpg-0.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:cce08a178858b426ae1aa8409b5cc171def45d4293626e7aa6510696d46decd8"}, - {file = "asyncpg-0.29.0.tar.gz", hash = "sha256:d1c49e1f44fffafd9a55e1a9b101590859d881d639ea2922516f5d9c512d354e"}, -] - -[package.dependencies] -async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.12.0\""} - -[package.extras] -docs = ["Sphinx (>=5.3.0,<5.4.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["flake8 (>=6.1,<7.0)", "uvloop (>=0.15.3)"] - [[package]] name = "atomicwrites" version = "1.4.1" @@ -236,106 +168,91 @@ files = [ {file = "cdislogging-1.1.1.tar.gz", hash = "sha256:77e11648244cda3a8094b8ae6081435a2303f259612846c49ef8825c7be141e3"}, ] -[[package]] -name = "cdisutilstest" -version = "0.2.4" -description = "Collection of test data and tools" -optional = false -python-versions = "*" -files = [] -develop = false - -[package.source] -type = "git" -url = "https://github.com/uc-cdis/cdisutils-test" -reference = "1.0.0" -resolved_reference = "bdfdeb05e45407e839fd954ce6d195d847cd8024" - [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "cffi" -version = "1.17.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, - {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, - {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, - {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, - {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, - {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, - {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, - {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, - {file = 
"cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, - {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, - {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, - {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, - {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, - {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, - {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, - {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, - {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = 
"cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file 
= "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -548,38 +465,38 @@ yaml = ["PyYAML (>=3.10)"] [[package]] name = "cryptography" -version = "43.0.0" +version = "43.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf"}, - {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55"}, - {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431"}, - {file = "cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc"}, - {file = "cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778"}, - {file = "cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f"}, - {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0"}, - {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b"}, - {file = "cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf"}, - {file = "cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1"}, - {file = "cryptography-43.0.0.tar.gz", hash = "sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e"}, + {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, + {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, + {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, + {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", 
hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, + {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, + {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, + {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, ] [package.dependencies] @@ -592,7 +509,7 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.0)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -1345,19 +1262,23 @@ tests = ["coverage (>=3.7.1,<6.0.0)", "flake8", "mypy", "pytest (>=4.6)", "pytes [[package]] name = "setuptools" -version = "73.0.1" +version = "74.1.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-73.0.1-py3-none-any.whl", hash = "sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e"}, - {file = "setuptools-73.0.1.tar.gz", hash = "sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193"}, + {file = "setuptools-74.1.2-py3-none-any.whl", hash = "sha256:5f4c08aa4d3ebcb57a50c33b1b07e94315d7fc7230f7115e47fc99776c8ce308"}, + {file = "setuptools-74.1.2.tar.gz", hash = "sha256:95b40ed940a1c67eb70fc099094bd6e99c6ee7c23aa2306f4d2697ba7916f9c6"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging 
(>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] [[package]] name = "six" @@ -1383,55 +1304,55 @@ files = [ [[package]] name = "sqlalchemy" -version = "1.4.53" +version = "1.4.54" description = "Database Abstraction Library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "SQLAlchemy-1.4.53-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b61ac5457d91b5629a3dea2b258deb4cdd35ac8f6fa2031d2b9b2fff5b3396da"}, - {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a96aa8d425047551676b0e178ddb0683421e78eda879ab55775128b2e612cae"}, - {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e10ac36f0b994235c13388b39598bf27219ec8bdea5be99bdac612b01cbe525"}, - {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:437592b341a3229dd0443c9c803b0bf0a466f8f539014fef6cdb9c06b7edb7f9"}, - {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:784272ceb5eb71421fea9568749bcbe8bd019261a0e2e710a7efa76057af2499"}, - {file = "SQLAlchemy-1.4.53-cp310-cp310-win32.whl", hash = "sha256:122d7b5722df1a24402c6748bbb04687ef981493bb559d0cc0beffe722e0e6ed"}, - {file = "SQLAlchemy-1.4.53-cp310-cp310-win_amd64.whl", hash = "sha256:4604d42b2abccba266d3f5bbe883684b5df93e74054024c70d3fbb5eea45e530"}, - {file = "SQLAlchemy-1.4.53-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fb8e15dfa47f5de11ab073e12aadd6b502cfb7ac4bafd18bd18cfd1c7d13dbbc"}, - {file = "SQLAlchemy-1.4.53-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc8be4df55e8fde3006d9cb1f6b3df2ba26db613855dc4df2c0fcd5ec15cb3b7"}, - {file = "SQLAlchemy-1.4.53-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:86b11640251f9a9789fd96cd6e5d176b1c230230c70ad40299bcbcc568451b4c"}, - {file = "SQLAlchemy-1.4.53-cp311-cp311-win32.whl", hash = "sha256:cd534c716f86bdf95b7b984a34ee278c91d1b1d7d183e7e5ff878600b1696046"}, - {file = "SQLAlchemy-1.4.53-cp311-cp311-win_amd64.whl", hash = "sha256:6dd06572872ca13ef5a90306a3e5af787498ddaa17fb00109b1243642646cd69"}, - {file = "SQLAlchemy-1.4.53-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2774c24c405136c3ef472e2352bdca7330659d481fbf2283f996c0ef9eb90f22"}, - {file = "SQLAlchemy-1.4.53-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68a614765197b3d13a730d631a78c3bb9b3b72ba58ed7ab295d58d517464e315"}, - {file = "SQLAlchemy-1.4.53-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d13d4dfbc6e52363886b47cf02cf68c5d2a37c468626694dc210d7e97d4ad330"}, - {file = "SQLAlchemy-1.4.53-cp312-cp312-win32.whl", hash = "sha256:197065b91456574d70b6459bfa62bc0b52a4960a29ef923c375ec427274a3e05"}, - {file = "SQLAlchemy-1.4.53-cp312-cp312-win_amd64.whl", hash = "sha256:421306c4b936b0271a3ce2dc074928d5ece4a36f9c482daa5770f44ecfc3a883"}, - {file = "SQLAlchemy-1.4.53-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:13fc34b35d8ddb3fbe3f8fcfdf6c2546e676187f0fb20f5774da362ddaf8fa2d"}, - {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626be971ff89541cfd3e70b54be00b57a7f8557204decb6223ce0428fec058f3"}, - {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:991e42fdfec561ebc6a4fae7161a86d129d6069fa14210b96b8dd752afa7059c"}, - {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:95123f3a1e0e8020848fd32ba751db889a01a44e4e4fef7e58c87ddd0b2fca59"}, - {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c58e011e9e6373b3a091d83f20601fb335a3b4bace80bfcb914ac168aad3b70d"}, - {file = "SQLAlchemy-1.4.53-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:670c7769bf5dcae9aff331247b5d82fe635c63731088a46ce68ba2ba519ef36e"}, - {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07ba54f09033d387ae9df8d62cbe211ed7304e0bfbece1f8c55e21db9fae5c11"}, - {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a38834b4c183c33daf58544281395aad2e985f0b47cca1e88ea5ada88344e63"}, - {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:616492f5315128a847f293a7c552f3561ac7e996d2aa5dc46bef4fb0d3781f1d"}, - {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0cf8c0af9563892c6632f7343bc393dfce6eeef8e4d10c5fadba9c0390520bd"}, - {file = "SQLAlchemy-1.4.53-cp37-cp37m-win32.whl", hash = "sha256:c05fe05941424c2f3747a8952381b7725e24cba2ca00141380e54789d5b616b6"}, - {file = "SQLAlchemy-1.4.53-cp37-cp37m-win_amd64.whl", hash = "sha256:93e90aa3e3b2f8e8cbae4d5509f8e0cf82972378d323c740a8df1c1e9f484172"}, - {file = "SQLAlchemy-1.4.53-cp38-cp38-macosx_12_0_x86_64.whl", hash = 
"sha256:9d7368df54d3ed45a18955f6cec38ebe075290594ac0d5c87a8ddaff7e10de27"}, - {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89d8ac4158ef68eea8bb0f6dd0583127d9aa8720606964ba8eee20b254f9c83a"}, - {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16bb9fa4d00b4581b14d9f0e2224dc7745b854aa4687738279af0f48f7056c98"}, - {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4fe5168d0249c23f537950b6d75935ff2709365a113e29938a979aec36668ecf"}, - {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b8608d162d3bd29d807aab32c3fb6e2f8e225a43d1c54c917fed38513785380"}, - {file = "SQLAlchemy-1.4.53-cp38-cp38-win32.whl", hash = "sha256:a9d4d132198844bd6828047135ce7b887687c92925049a2468a605fc775c7a1a"}, - {file = "SQLAlchemy-1.4.53-cp38-cp38-win_amd64.whl", hash = "sha256:c15d1f1fcf1f9bec0499ae1d9132b950fcc7730f2d26d10484c8808b4e077816"}, - {file = "SQLAlchemy-1.4.53-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:edf094a20a386ff2ec73de65ef18014b250259cb860edc61741e240ca22d6981"}, - {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a9c3514ff19d9d30d8a8d378b24cd1dfa5528d20891481cb5f196117db6a48"}, - {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaaeedbceb4dfd688fff2faf25a9a87a391f548811494f7bff7fa701b639abc3"}, - {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d021699b9007deb7aa715629078830c99a5fec2753d9bdd5ff33290d363ef755"}, - {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0465b8a68f8f4de754c1966c45b187ac784ad97bc9747736f913130f0e1adea0"}, - {file = "SQLAlchemy-1.4.53-cp39-cp39-win32.whl", hash = "sha256:5f67b9e9dcac3241781e96575468d55a42332157dee04bdbf781df573dff5f85"}, - {file = "SQLAlchemy-1.4.53-cp39-cp39-win_amd64.whl", hash = "sha256:a8c2f2a0b2c4e3b86eb58c9b6bb98548205eea2fba9dae4edfd29dc6aebbe95a"}, - {file = "SQLAlchemy-1.4.53.tar.gz", hash = "sha256:5e6ab710c4c064755fd92d1a417bef360228a19bdf0eee32b03aa0f5f8e9fe0d"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:af00236fe21c4d4f4c227b6ccc19b44c594160cc3ff28d104cdce85855369277"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1183599e25fa38a1a322294b949da02b4f0da13dbc2688ef9dbe746df573f8a6"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1990d5a6a5dc358a0894c8ca02043fb9a5ad9538422001fb2826e91c50f1d539"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:14b3f4783275339170984cadda66e3ec011cce87b405968dc8d51cf0f9997b0d"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6b24364150738ce488333b3fb48bfa14c189a66de41cd632796fbcacb26b4585"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-win32.whl", hash = "sha256:a8a72259a1652f192c68377be7011eac3c463e9892ef2948828c7d58e4829988"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-win_amd64.whl", hash = "sha256:b67589f7955924865344e6eacfdcf70675e64f36800a576aa5e961f0008cde2a"}, + {file = "SQLAlchemy-1.4.54-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b05e0626ec1c391432eabb47a8abd3bf199fb74bfde7cc44a26d2b1b352c2c6e"}, + {file = "SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13e91d6892b5fcb94a36ba061fb7a1f03d0185ed9d8a77c84ba389e5bb05e936"}, + {file = "SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb59a11689ff3c58e7652260127f9e34f7f45478a2f3ef831ab6db7bcd72108f"}, + {file = "SQLAlchemy-1.4.54-cp311-cp311-win32.whl", hash = "sha256:1390ca2d301a2708fd4425c6d75528d22f26b8f5cbc9faba1ddca136671432bc"}, + {file = "SQLAlchemy-1.4.54-cp311-cp311-win_amd64.whl", hash = "sha256:2b37931eac4b837c45e2522066bda221ac6d80e78922fb77c75eb12e4dbcdee5"}, + {file = "SQLAlchemy-1.4.54-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3f01c2629a7d6b30d8afe0326b8c649b74825a0e1ebdcb01e8ffd1c920deb07d"}, + {file = "SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c24dd161c06992ed16c5e528a75878edbaeced5660c3db88c820f1f0d3fe1f4"}, + {file = "SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5e0d47d619c739bdc636bbe007da4519fc953393304a5943e0b5aec96c9877c"}, + {file = "SQLAlchemy-1.4.54-cp312-cp312-win32.whl", hash = "sha256:12bc0141b245918b80d9d17eca94663dbd3f5266ac77a0be60750f36102bbb0f"}, + {file = "SQLAlchemy-1.4.54-cp312-cp312-win_amd64.whl", hash = "sha256:f941aaf15f47f316123e1933f9ea91a6efda73a161a6ab6046d1cde37be62c88"}, + {file = "SQLAlchemy-1.4.54-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:a41611835010ed4ea4c7aed1da5b58aac78ee7e70932a91ed2705a7b38e40f52"}, + {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8c1b9ecaf9f2590337d5622189aeb2f0dbc54ba0232fa0856cf390957584a9"}, + {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0de620f978ca273ce027769dc8db7e6ee72631796187adc8471b3c76091b809e"}, + {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c5a2530400a6e7e68fd1552a55515de6a4559122e495f73554a51cedafc11669"}, + {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cf7076c8578b3de4e43a046cc7a1af8466e1c3f5e64167189fe8958a4f9c02"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:f1e1b92ee4ee9ffc68624ace218b89ca5ca667607ccee4541a90cc44999b9aea"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41cffc63c7c83dfc30c4cab5b4308ba74440a9633c4509c51a0c52431fb0f8ab"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b5933c45d11cbd9694b1540aa9076816cc7406964c7b16a380fd84d3a5fe3241"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cafe0ba3a96d0845121433cffa2b9232844a2609fce694fcc02f3f31214ece28"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a19f816f4702d7b1951d7576026c7124b9bfb64a9543e571774cf517b7a50b29"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-win32.whl", hash = "sha256:76c2ba7b5a09863d0a8166fbc753af96d561818c572dbaf697c52095938e7be4"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-win_amd64.whl", hash = "sha256:a86b0e4be775902a5496af4fb1b60d8a2a457d78f531458d294360b8637bb014"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:a49730afb716f3f675755afec109895cab95bc9875db7ffe2e42c1b1c6279482"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26e78444bc77d089e62874dc74df05a5c71f01ac598010a327881a48408d0064"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02d2ecb9508f16ab9c5af466dfe5a88e26adf2e1a8d1c56eb616396ccae2c186"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:394b0135900b62dbf63e4809cdc8ac923182af2816d06ea61cd6763943c2cc05"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed3576675c187e3baa80b02c4c9d0edfab78eff4e89dd9da736b921333a2432"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-win32.whl", hash = "sha256:fc9ffd9a38e21fad3e8c5a88926d57f94a32546e937e0be46142b2702003eba7"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-win_amd64.whl", hash = "sha256:a01bc25eb7a5688656c8770f931d5cb4a44c7de1b3cec69b84cc9745d1e4cc10"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0b76bbb1cbae618d10679be8966f6d66c94f301cfc15cb49e2f2382563fb6efb"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdb2886c0be2c6c54d0651d5a61c29ef347e8eec81fd83afebbf7b59b80b7393"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:954816850777ac234a4e32b8c88ac1f7847088a6e90cfb8f0e127a1bf3feddff"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1d83cd1cc03c22d922ec94d0d5f7b7c96b1332f5e122e81b1a61fb22da77879a"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1576fba3616f79496e2f067262200dbf4aab1bb727cd7e4e006076686413c80c"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-win32.whl", hash = "sha256:3112de9e11ff1957148c6de1df2bc5cc1440ee36783412e5eedc6f53638a577d"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-win_amd64.whl", hash = "sha256:6da60fb24577f989535b8fc8b2ddc4212204aaf02e53c4c7ac94ac364150ed08"}, + {file = "sqlalchemy-1.4.54.tar.gz", hash = "sha256:4470fbed088c35dc20b78a39aaf4ae54fe81790c783b3264872a0224f437c31a"}, ] [package.dependencies] @@ -1592,4 +1513,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = 
">=3.9,<4.0" -content-hash = "dc32e3f22ed435e056c4787dd2e7d48e9defc8e94040ab29f0684df640620bc2" +content-hash = "acd140a2033030d1529c6bd6765df8492ce2e99a18c209b1dac7b79889a384fd" diff --git a/pyproject.toml b/pyproject.toml index 1808df31..50afc1a8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,7 +12,6 @@ include = [ [tool.poetry.dependencies] python = ">=3.9,<4.0" alembic = "^1.9.4" -asyncpg = "^0.29.0" authutils = "^6.0.0" cdislogging = "^1.0.0" cdiserrors = "^1.0.0" @@ -30,7 +29,6 @@ PyYAML = ">=5.3,<7" [tool.poetry.dev-dependencies] -cdisutilstest = {git = "https://github.com/uc-cdis/cdisutils-test", rev = "1.0.0"} coveralls = "^3.0.1" mock = "^4.0.2" pytest = "^6.2.4" diff --git a/tests/conftest.py b/tests/conftest.py index f70c0f0f..aec4206a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,6 +6,8 @@ import mock from unittest.mock import patch +from cdislogging import get_logger + # indexd_server and indexd_client is needed as fixtures from gen3authz.client.arborist.client import ArboristClient @@ -18,9 +20,13 @@ from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver from indexd.alias.drivers.alchemy import SQLAlchemyAliasDriver from indexd.auth.drivers.alchemy import SQLAlchemyAuthDriver +from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver + POSTGRES_CONNECTION = "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret +logger = get_logger(__name__, log_level="info") + def clear_database(): """ @@ -74,31 +80,51 @@ def combined_default_and_single_table_settings(request): """ # Load the default settings - if request.param == "default_settings": - from indexd import default_settings - from tests import default_test_settings + from indexd import default_settings + from tests import default_test_settings + + importlib.reload(default_settings) + importlib.reload(default_test_settings) - importlib.reload(default_settings) - default_settings.settings = { - **default_settings.settings, - **default_test_settings.settings, + if request.param == "default_settings": + default_settings.settings["use_single_table"] = False + default_settings.settings["config"]["INDEX"] = { + "driver": SQLAlchemyIndexDriver( + "postgresql://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret + echo=True, + index_config={ + "DEFAULT_PREFIX": "testprefix:", + "PREPEND_PREFIX": True, + "ADD_PREFIX_ALIAS": False, + }, + ) } - yield get_app(default_settings.settings) # Load the single-table settings elif request.param == "single_table_settings": - from indexd import single_table_settings - - importlib.reload(single_table_settings) - single_table_settings.settings = { - **single_table_settings.settings, + default_settings.settings["use_single_table"] = True + default_settings.settings["config"]["INDEX"] = { + "driver": SingleTableSQLAlchemyIndexDriver( + "postgresql://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret + echo=True, + index_config={ + "DEFAULT_PREFIX": "testprefix:", + "PREPEND_PREFIX": True, + "ADD_PREFIX_ALIAS": False, + }, + ) } - yield get_app(single_table_settings.settings) + + default_settings.settings = { + **default_settings.settings, + **default_test_settings.settings, + } + yield get_app(default_settings.settings) try: clear_database() - except Exception: - pass + except Exception as e: + logger.error(f"Failed to clear database with error {e}") @pytest.fixture(scope="function", autouse=True) @@ -117,18 +143,17 @@ def app(): try: clear_database() except 
Exception as e:
-        pass
+        logger.error(f"Failed to clear database with error {e}")
 
 
 @pytest.fixture
 def user(app):
     engine = create_engine(POSTGRES_CONNECTION)
     driver = SQLAlchemyAuthDriver(POSTGRES_CONNECTION)
-
     try:
         driver.add("test", "test")
     except Exception as e:
-        pass
+        logger.error(f"Failed to add test user with error {e}")
 
     yield {
         "Authorization": ("Basic " + base64.b64encode(b"test:test").decode("ascii")),
@@ -138,7 +163,7 @@ def user(app):
     try:
         driver.delete("test")
     except Exception as e:
-        pass
+        logger.error(f"Failed to delete test user with error {e}")
 
     engine.dispose()
 
diff --git a/tests/test_driver_alchemy_crud.py b/tests/test_driver_alchemy_crud.py
index d2fb63b3..dde5e8a5 100644
--- a/tests/test_driver_alchemy_crud.py
+++ b/tests/test_driver_alchemy_crud.py
@@ -20,16 +20,13 @@
 POSTGRES_CONNECTION = "postgresql://postgres:postgres@localhost:5432/indexd_tests"  # pragma: allowlist secret
 
 
-def test_driver_init_does_not_create_records(
-    combined_default_and_single_table_settings,
-):
+def test_driver_init_does_not_create_records():
     """
     Tests for creation of records after driver init.
     Tests driver init does not have unexpected side-effects.
     """
     engine = create_engine(POSTGRES_CONNECTION)
-    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
 
     with engine.connect() as conn:
         result = conn.execute("SELECT COUNT(*) FROM index_record")
@@ -38,16 +35,13 @@ def test_driver_init_does_not_create_records(
     assert count == 0, "driver created records upon initialization"
 
 
-def test_driver_init_does_not_create_record_urls(
-    combined_default_and_single_table_settings,
-):
+def test_driver_init_does_not_create_record_urls():
     """
     Tests for creation of urls after driver init.
     Tests driver init does not have unexpected side-effects.
     """
     engine = create_engine(POSTGRES_CONNECTION)
-    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
 
     with engine.connect() as conn:
         result = conn.execute("SELECT COUNT(*) FROM index_record_url")
@@ -56,16 +50,13 @@ def test_driver_init_does_not_create_record_urls(
     assert count == 0, "driver created records urls upon initialization"
 
 
-def test_driver_init_does_not_create_record_hashes(
-    combined_default_and_single_table_settings,
-):
+def test_driver_init_does_not_create_record_hashes():
     """
     Tests for creation of hashes after driver init.
     Tests driver init does not have unexpected side-effects.
     """
     engine = create_engine(POSTGRES_CONNECTION)
-    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
 
     with engine.connect() as conn:
         result = conn.execute("SELECT COUNT(*) FROM index_record_hash")
@@ -74,7 +65,7 @@ def test_driver_init_does_not_create_record_hashes(
     assert count == 0, "driver created records hashes upon initialization"
 
 
-def test_driver_add_object_record(combined_default_and_single_table_settings):
+def test_driver_add_object_record():
     """
     Tests creation of a record.
     """
@@ -103,7 +94,7 @@ def test_driver_add_object_record(combined_default_and_single_table_settings):
     assert record[4] is None, "record size non-null"
 
 
-def test_driver_add_bundle_record(combined_default_and_single_table_settings):
+def test_driver_add_bundle_record():
     """
     Tests creation of a record.
     """
@@ -125,7 +116,7 @@ def test_driver_add_bundle_record(combined_default_and_single_table_settings):
     assert len(result) == 10
 
 
-def test_driver_add_container_record(combined_default_and_single_table_settings):
+def test_driver_add_container_record():
     """
     Tests creation of a record.
""" @@ -157,7 +148,7 @@ def test_driver_add_container_record(combined_default_and_single_table_settings) assert record[4] == None, "record size non-null" -def test_driver_add_bundles_record(combined_default_and_single_table_settings): +def test_driver_add_bundles_record(): """ Tests creation of a record. """ @@ -187,7 +178,7 @@ def test_driver_add_bundles_record(combined_default_and_single_table_settings): assert record[3], "record updated date not populated" -def test_driver_add_multipart_record(combined_default_and_single_table_settings): +def test_driver_add_multipart_record(): """ Tests creation of a record. """ @@ -219,7 +210,7 @@ def test_driver_add_multipart_record(combined_default_and_single_table_settings) assert record[4] == None, "record size non-null" -def test_driver_add_with_valid_did(combined_default_and_single_table_settings): +def test_driver_add_with_valid_did(): """ Tests creation of a record with given valid did. """ @@ -232,7 +223,7 @@ def test_driver_add_with_valid_did(combined_default_and_single_table_settings): assert s.query(IndexRecord).first().did == did -def test_driver_add_with_duplicate_did(combined_default_and_single_table_settings): +def test_driver_add_with_duplicate_did(): """ Tests creation of a record with duplicate did. """ @@ -246,7 +237,7 @@ def test_driver_add_with_duplicate_did(combined_default_and_single_table_setting driver.add(form, did=did) -def test_driver_add_multiple_records(combined_default_and_single_table_settings): +def test_driver_add_multiple_records(): """ Tests creation of a record. """ @@ -280,7 +271,7 @@ def test_driver_add_multiple_records(combined_default_and_single_table_settings) assert record[4] == None, "record size non-null" -def test_driver_add_with_size(combined_default_and_single_table_settings): +def test_driver_add_with_size(): """ Tests creation of a record with size. """ @@ -311,7 +302,7 @@ def test_driver_add_with_size(combined_default_and_single_table_settings): assert size == new_size, "record size mismatch" -def test_driver_add_with_urls(combined_default_and_single_table_settings): +def test_driver_add_with_urls(): """ Tests creation of a record with urls. """ @@ -352,7 +343,7 @@ def test_driver_add_with_urls(combined_default_and_single_table_settings): assert urls == new_urls, "record urls mismatch" -def test_driver_add_with_filename(combined_default_and_single_table_settings): +def test_driver_add_with_filename(): """ Tests creation of a record with filename. """ @@ -365,7 +356,7 @@ def test_driver_add_with_filename(combined_default_and_single_table_settings): assert s.query(IndexRecord).first().file_name == "abc" -def test_driver_add_with_version(combined_default_and_single_table_settings): +def test_driver_add_with_version(): """ Tests creation of a record with version string. """ @@ -378,7 +369,7 @@ def test_driver_add_with_version(combined_default_and_single_table_settings): assert s.query(IndexRecord).first().version == "ver_123" -def test_driver_add_with_hashes(combined_default_and_single_table_settings): +def test_driver_add_with_hashes(): """ Tests creation of a record with hashes. """ @@ -420,7 +411,7 @@ def test_driver_add_with_hashes(combined_default_and_single_table_settings): assert hashes == new_hashes, "record hashes mismatch" -def test_driver_get_record(combined_default_and_single_table_settings): +def test_driver_get_record(): """ Tests retrieval of a record. 
""" @@ -472,7 +463,7 @@ def test_driver_get_record(combined_default_and_single_table_settings): ), "updated date does not match" -def test_driver_get_fails_with_no_records(combined_default_and_single_table_settings): +def test_driver_get_fails_with_no_records(): """ Tests retrieval of a record fails if there are no records. """ @@ -482,9 +473,7 @@ def test_driver_get_fails_with_no_records(combined_default_and_single_table_sett driver.get("some_record_that_does_not_exist") -def test_driver_nonstrict_get_without_prefix( - combined_default_and_single_table_settings, -): +def test_driver_nonstrict_get_without_prefix(): """ Tests retrieval of a record when a default prefix is set, but no prefix is supplied by the request. """ @@ -545,7 +534,7 @@ def test_driver_nonstrict_get_without_prefix( ), "updated date does not match" -def test_driver_nonstrict_get_with_prefix(combined_default_and_single_table_settings): +def test_driver_nonstrict_get_with_prefix(): """ Tests retrieval of a record when a default prefix is set and supplied by the request, but records are stored without prefixes. @@ -607,9 +596,7 @@ def test_driver_nonstrict_get_with_prefix(combined_default_and_single_table_sett ), "updated date does not match" -def test_driver_nonstrict_get_with_incorrect_prefix( - combined_default_and_single_table_settings, -): +def test_driver_nonstrict_get_with_incorrect_prefix(): """ Tests retrieval of a record fails if default prefix is set and request uses a different prefix with same uuid """ @@ -646,9 +633,7 @@ def test_driver_nonstrict_get_with_incorrect_prefix( driver.get_with_nonstrict_prefix("wrongprefix/" + did) -def test_driver_nonstrict_get_with_no_default_prefix( - combined_default_and_single_table_settings, -): +def test_driver_nonstrict_get_with_no_default_prefix(): """ Tests retrieval of a record fails as expected if no default prefix is set """ @@ -665,7 +650,7 @@ def test_driver_nonstrict_get_with_no_default_prefix( driver.get_with_nonstrict_prefix("fake_id_without_prefix") -def test_driver_get_latest_version(combined_default_and_single_table_settings): +def test_driver_get_latest_version(): """ Tests retrieval of the lattest record version """ @@ -722,9 +707,7 @@ def test_driver_get_latest_version(combined_default_and_single_table_settings): ), "updated date does not match" -def test_driver_get_latest_version_with_no_record( - combined_default_and_single_table_settings, -): +def test_driver_get_latest_version_with_no_record(): """ Tests retrieval of the lattest record version """ @@ -756,7 +739,7 @@ def test_driver_get_latest_version_with_no_record( driver.get_latest_version("some base version") -def test_driver_get_all_versions(combined_default_and_single_table_settings): +def test_driver_get_all_versions(): """ Tests retrieval of the lattest record version """ @@ -830,9 +813,7 @@ def test_driver_get_all_versions(combined_default_and_single_table_settings): ), "updated date does not match" -def test_driver_get_all_versions_with_no_record( - combined_default_and_single_table_settings, -): +def test_driver_get_all_versions_with_no_record(): """ Tests retrieval of the lattest record version """ @@ -862,7 +843,7 @@ def test_driver_get_all_versions_with_no_record( driver.get_all_versions("some baseid") -def test_driver_get_fails_with_invalid_id(combined_default_and_single_table_settings): +def test_driver_get_fails_with_invalid_id(): """ Tests retrieval of a record fails if the record id is not found. 
""" @@ -890,7 +871,9 @@ def test_driver_get_fails_with_invalid_id(combined_default_and_single_table_sett driver.get("some_record_that_does_not_exist") -def test_driver_update_record(skip_authz, combined_default_and_single_table_settings): +def test_driver_update_record( + skip_authz, +): _test_driver_update_record() @@ -964,9 +947,7 @@ def _test_driver_update_record(): assert version == new_version, "version does not match" -def test_driver_update_fails_with_no_records( - combined_default_and_single_table_settings, -): +def test_driver_update_fails_with_no_records(): """ Tests updating a record fails if there are no records. """ @@ -978,9 +959,7 @@ def test_driver_update_fails_with_no_records( ) -def test_driver_update_fails_with_invalid_id( - combined_default_and_single_table_settings, -): +def test_driver_update_fails_with_invalid_id(): """ Tests updating a record fails if the record id is not found. """ @@ -1008,9 +987,7 @@ def test_driver_update_fails_with_invalid_id( driver.update("some_record_that_does_not_exist", "some_record_version", rev) -def test_driver_update_fails_with_invalid_rev( - combined_default_and_single_table_settings, -): +def test_driver_update_fails_with_invalid_rev(): """ Tests updating a record fails if the record rev is not invalid. """ @@ -1038,7 +1015,9 @@ def test_driver_update_fails_with_invalid_rev( driver.update(did, baseid, "some_revision") -def test_driver_delete_record(skip_authz, combined_default_and_single_table_settings): +def test_driver_delete_record( + skip_authz, +): _test_driver_delete_record() @@ -1077,9 +1056,7 @@ def _test_driver_delete_record(): assert count == 0, "records remain after deletion" -def test_driver_delete_fails_with_no_records( - combined_default_and_single_table_settings, -): +def test_driver_delete_fails_with_no_records(): """ Tests deletion of a record fails if there are no records. """ @@ -1089,9 +1066,7 @@ def test_driver_delete_fails_with_no_records( driver.delete("some_record_that_does_not_exist", "some_revision") -def test_driver_delete_fails_with_invalid_id( - combined_default_and_single_table_settings, -): +def test_driver_delete_fails_with_invalid_id(): """ Tests deletion of a record fails if the record id is not found. """ @@ -1119,9 +1094,7 @@ def test_driver_delete_fails_with_invalid_id( driver.delete("some_record_that_does_not_exist", rev) -def test_driver_delete_fails_with_invalid_rev( - combined_default_and_single_table_settings, -): +def test_driver_delete_fails_with_invalid_rev(): """ Tests deletion of a record fails if the record rev is not invalid. """ @@ -1149,7 +1122,7 @@ def test_driver_delete_fails_with_invalid_rev( driver.delete(did, "some_revision") -def test_driver_get_bundle(combined_default_and_single_table_settings): +def test_driver_get_bundle(): """ Tests retrieval of a record. 
""" diff --git a/tests/test_migration.py b/tests/test_migrate_to_single_table.py similarity index 100% rename from tests/test_migration.py rename to tests/test_migrate_to_single_table.py From bf64594375dd2e73534061d39faf1bc3ed1612ac Mon Sep 17 00:00:00 2001 From: BinamB Date: Mon, 9 Sep 2024 12:23:02 -0500 Subject: [PATCH 43/47] Add how-to --- .secrets.baseline | 42 +++++++-- docs/migration_to_single_table_indexd.md | 103 +++++++++++++++++++++++ 2 files changed, 139 insertions(+), 6 deletions(-) create mode 100644 docs/migration_to_single_table_indexd.md diff --git a/.secrets.baseline b/.secrets.baseline index 9b21844b..3686d6e3 100644 --- a/.secrets.baseline +++ b/.secrets.baseline @@ -7,6 +7,9 @@ { "name": "AWSKeyDetector" }, + { + "name": "AzureStorageKeyDetector" + }, { "name": "Base64HighEntropyString", "limit": 4.5 @@ -17,9 +20,15 @@ { "name": "CloudantDetector" }, + { + "name": "DiscordBotTokenDetector" + }, + { + "name": "GitHubTokenDetector" + }, { "name": "HexHighEntropyString", - "limit": 3 + "limit": 3.0 }, { "name": "IbmCloudIamDetector" @@ -37,15 +46,24 @@ { "name": "MailchimpDetector" }, + { + "name": "NpmDetector" + }, { "name": "PrivateKeyDetector" }, + { + "name": "SendGridDetector" + }, { "name": "SlackDetector" }, { "name": "SoftlayerDetector" }, + { + "name": "SquareOAuthDetector" + }, { "name": "StripeDetector" }, @@ -57,10 +75,6 @@ { "path": "detect_secrets.filters.allowlist.is_line_allowlisted" }, - { - "path": "detect_secrets.filters.common.is_baseline_file", - "filename": ".secrets.baseline" - }, { "path": "detect_secrets.filters.common.is_ignored_due_to_verification_policies", "min_level": 2 @@ -215,6 +229,22 @@ "line_number": 125 } ], + "docs/migration_to_single_table_indexd.md": [ + { + "type": "Secret Keyword", + "filename": "docs/migration_to_single_table_indexd.md", + "hashed_secret": "ed8883d3d8dc2e3c7fc5d14cc2e8830dd27e8f96", + "is_verified": false, + "line_number": 26 + }, + { + "type": "Basic Auth Credentials", + "filename": "docs/migration_to_single_table_indexd.md", + "hashed_secret": "afc848c316af1a89d49826c5ae9d00ed769415f3", + "is_verified": false, + "line_number": 57 + } + ], "migrations/versions/15f2e9345ade_create_tables.py": [ { "type": "Hex High Entropy String", @@ -413,5 +443,5 @@ } ] }, - "generated_at": "2024-09-04T20:41:41Z" + "generated_at": "2024-09-09T17:22:44Z" } diff --git a/docs/migration_to_single_table_indexd.md b/docs/migration_to_single_table_indexd.md new file mode 100644 index 00000000..50ba11c0 --- /dev/null +++ b/docs/migration_to_single_table_indexd.md @@ -0,0 +1,103 @@ +# Running Data Migration for Single Table Indexd + +## A. Prepare Database and Configuration +1. **Deploy the version of IndexD** that contains Single Table Indexd. Alembic, used for database migration, should create a new table named `records` in the IndexD database. Note that this is a database migration and NOT a data migration. +2. **Create clone database:** +``` + # create a new database + gen3 psql indexd -c 'create database indexd_new' + # dump old db and restore it on the new one + gen3 db backup indexd | psql -U $indexd_user -h -d indexd_new +``` + +**If you don’t have permissions:** + +a. Run `gen3 db creds indexd` + +b. Using the information from above, run `gen3 psql $g3Farmserver -c "alter user $username createdb;"` + +3. **Update credentials:** After Creating the backup database, update the `Gen3Secrets/creds.json` to include the credentials for the new database. Add a new block named `indexd_new` with the credentials for the new database. 
Copy configuration from indexd. Run `gen3 kube-setup-secrets` creates the secrets in kube secrets. The new database can be accessed by using `gen3 psql indexd_new`

4. **Update cloud automation script:** `~/cloud-automation/kube/services/indexd/indexd-deploy.yaml`
```
    volumes:
      - name: creds-volume-new
        secret:
          secretName: "indexd_new"
    volumeMounts:
      - name: "creds-volume-new"
        readOnly: True
        mountPath: "var/www/indexd/new_creds.json"
        subPath: creds.json
```
After updating the cloud-auto script, run `gen3 roll indexd`

## B. Run Database Migration
Added a new migration job in cloud-automation, `indexd-single-table-migration-job.yaml`

To run:
```
   gen3 job run indexd-single-table-migration-job
```

**If a job stops in the middle of migration** for any reason, the job should return the last worked on. You can take the last seen guid and re-run the job with the `START_DID` parameter:

```
   gen3 job run indexd-single-table-migration-job START_DID
```

## C. Swap IndexD to use the clone database:
Go to the indexd settings under `cloud-automation/apis_config/indexd_settings.py`
Change the config `CONFIG["INDEX"]`

**From:**
```
    CONFIG["INDEX"] = {
        "driver": SQLAlchemyIndexDriver(
            "postgresql://postgres:postgres@localhost:5432/indexd_tests",
            echo=True,
            index_config={
                "DEFAULT_PREFIX": "testprefix:",
                "PREPEND_PREFIX": True,
                "ADD_PREFIX_ALIAS": False,
            },
        )
    }
```

**To:**
```
    from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver

    USE_SINGLE_TABLE = False

    if USE_SINGLE_TABLE is True:
        CONFIG["INDEX"] = {
            "driver": SingleTableSQLAlchemyIndexDriver(
                "postgresql://postgres:postgres@localhost:5432/indexd_tests",
                echo=True,
                index_config={
                    "DEFAULT_PREFIX": "testprefix:",
                    "PREPEND_PREFIX": True,
                    "ADD_PREFIX_ALIAS": False,
                },
            )
        }
    else:
        CONFIG["INDEX"] = {
            "driver": SQLAlchemyIndexDriver(
                "postgresql://postgres:postgres@localhost:5432/indexd_tests",
                echo=True,
                index_config={
                    "DEFAULT_PREFIX": "testprefix:",
                    "PREPEND_PREFIX": True,
                    "ADD_PREFIX_ALIAS": False,
                },
            )
        }
```

Import `from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver` and add the driver `SingleTableSQLAlchemyIndexDriver` similar to `SQLAlchemyIndexDriver`. Wrap those in an `if` statement as shown above and add a new configuration `USE_SINGLE_TABLE` to make it easier to swap between the two drivers.

## D. Swap the current running database with the snapshot:
In `creds.json`, you should have an indexd block and an indexd_new block; a hypothetical helper for the swap is sketched below.
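For illustration only, a minimal helper along these lines could perform that swap before re-running the secret setup. This is a sketch, not part of indexd or cloud-automation: the `Gen3Secrets/creds.json` location and the top-level `indexd`/`indexd_new` keys are taken from the steps above, and everything else is assumed.

```
#!/usr/bin/env python3
"""Hypothetical helper: swap the "indexd" and "indexd_new" blocks in creds.json."""
import json
import shutil
from pathlib import Path

# Assumed location of the secrets file; adjust for your environment.
CREDS_PATH = Path.home() / "Gen3Secrets" / "creds.json"


def swap_indexd_blocks(path: Path = CREDS_PATH) -> None:
    # Keep a backup so the swap is easy to undo.
    shutil.copy2(path, path.with_name(path.name + ".bak"))

    creds = json.loads(path.read_text())
    # Exchange the two service blocks in place.
    creds["indexd"], creds["indexd_new"] = creds["indexd_new"], creds["indexd"]

    path.write_text(json.dumps(creds, indent=2) + "\n")


if __name__ == "__main__":
    swap_indexd_blocks()
```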
Swap them out, `gen3 kube-setup-secrets` and `gen3 roll indexd` From cfe974cb0bd9e71b950658bccc2a1bb33085977e Mon Sep 17 00:00:00 2001 From: BinamB Date: Mon, 9 Sep 2024 15:03:47 -0500 Subject: [PATCH 44/47] fix bin setting file --- bin/indexd_settings.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bin/indexd_settings.py b/bin/indexd_settings.py index cdcfa704..cd677a24 100644 --- a/bin/indexd_settings.py +++ b/bin/indexd_settings.py @@ -25,7 +25,7 @@ def load_json(file_name): CONFIG["JSONIFY_PRETTYPRINT_REGULAR"] = False -DATABASE_FORMAT = "" +USE_SINGLE_TABLE = False dist = environ.get("DIST", None) if dist: @@ -35,7 +35,7 @@ def load_json(file_name): if drs_service_info: CONFIG["DRS_SERVICE_INFO"] = json.loads(drs_service_info) -if DATABASE_FORMAT.lower == "single_table": +if USE_SINGLE_TABLE is True: CONFIG["INDEX"] = { "driver": SingleTableSQLAlchemyIndexDriver( "postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}".format( From 4c0097ec14e34acd1cb975b83029d69c09cdaf04 Mon Sep 17 00:00:00 2001 From: BinamB Date: Mon, 9 Sep 2024 15:07:03 -0500 Subject: [PATCH 45/47] Add to settings --- bin/indexd_settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/indexd_settings.py b/bin/indexd_settings.py index cd677a24..ce0b7476 100644 --- a/bin/indexd_settings.py +++ b/bin/indexd_settings.py @@ -86,4 +86,4 @@ def load_json(file_name): arborist="http://localhost/", ) -settings = {"config": CONFIG, "auth": AUTH} +settings = {"config": CONFIG, "auth": AUTH, "use_single_table": USE_SINGLE_TABLE} From 9dbce6d868174a643b195e88758e747fa38d5bd1 Mon Sep 17 00:00:00 2001 From: BinamB Date: Mon, 9 Sep 2024 16:28:08 -0500 Subject: [PATCH 46/47] fix things --- docs/migration_to_single_table_indexd.md | 6 +++--- indexd/index/drivers/single_table_alchemy.py | 16 ++++++++-------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/docs/migration_to_single_table_indexd.md b/docs/migration_to_single_table_indexd.md index 50ba11c0..e9f68679 100644 --- a/docs/migration_to_single_table_indexd.md +++ b/docs/migration_to_single_table_indexd.md @@ -33,14 +33,14 @@ b. Using the information from above, run `gen3 psql $g3Farmserver -c "alter user After updating the cloud-auto script, run `gen3 roll indexd` ## B. Run Database Migration -Added a new migration job in cloud-automation, `indexd-single-table-migration-job.yaml` +Run the cloud-automation migration job, `indexd-single-table-migration-job.yaml` To run: ``` gen3 job run indexd-single-table-migration-job ``` -**If a job stops in the middle of migration** for any reason, the job should return the last worked on. 
You can take the last seen guid and re-run the job with the `START_DID` parameter: +**If a job stops in the middle of migration** for any reason, the job should return the last seen guid; re-run the job with the `START_DID` parameter: ``` gen3 job run indexd-single-table-migration-job START_DID @@ -69,7 +69,7 @@ Change the config `CONFIG[“INDEX”]` ``` from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver - USE_SINGLE_TABLE = False + USE_SINGLE_TABLE = True if USE_SINGLE_TABLE is True: CONFIG["INDEX"] = { diff --git a/indexd/index/drivers/single_table_alchemy.py b/indexd/index/drivers/single_table_alchemy.py index 458d61a0..eabc56be 100644 --- a/indexd/index/drivers/single_table_alchemy.py +++ b/indexd/index/drivers/single_table_alchemy.py @@ -995,14 +995,20 @@ def add_version( record.file_name = file_name record.version = version record.description = description - record.content_created_date = content_created_date - record.content_updated_date = content_updated_date record.urls = urls record.acl = acl record.authz = authz record.hashes = hashes record.record_metadata = metadata + self._validate_and_format_content_dates( + record=record, + content_created_date=content_created_date, + content_updated_date=content_updated_date, + ) + record.content_created_date = content_created_date + record.content_updated_date = content_updated_date + check_url_metadata(urls_metadata, record) record.url_metadata = urls_metadata @@ -1012,12 +1018,6 @@ def add_version( except IntegrityError: raise MultipleRecordsFound("{guid} already exists".format(guid=guid)) - self._validate_and_format_content_dates( - record=record, - content_created_date=content_created_date, - content_updated_date=content_updated_date, - ) - return record.guid, record.baseid, record.rev def add_blank_version( From af642754284ad0f467ee56b8b8d23731e9c8c11e Mon Sep 17 00:00:00 2001 From: Pauline Ribeyre <4224001+paulineribeyre@users.noreply.github.com> Date: Mon, 9 Sep 2024 16:39:47 -0500 Subject: [PATCH 47/47] minor fixes --- docs/migration_to_single_table_indexd.md | 4 ++-- indexd/index/drivers/alchemy.py | 8 +++----- indexd/index/drivers/single_table_alchemy.py | 12 +++++------- tests/ci_commands_script.sh | 2 +- 4 files changed, 11 insertions(+), 15 deletions(-) diff --git a/docs/migration_to_single_table_indexd.md b/docs/migration_to_single_table_indexd.md index e9f68679..b8f0320b 100644 --- a/docs/migration_to_single_table_indexd.md +++ b/docs/migration_to_single_table_indexd.md @@ -1,7 +1,7 @@ # Running Data Migration for Single Table Indexd ## A. Prepare Database and Configuration -1. **Deploy the version of IndexD** that contains Single Table Indexd. Alembic, used for database migration, should create a new table named `records` in the IndexD database. Note that this is a database migration and NOT a data migration. +1. **Deploy the version of IndexD** that contains Single Table Indexd. Alembic, used for database migrations, should create a new table named `records` in the IndexD database. Note that this is a database migration and NOT a data migration. 2. **Create clone database:** ``` # create a new database @@ -16,7 +16,7 @@ a. Run `gen3 db creds indexd` b. Using the information from above, run `gen3 psql $g3Farmserver -c "alter user $username createdb;"` -3. **Update credentials:** After Creating the backup database, update the `Gen3Secrets/creds.json` to include the credentials for the new database. Add a new block named `indexd_new` with the credentials for the new database. 
Copy configuration from indexd. Run `gen3 kube-setup-secrets` creates the secrets in kube secrets. The new database can be accessed by using `gen3 psql indexd_new` +3. **Update credentials:** After Creating the backup database, update the `Gen3Secrets/creds.json` to include the credentials for the new database. Add a new block named `indexd_new` with the credentials for the new database. Copy configuration from indexd. Run `gen3 kube-setup-secrets` to create the secrets in kube secrets. The new database can be accessed by using `gen3 psql indexd_new` 4. **Update cloud automation script:** `~/cloud-automation/kube/services/indexd/indexd-deploy.yaml` ``` diff --git a/indexd/index/drivers/alchemy.py b/indexd/index/drivers/alchemy.py index 54bd865e..d9f36a46 100644 --- a/indexd/index/drivers/alchemy.py +++ b/indexd/index/drivers/alchemy.py @@ -676,7 +676,7 @@ def get_urls(self, size=None, hashes=None, ids=None, start=0, limit=100): for r in query ] - def _validate_and_format_content_dates( + def _validate_and_set_content_dates( self, record, content_created_date, content_updated_date ): if content_created_date is not None: @@ -769,7 +769,7 @@ def add( record.description = description - self._validate_and_format_content_dates( + self._validate_and_set_content_dates( record=record, content_created_date=content_created_date, content_updated_date=content_updated_date, @@ -1371,8 +1371,6 @@ def add_version( record.file_name = file_name record.version = version record.description = description - record.content_created_date = content_created_date - record.content_updated_date = content_updated_date record.urls = [IndexRecordUrl(did=record.did, url=url) for url in urls] @@ -1393,7 +1391,7 @@ def add_version( for m_key, m_value in metadata.items() ] - self._validate_and_format_content_dates( + self._validate_and_set_content_dates( record=record, content_created_date=content_created_date, content_updated_date=content_updated_date, diff --git a/indexd/index/drivers/single_table_alchemy.py b/indexd/index/drivers/single_table_alchemy.py index eabc56be..a57fb68a 100644 --- a/indexd/index/drivers/single_table_alchemy.py +++ b/indexd/index/drivers/single_table_alchemy.py @@ -386,7 +386,7 @@ def get_urls(self, size=None, hashes=None, ids=None, start=0, limit=100): return return_urls - def _validate_and_format_content_dates( + def _validate_and_set_content_dates( self, record, content_created_date, content_updated_date ): if content_created_date is not None: @@ -468,7 +468,7 @@ def add( record.description = description - self._validate_and_format_content_dates( + self._validate_and_set_content_dates( record=record, content_created_date=content_created_date, content_updated_date=content_updated_date, @@ -1001,13 +1001,11 @@ def add_version( record.hashes = hashes record.record_metadata = metadata - self._validate_and_format_content_dates( + self._validate_and_set_content_dates( record=record, content_created_date=content_created_date, content_updated_date=content_updated_date, ) - record.content_created_date = content_created_date - record.content_updated_date = content_updated_date check_url_metadata(urls_metadata, record) record.url_metadata = urls_metadata @@ -1526,8 +1524,8 @@ def check_url_metadata(url_metadata, record): create url metadata record in database """ urls = {u for u in record.urls} - for url in url_metadata.items(): - if url[0] not in urls: + for url in url_metadata: + if url not in urls: raise UserError("url {} in url_metadata does not exist".format(url)) diff --git 
a/tests/ci_commands_script.sh b/tests/ci_commands_script.sh index 8fe8c315..2e2c4766 100644 --- a/tests/ci_commands_script.sh +++ b/tests/ci_commands_script.sh @@ -1,3 +1,3 @@ #!/usr/bin/env bash -poetry run pytest -vv --cov=indexd --cov-append --cov-report xml tests +poetry run pytest -vv --cov=indexd --cov=migrations/versions --cov-append --cov-report xml tests
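As a companion to the `START_DID` resume behavior described in `docs/migration_to_single_table_indexd.md` above, the sketch below illustrates the general shape of a resumable, keyset-paginated copy from the old `index_record` table into the single `records` table. It is not the actual cloud-automation job: the connection strings, batch size, and the restriction to a few scalar columns are all assumptions, and a real migration also has to carry over urls, hashes, acl, authz, and metadata the way the drivers above do.

```
"""Illustrative sketch of a resumable single-table migration loop (not the real job)."""
from sqlalchemy import create_engine, text

# Assumed connection strings; the real job reads these from mounted creds.
OLD_DB = "postgresql://postgres:postgres@localhost:5432/indexd_tests"  # pragma: allowlist secret
NEW_DB = "postgresql://postgres:postgres@localhost:5432/indexd_new"  # pragma: allowlist secret

BATCH_SIZE = 1000


def migrate(start_did=None):
    src = create_engine(OLD_DB)
    dst = create_engine(NEW_DB)
    last_did = start_did or ""
    while True:
        # Keyset pagination on did keeps each pass cheap and restartable.
        with src.connect() as conn:
            rows = conn.execute(
                text(
                    "SELECT did, baseid, rev, form, size FROM index_record "
                    "WHERE did > :last ORDER BY did LIMIT :batch"
                ),
                {"last": last_did, "batch": BATCH_SIZE},
            ).fetchall()
        if not rows:
            break
        with dst.begin() as conn:
            for row in rows:
                # Assumes guid is the primary key of the new records table.
                conn.execute(
                    text(
                        "INSERT INTO records (guid, baseid, rev, form, size) "
                        "VALUES (:did, :baseid, :rev, :form, :size) "
                        "ON CONFLICT (guid) DO NOTHING"
                    ),
                    dict(row._mapping),
                )
        last_did = rows[-1].did
        # Log progress so a failed run can resume via START_DID.
        print(f"last migrated did: {last_did}")


if __name__ == "__main__":
    migrate()
```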