From 7e9e862e0fa3bf89837bd5cb416e1095b3c833de Mon Sep 17 00:00:00 2001
From: Olzhas Arystanov
Date: Fri, 20 Dec 2024 18:29:07 +0500
Subject: [PATCH] Format with ruff

---
 b2sdk/__init__.py | 1 +
 b2sdk/_internal/account_info/abstract.py | 41 +-
 b2sdk/_internal/account_info/exception.py | 7 +-
 b2sdk/_internal/account_info/in_memory.py | 14 +-
 .../account_info/sqlite_account_info.py | 46 +-
 .../_internal/account_info/upload_url_pool.py | 1 +
 b2sdk/_internal/api.py | 39 +-
 b2sdk/_internal/api_config.py | 3 +-
 b2sdk/_internal/application_key.py | 8 +-
 b2sdk/_internal/b2http.py | 14 +-
 b2sdk/_internal/bucket.py | 38 +-
 b2sdk/_internal/encryption/setting.py | 21 +-
 b2sdk/_internal/encryption/types.py | 10 +-
 b2sdk/_internal/exception.py | 125 +++--
 b2sdk/_internal/file_lock.py | 50 +-
 b2sdk/_internal/file_version.py | 30 +-
 b2sdk/_internal/filter.py | 6 +-
 b2sdk/_internal/large_file/part.py | 5 +-
 b2sdk/_internal/large_file/services.py | 1 -
 b2sdk/_internal/progress.py | 6 +-
 b2sdk/_internal/raw_api.py | 183 ++++---
 b2sdk/_internal/raw_simulator.py | 311 +++++++-----
 b2sdk/_internal/replication/monitoring.py | 30 +-
 b2sdk/_internal/replication/setting.py | 30 +-
 b2sdk/_internal/replication/setup.py | 56 ++-
 b2sdk/_internal/requests/__init__.py | 2 +-
 .../requests/included_source_meta.py | 9 +-
 b2sdk/_internal/scan/exception.py | 16 +-
 b2sdk/_internal/scan/folder.py | 56 ++-
 b2sdk/_internal/scan/path.py | 23 +-
 b2sdk/_internal/scan/policies.py | 2 +-
 b2sdk/_internal/scan/report.py | 5 +-
 b2sdk/_internal/scan/scan.py | 2 +
 b2sdk/_internal/session.py | 13 +-
 b2sdk/_internal/stream/chained.py | 10 +-
 b2sdk/_internal/stream/hashing.py | 2 +-
 b2sdk/_internal/sync/action.py | 36 +-
 b2sdk/_internal/sync/policy.py | 17 +-
 b2sdk/_internal/sync/report.py | 13 +-
 b2sdk/_internal/sync/sync.py | 27 +-
 b2sdk/_internal/transfer/emerge/emerger.py | 5 +-
 b2sdk/_internal/transfer/emerge/exception.py | 1 +
 b2sdk/_internal/transfer/emerge/executor.py | 34 +-
 .../emerge/planner/part_definition.py | 18 +-
 .../transfer/emerge/planner/planner.py | 69 +--
 .../transfer/emerge/planner/upload_subpart.py | 9 +-
 .../transfer/emerge/unbound_write_intent.py | 7 +-
 .../_internal/transfer/emerge/write_intent.py | 12 +-
 .../transfer/inbound/download_manager.py | 2 +-
 .../transfer/inbound/downloaded_file.py | 13 +-
 .../transfer/inbound/downloader/abstract.py | 19 +-
 .../transfer/inbound/downloader/parallel.py | 78 ++-
 .../transfer/inbound/downloader/simple.py | 10 +-
 .../inbound/downloader/stats_collector.py | 13 +-
 .../transfer/outbound/copy_manager.py | 27 +-
 .../transfer/outbound/copy_source.py | 15 +-
 .../transfer/outbound/outbound_source.py | 2 +-
 .../transfer/outbound/upload_manager.py | 7 +-
 .../transfer/outbound/upload_source.py | 58 +--
 b2sdk/_internal/types.py | 11 +-
 b2sdk/_internal/utils/__init__.py | 20 +-
 b2sdk/_internal/utils/docs.py | 2 +-
 b2sdk/_internal/utils/escape.py | 4 +-
 b2sdk/_internal/utils/filesystem.py | 5 +-
 b2sdk/_internal/utils/range_.py | 1 +
 b2sdk/_internal/utils/thread_pool.py | 6 +-
 b2sdk/_internal/utils/typing.py | 2 +-
 b2sdk/_internal/version_utils.py | 32 +-
 b2sdk/_v3/__init__.py | 20 +-
 b2sdk/v0/__init__.py | 7 +-
 b2sdk/v0/account_info.py | 2 +-
 b2sdk/v0/exception.py | 9 +-
 b2sdk/v0/sync.py | 15 +-
 b2sdk/v1/__init__.py | 29 +-
 b2sdk/v1/account_info.py | 16 +-
 b2sdk/v1/api.py | 32 +-
 b2sdk/v1/b2http.py | 2 +-
 b2sdk/v1/bucket.py | 25 +-
 b2sdk/v1/download_dest.py | 49 +-
 b2sdk/v1/exception.py | 10 +-
 b2sdk/v1/file_metadata.py | 3 +-
 b2sdk/v1/file_version.py | 12 +-
 b2sdk/v1/replication/monitoring.py 
| 1 - b2sdk/v1/session.py | 4 +- b2sdk/v1/sync/encryption_provider.py | 2 +- b2sdk/v1/sync/file.py | 8 +- b2sdk/v1/sync/file_to_path_translator.py | 7 +- b2sdk/v1/sync/scan_policies.py | 4 +- b2sdk/v1/sync/sync.py | 13 +- b2sdk/v2/__init__.py | 6 +- b2sdk/v2/api.py | 2 +- b2sdk/v2/bucket.py | 18 +- b2sdk/v2/raw_api.py | 4 +- b2sdk/v2/raw_simulator.py | 6 +- b2sdk/v2/session.py | 4 +- b2sdk/v2/version_utils.py | 21 +- b2sdk/version.py | 12 +- doc/source/conf.py | 56 +-- noxfile.py | 55 +- pyproject.toml | 4 + test/integration/base.py | 15 +- test/integration/bucket_cleaner.py | 17 +- test/integration/cleanup_buckets.py | 5 +- test/integration/conftest.py | 38 +- test/integration/test_bucket.py | 38 +- test/integration/test_download.py | 18 +- .../test_file_version_attributes.py | 12 +- test/integration/test_raw_api.py | 128 ++--- test/integration/test_sync.py | 10 +- test/unit/account_info/fixtures.py | 20 +- test/unit/account_info/test_account_info.py | 35 +- .../account_info/test_sqlite_account_info.py | 4 +- test/unit/api/test_api.py | 138 +++-- test/unit/b2http/test_b2http.py | 25 +- test/unit/bucket/test_bucket.py | 450 +++++++++-------- test/unit/conftest.py | 4 +- test/unit/file_version/test_file_version.py | 17 +- test/unit/filter/test_filter.py | 30 +- test/unit/fixtures/folder.py | 5 +- test/unit/fixtures/raw_api.py | 2 +- test/unit/internal/test_emerge_planner.py | 179 ++++--- .../internal/test_unbound_write_intent.py | 4 +- .../transfer/downloader/test_parallel.py | 28 +- test/unit/replication/test_monitoring.py | 245 +++++---- test/unit/scan/test_folder_traversal.py | 471 +++++++++--------- test/unit/scan/test_scan_policies.py | 8 +- test/unit/stream/test_progress.py | 12 +- test/unit/sync/fixtures.py | 8 +- test/unit/sync/test_exception.py | 7 +- test/unit/sync/test_sync.py | 153 +++--- test/unit/sync/test_sync_report.py | 3 +- test/unit/test_cache.py | 2 +- test/unit/test_exception.py | 49 +- test/unit/test_progress.py | 16 +- test/unit/test_raw_simulator.py | 43 +- test/unit/test_session.py | 8 +- test/unit/utils/test_docs.py | 13 +- test/unit/utils/test_escape.py | 23 +- test/unit/utils/test_filesystem.py | 14 +- .../utils/test_incremental_hex_digester.py | 4 +- test/unit/utils/test_range_.py | 15 +- test/unit/utils/test_thread_pool.py | 1 - test/unit/v0/test_bucket.py | 132 +++-- test/unit/v0/test_copy_manager.py | 64 ++- test/unit/v0/test_download_dest.py | 12 +- test/unit/v0/test_file_metadata.py | 2 +- test/unit/v0/test_policy.py | 26 +- test/unit/v0/test_raw_api.py | 61 +-- test/unit/v0/test_session.py | 2 +- test/unit/v0/test_sync.py | 154 +++--- test/unit/v0/test_utils.py | 248 ++------- test/unit/v0/test_version_utils.py | 35 +- test/unit/v1/test_copy_manager.py | 64 ++- test/unit/v1/test_download_dest.py | 12 +- test/unit/v1/test_file_metadata.py | 2 +- test/unit/v1/test_policy.py | 26 +- test/unit/v1/test_raw_api.py | 61 +-- test/unit/v1/test_session.py | 2 +- test/unit/v1/test_sync.py | 154 +++--- test/unit/v1/test_utils.py | 248 ++------- test/unit/v1/test_version_utils.py | 35 +- test/unit/v2/test_bucket.py | 16 +- test/unit/v2/test_raw_api.py | 16 +- test/unit/v2/test_session.py | 9 +- test/unit/v_all/test_api.py | 15 +- test/unit/v_all/test_constants.py | 34 +- test/unit/v_all/test_replication.py | 1 + 167 files changed, 3165 insertions(+), 2832 deletions(-) diff --git a/b2sdk/__init__.py b/b2sdk/__init__.py index 3edaab638..f5258afb5 100644 --- a/b2sdk/__init__.py +++ b/b2sdk/__init__.py @@ -10,5 +10,6 @@ from __future__ import annotations import 
b2sdk.version # noqa: E402 + __version__ = b2sdk.version.VERSION assert __version__ # PEP-0396 diff --git a/b2sdk/_internal/account_info/abstract.py b/b2sdk/_internal/account_info/abstract.py index e48201844..8095f375e 100644 --- a/b2sdk/_internal/account_info/abstract.py +++ b/b2sdk/_internal/account_info/abstract.py @@ -142,8 +142,9 @@ def is_master_key(self) -> bool: new_style_master_key_suffix = '0000000000' if account_id == application_key_id: return True # old style - if len(application_key_id - ) == (3 + len(account_id) + len(new_style_master_key_suffix)): # 3 for cluster id + if len(application_key_id) == ( + 3 + len(account_id) + len(new_style_master_key_suffix) + ): # 3 for cluster id # new style if application_key_id.endswith(account_id + new_style_master_key_suffix): return True @@ -320,9 +321,17 @@ def set_auth_data( assert self.allowed_is_valid(allowed) self._set_auth_data( - account_id, auth_token, api_url, download_url, recommended_part_size, - absolute_minimum_part_size, application_key, realm, s3_api_url, allowed, - application_key_id + account_id, + auth_token, + api_url, + download_url, + recommended_part_size, + absolute_minimum_part_size, + application_key, + realm, + s3_api_url, + allowed, + application_key_id, ) @classmethod @@ -338,15 +347,27 @@ def allowed_is_valid(cls, allowed): :rtype: bool """ return ( - ('bucketId' in allowed) and ('bucketName' in allowed) and - ((allowed['bucketId'] is not None) or (allowed['bucketName'] is None)) and - ('capabilities' in allowed) and ('namePrefix' in allowed) + ('bucketId' in allowed) + and ('bucketName' in allowed) + and ((allowed['bucketId'] is not None) or (allowed['bucketName'] is None)) + and ('capabilities' in allowed) + and ('namePrefix' in allowed) ) @abstractmethod def _set_auth_data( - self, account_id, auth_token, api_url, download_url, recommended_part_size, - absolute_minimum_part_size, application_key, realm, s3_api_url, allowed, application_key_id + self, + account_id, + auth_token, + api_url, + download_url, + recommended_part_size, + absolute_minimum_part_size, + application_key, + realm, + s3_api_url, + allowed, + application_key_id, ): """ Actually store the auth data. Can assume that 'allowed' is present and valid. diff --git a/b2sdk/_internal/account_info/exception.py b/b2sdk/_internal/account_info/exception.py index bda210502..42b87b444 100644 --- a/b2sdk/_internal/account_info/exception.py +++ b/b2sdk/_internal/account_info/exception.py @@ -18,6 +18,7 @@ class AccountInfoError(B2Error, metaclass=ABCMeta): """ Base class for all account info errors. """ + pass @@ -35,8 +36,10 @@ def __init__(self, file_name): self.file_name = file_name def __str__(self): - return f'Account info file ({self.file_name}) appears corrupted. ' \ - f'Try removing and then re-authorizing the account.' + return ( + f'Account info file ({self.file_name}) appears corrupted. ' + f'Try removing and then re-authorizing the account.' 
+ ) class MissingAccountData(AccountInfoError): diff --git a/b2sdk/_internal/account_info/in_memory.py b/b2sdk/_internal/account_info/in_memory.py index d30e0114c..737a56fac 100644 --- a/b2sdk/_internal/account_info/in_memory.py +++ b/b2sdk/_internal/account_info/in_memory.py @@ -60,8 +60,18 @@ def _clear_in_memory_account_fields(self): self._s3_api_url = None def _set_auth_data( - self, account_id, auth_token, api_url, download_url, recommended_part_size, - absolute_minimum_part_size, application_key, realm, s3_api_url, allowed, application_key_id + self, + account_id, + auth_token, + api_url, + download_url, + recommended_part_size, + absolute_minimum_part_size, + application_key, + realm, + s3_api_url, + allowed, + application_key_id, ): self._account_id = account_id self._application_key_id = application_key_id diff --git a/b2sdk/_internal/account_info/sqlite_account_info.py b/b2sdk/_internal/account_info/sqlite_account_info.py index 8f1d558e4..234d9ddaf 100644 --- a/b2sdk/_internal/account_info/sqlite_account_info.py +++ b/b2sdk/_internal/account_info/sqlite_account_info.py @@ -29,7 +29,9 @@ B2_ACCOUNT_INFO_PROFILE_NAME_REGEXP = re.compile(r'[a-zA-Z0-9_\-]{1,64}') XDG_CONFIG_HOME_ENV_VAR = 'XDG_CONFIG_HOME' -DEFAULT_ABSOLUTE_MINIMUM_PART_SIZE = 5000000 # this value is used ONLY in migrating db, and in v1 wrapper, it is not +DEFAULT_ABSOLUTE_MINIMUM_PART_SIZE = ( + 5000000 # this value is used ONLY in migrating db, and in v1 wrapper, it is not +) # meant to be a default for other applications @@ -162,8 +164,13 @@ def _validate_database(self, last_upgrade_to_run=None): with open(self.filename, 'rb') as f: data = json.loads(f.read().decode('utf-8')) keys = [ - 'account_id', 'application_key', 'account_auth_token', 'api_url', - 'download_url', 'minimum_part_size', 'realm' + 'account_id', + 'application_key', + 'account_auth_token', + 'api_url', + 'download_url', + 'minimum_part_size', + 'realm', ] if all(k in data for k in keys): # remove the json file @@ -184,7 +191,7 @@ def _validate_database(self, last_upgrade_to_run=None): # new column absolute_minimum_part_size = DEFAULT_ABSOLUTE_MINIMUM_PART_SIZE conn.execute( insert_statement, - (*(data[k] for k in keys), DEFAULT_ABSOLUTE_MINIMUM_PART_SIZE) + (*(data[k] for k in keys), DEFAULT_ABSOLUTE_MINIMUM_PART_SIZE), ) # all is happy now return @@ -287,8 +294,9 @@ def _create_tables(self, conn, last_upgrade_to_run): self._ensure_update(3, ['ALTER TABLE account ADD COLUMN s3_api_url TEXT;']) if 4 <= last_upgrade_to_run: self._ensure_update( - 4, [ - """ + 4, + [ + f""" CREATE TABLE tmp_account ( account_id TEXT NOT NULL, @@ -296,14 +304,14 @@ def _create_tables(self, conn, last_upgrade_to_run): account_auth_token TEXT NOT NULL, api_url TEXT NOT NULL, download_url TEXT NOT NULL, - absolute_minimum_part_size INT NOT NULL DEFAULT {}, + absolute_minimum_part_size INT NOT NULL DEFAULT {DEFAULT_ABSOLUTE_MINIMUM_PART_SIZE}, recommended_part_size INT NOT NULL, realm TEXT NOT NULL, allowed TEXT, account_id_or_app_key_id TEXT, s3_api_url TEXT ); - """.format(DEFAULT_ABSOLUTE_MINIMUM_PART_SIZE), + """, """INSERT INTO tmp_account( account_id, application_key, @@ -373,7 +381,7 @@ def _create_tables(self, conn, last_upgrade_to_run): FROM tmp_account; """, 'DROP TABLE tmp_account;', - ] + ], ) def _ensure_update(self, update_number, update_commands: list[str]): @@ -387,7 +395,7 @@ def _ensure_update(self, update_number, update_commands: list[str]): conn.execute('BEGIN') cursor = conn.execute( 'SELECT COUNT(*) AS count FROM update_done WHERE update_number = 
?;', - (update_number,) + (update_number,), ) update_count = cursor.fetchone()[0] if update_count == 0: @@ -433,7 +441,8 @@ def _set_auth_data( """ conn.execute( - insert_statement, ( + insert_statement, + ( account_id, application_key_id, application_key, @@ -445,7 +454,7 @@ def _set_auth_data( realm, json.dumps(allowed), s3_api_url, - ) + ), ) def set_auth_data_with_schema_0_for_test( @@ -480,7 +489,8 @@ def set_auth_data_with_schema_0_for_test( """ conn.execute( - insert_statement, ( + insert_statement, + ( account_id, application_key, auth_token, @@ -488,7 +498,7 @@ def set_auth_data_with_schema_0_for_test( download_url, minimum_part_size, realm, - ) + ), ) def get_application_key(self): @@ -576,17 +586,17 @@ def _get_account_info_or_raise(self, column_name): except Exception as e: logger.exception( '_get_account_info_or_raise encountered a problem while trying to retrieve "%s"', - column_name + column_name, ) raise MissingAccountData(str(e)) def refresh_entire_bucket_name_cache(self, name_id_iterable): with self._get_connection() as conn: conn.execute('DELETE FROM bucket;') - for (bucket_name, bucket_id) in name_id_iterable: + for bucket_name, bucket_id in name_id_iterable: conn.execute( 'INSERT INTO bucket (bucket_name, bucket_id) VALUES (?, ?);', - (bucket_name, bucket_id) + (bucket_name, bucket_id), ) def save_bucket(self, bucket): @@ -594,7 +604,7 @@ def save_bucket(self, bucket): conn.execute('DELETE FROM bucket WHERE bucket_id = ?;', (bucket.id_,)) conn.execute( 'INSERT INTO bucket (bucket_id, bucket_name) VALUES (?, ?);', - (bucket.id_, bucket.name) + (bucket.id_, bucket.name), ) def remove_bucket_name(self, bucket_name): diff --git a/b2sdk/_internal/account_info/upload_url_pool.py b/b2sdk/_internal/account_info/upload_url_pool.py index d409d4184..e96b38bbe 100644 --- a/b2sdk/_internal/account_info/upload_url_pool.py +++ b/b2sdk/_internal/account_info/upload_url_pool.py @@ -71,6 +71,7 @@ class UrlPoolAccountInfo(AbstractAccountInfo): Implement part of :py:class:`AbstractAccountInfo` for upload URL pool management with a simple, key-value storage, such as :py:class:`b2sdk.v2.UploadUrlPool`. """ + # staticmethod is necessary here to avoid the first argument binding to the first argument (like ``partial(fun, arg)``) BUCKET_UPLOAD_POOL_CLASS = staticmethod( UploadUrlPool diff --git a/b2sdk/_internal/api.py b/b2sdk/_internal/api.py index cbc1b792e..1551c8cc5 100644 --- a/b2sdk/_internal/api.py +++ b/b2sdk/_internal/api.py @@ -68,7 +68,8 @@ def url_for_api(info, api_name): class Services: - """ Gathers objects that provide high level logic over raw api usage. """ + """Gathers objects that provide high level logic over raw api usage.""" + UPLOAD_MANAGER_CLASS = staticmethod(UploadManager) COPY_MANAGER_CLASS = staticmethod(CopyManager) DOWNLOAD_MANAGER_CLASS = staticmethod(DownloadManager) @@ -133,6 +134,7 @@ class handles several things that simplify the task of uploading The class also keeps a cache of information needed to access the service, such as auth tokens and upload URLs. 
""" + BUCKET_FACTORY_CLASS = staticmethod(BucketFactory) BUCKET_CLASS = staticmethod(Bucket) SESSION_CLASS = staticmethod(B2Session) @@ -272,8 +274,12 @@ def create_bucket( replication=replication, ) bucket = self.BUCKET_FACTORY_CLASS.from_api_bucket_dict(self, response) - assert name == bucket.name, f'API created a bucket with different name than requested: {name} != {name}' - assert bucket_type == bucket.type_, f'API created a bucket with different type than requested: {bucket_type} != {bucket.type_}' + assert ( + name == bucket.name + ), f'API created a bucket with different name than requested: {name} != {name}' + assert ( + bucket_type == bucket.type_ + ), f'API created a bucket with different type than requested: {bucket_type} != {bucket.type_}' self.cache.save_bucket(bucket) return bucket @@ -389,8 +395,9 @@ def delete_bucket(self, bucket): account_id = self.account_info.get_account_id() self.session.delete_bucket(account_id, bucket.id_) - def list_buckets(self, bucket_name=None, bucket_id=None, *, - use_cache: bool = False) -> Sequence[Bucket]: + def list_buckets( + self, bucket_name=None, bucket_id=None, *, use_cache: bool = False + ) -> Sequence[Bucket]: """ Call ``b2_list_buckets`` and return a list of buckets. @@ -418,13 +425,14 @@ def list_buckets(self, bucket_name=None, bucket_id=None, *, cached_list = self.cache.list_bucket_names_ids() buckets = [ self.BUCKET_CLASS(self, cache_b_id, name=cached_b_name) - for cached_b_name, cache_b_id in cached_list if ( - (bucket_name is None or bucket_name == cached_b_name) and - (bucket_id is None or bucket_id == cache_b_id) + for cached_b_name, cache_b_id in cached_list + if ( + (bucket_name is None or bucket_name == cached_b_name) + and (bucket_id is None or bucket_id == cache_b_id) ) ] if buckets: - logger.debug("Using cached bucket list as it is not empty") + logger.debug('Using cached bucket list as it is not empty') return buckets account_id = self.account_info.get_account_id() @@ -494,8 +502,8 @@ def get_download_url_for_file_name(self, bucket_name, file_name): :param str file_name: a file name """ self.check_bucket_name_restrictions(bucket_name) - return '{}/file/{}/{}'.format( - self.account_info.get_download_url(), bucket_name, b2_url_encode(file_name) + return ( + f'{self.account_info.get_download_url()}/file/{bucket_name}/{b2_url_encode(file_name)}' ) # keys @@ -524,7 +532,7 @@ def create_key( key_name=key_name, valid_duration_seconds=valid_duration_seconds, bucket_id=bucket_id, - name_prefix=name_prefix + name_prefix=name_prefix, ) assert set(response['capabilities']) == set(capabilities) @@ -551,8 +559,9 @@ def delete_key_by_id(self, application_key_id: str) -> ApplicationKey: response = self.session.delete_key(application_key_id=application_key_id) return ApplicationKey.from_api_response(response) - def list_keys(self, start_application_key_id: str | None = None - ) -> Generator[ApplicationKey, None, None]: + def list_keys( + self, start_application_key_id: str | None = None + ) -> Generator[ApplicationKey, None, None]: """ List application keys. Lazily perform requests to B2 cloud and return all keys. @@ -603,7 +612,7 @@ def get_file_info(self, file_id: str) -> FileVersion: def get_file_info_by_name(self, bucket_name: str, file_name: str) -> DownloadVersion: """ - Gets info about a file version. Similar to `get_file_info` but + Gets info about a file version. Similar to `get_file_info` but takes the bucket name and file name instead of file id. :param str bucket_name: The name of the bucket where the file resides. 
diff --git a/b2sdk/_internal/api_config.py b/b2sdk/_internal/api_config.py index 47781a213..e10dbac20 100644 --- a/b2sdk/_internal/api_config.py +++ b/b2sdk/_internal/api_config.py @@ -17,7 +17,6 @@ class B2HttpApiConfig: - DEFAULT_RAW_API_CLASS = B2RawHTTPApi def __init__( @@ -26,7 +25,7 @@ def __init__( install_clock_skew_hook: bool = True, user_agent_append: str | None = None, _raw_api_class: type[AbstractRawApi] | None = None, - decode_content: bool = False + decode_content: bool = False, ): """ A structure with params to be passed to low level API. diff --git a/b2sdk/_internal/application_key.py b/b2sdk/_internal/application_key.py index 4dc3a8487..c005ee0cd 100644 --- a/b2sdk/_internal/application_key.py +++ b/b2sdk/_internal/application_key.py @@ -60,12 +60,11 @@ def parse_response_dict(cls, response: dict): } return { **mandatory_args, - **{key: value - for key, value in optional_args.items() if value is not None}, + **{key: value for key, value in optional_args.items() if value is not None}, } def has_capabilities(self, capabilities) -> bool: - """ checks whether the key has ALL of the given capabilities """ + """checks whether the key has ALL of the given capabilities""" return len(set(capabilities) - set(self.capabilities)) == 0 def as_dict(self): @@ -84,8 +83,7 @@ def as_dict(self): } return { **mandatory_keys, - **{key: value - for key, value in optional_keys.items() if value is not None}, + **{key: value for key, value in optional_keys.items() if value is not None}, } diff --git a/b2sdk/_internal/b2http.py b/b2sdk/_internal/b2http.py index 448b075cc..4044a1014 100644 --- a/b2sdk/_internal/b2http.py +++ b/b2sdk/_internal/b2http.py @@ -61,7 +61,7 @@ def _print_exception(e, indent=''): """ print(indent + 'EXCEPTION', repr(e)) print(indent + 'CLASS', type(e)) - for (i, a) in enumerate(e.args): + for i, a in enumerate(e.args): print(indent + 'ARG %d: %s' % (i, repr(a))) if isinstance(a, Exception): _print_exception(a, indent + ' ') @@ -144,7 +144,7 @@ def post_request(self, method, url, headers, response): # Convert the server time to a datetime object try: - with setlocale("C"): + with setlocale('C'): # "%Z" always creates naive datetimes, even though the timezone # is specified. https://github.com/python/cpython/issues/76678 # Anyway, thankfully, HTTP/1.1 spec requires the string @@ -314,13 +314,11 @@ def request_content_return_json( response = self.request( method, url, - headers={ - **headers, 'Accept': 'application/json' - }, + headers={**headers, 'Accept': 'application/json'}, data=data, try_count=try_count, params=params, - _timeout=_timeout + _timeout=_timeout, ) # Decode the JSON that came back. 
If we've gotten this far, @@ -518,7 +516,9 @@ def _translate_errors(cls, fcn, post_params=None): except (TypeError, ValueError) as exc: logger.warning( 'Inconsistent status codes returned by the server %r != %r; parsing exception: %r', - error.get('status'), response.status_code, exc + error.get('status'), + response.status_code, + exc, ) status = response.status_code diff --git a/b2sdk/_internal/bucket.py b/b2sdk/_internal/bucket.py index eb665519c..e6df3495c 100644 --- a/b2sdk/_internal/bucket.py +++ b/b2sdk/_internal/bucket.py @@ -122,8 +122,10 @@ def _add_file_info_item(self, file_info: dict[str, str], name: str, value: str | if value is not None: if name in file_info and file_info[name] != value: logger.warning( - 'Overwriting file info key %s with value %s (previous value %s)', name, value, - file_info[name] + 'Overwriting file info key %s with value %s (previous value %s)', + name, + value, + file_info[name], ) file_info[name] = value @@ -228,7 +230,7 @@ def update( default_retention=default_retention, replication=replication, is_file_lock_enabled=is_file_lock_enabled, - ) + ), ) def cancel_large_file(self, file_id): @@ -348,7 +350,7 @@ def list_file_versions( """ if fetch_count is not None and fetch_count <= 0: # fetch_count equal to 0 means "use API default", which we don't want to support here - raise ValueError("unsupported fetch_count value") + raise ValueError('unsupported fetch_count value') start_file_name = file_name start_file_id = None session = self.api.session @@ -449,7 +451,7 @@ def ls( # +1 to include the starter character. Using posix path to # ensure consistent behaviour on Windows (e.g. case sensitivity). - path = pathlib.PurePosixPath(folder_to_list[:starter_index + 1]) + path = pathlib.PurePosixPath(folder_to_list[: starter_index + 1]) parent_path = str(path.parent) # Path considers dot to be the empty path. # There's no shorter path than that. @@ -495,7 +497,7 @@ def ls( if not filter_matcher.match(file_version.file_name): continue - after_prefix = file_version.file_name[len(prefix):] + after_prefix = file_version.file_name[len(prefix) :] # In case of wildcards, we don't care about folders at all, and it's recursive by default. 
if '/' not in after_prefix or recursive: # This is not a folder, so we'll print it out and @@ -684,7 +686,7 @@ def upload_local_file( sources = upload_source.get_incremental_sources( existing_file_info, - self.api.session.account_info.get_absolute_minimum_part_size() + self.api.session.account_info.get_absolute_minimum_part_size(), ) if len(sources) > 1 and not large_file_sha1: @@ -1140,7 +1142,7 @@ def _create_file( content_disposition: str | None = None, content_encoding: str | None = None, content_language: str | None = None, - **kwargs + **kwargs, ): validate_b2_file_name(file_name) progress_listener = progress_listener or DoNothingProgressListener() @@ -1168,7 +1170,7 @@ def _create_file( min_part_size=min_part_size, max_part_size=max_part_size, large_file_sha1=large_file_sha1, - **kwargs + **kwargs, ) def concatenate( @@ -1338,11 +1340,7 @@ def get_download_url(self, filename): :param str filename: a file name :rtype: str """ - return "{}/file/{}/{}".format( - self.api.account_info.get_download_url(), - b2_url_encode(self.name), - b2_url_encode(filename), - ) + return f'{self.api.account_info.get_download_url()}/file/{b2_url_encode(self.name)}/{b2_url_encode(filename)}' def hide_file(self, file_name): """ @@ -1366,11 +1364,11 @@ def unhide_file(self, file_name: str, bypass_governance: bool = False) -> FileId raise FileNotPresent(bucket_name=self.name, file_id_or_name=file_name) action = latest_file_version.action - if action == "upload": + if action == 'upload': raise FileNotHidden(file_name) - elif action == "delete": + elif action == 'delete': raise FileDeleted(file_name) - elif action != "hide": + elif action != 'hide': raise UnexpectedFileVersionAction(action) return self.delete_file_version(latest_file_version.id_, file_name, bypass_governance) @@ -1539,8 +1537,9 @@ def get_notification_rules(self) -> list[NotificationRuleResponse]: """ return self.api.session.get_bucket_notification_rules(self.id_) - def set_notification_rules(self, - rules: Iterable[NotificationRule]) -> list[NotificationRuleResponse]: + def set_notification_rules( + self, rules: Iterable[NotificationRule] + ) -> list[NotificationRuleResponse]: """ Set notification rules for this bucket. """ @@ -1551,6 +1550,7 @@ class BucketFactory: """ This is a factory for creating bucket objects from different kind of objects. """ + BUCKET_CLASS = staticmethod(Bucket) @classmethod diff --git a/b2sdk/_internal/encryption/setting.py b/b2sdk/_internal/encryption/setting.py index e072b6cb7..07663d7c4 100644 --- a/b2sdk/_internal/encryption/setting.py +++ b/b2sdk/_internal/encryption/setting.py @@ -28,6 +28,7 @@ class _UnknownKeyId(enum.Enum): """The purpose of this enum is to provide a sentinel that can be used with type annotations.""" + unknown_key_id = 0 @@ -44,6 +45,7 @@ class EncryptionKey: in encrypted file's fileInfo, or UNKNOWN_KEY_ID when that information is missing. The secret may be None, if encryption metadata is read from the server. 
""" + SECRET_REPR = '******' def __init__(self, secret: bytes | None, key_id: str | None | _UnknownKeyId): @@ -117,7 +119,9 @@ def __init__( def __eq__(self, other): if other is None: raise ValueError('cannot compare a known encryption setting to an unknown one') - return self.mode == other.mode and self.algorithm == other.algorithm and self.key == other.key + return ( + self.mode == other.mode and self.algorithm == other.algorithm and self.key == other.key + ) def serialize_to_json_for_request(self): if self.key and self.key.secret is None: @@ -206,13 +210,12 @@ def add_key_id_to_file_info(self, file_info: dict | None): raise ValueError('Cannot add an unknown key id to file info') if file_info is None: file_info = {} - if file_info.get(SSE_C_KEY_ID_FILE_INFO_KEY_NAME) is not None and file_info[ - SSE_C_KEY_ID_FILE_INFO_KEY_NAME] != self.key.key_id: + if ( + file_info.get(SSE_C_KEY_ID_FILE_INFO_KEY_NAME) is not None + and file_info[SSE_C_KEY_ID_FILE_INFO_KEY_NAME] != self.key.key_id + ): raise ValueError( - 'Ambiguous key id set: "{}" in file_info and "{}" in {}'.format( - file_info[SSE_C_KEY_ID_FILE_INFO_KEY_NAME], self.key.key_id, - self.__class__.__name__ - ) + f'Ambiguous key id set: "{file_info[SSE_C_KEY_ID_FILE_INFO_KEY_NAME]}" in file_info and "{self.key.key_id}" in {self.__class__.__name__}' ) file_info[SSE_C_KEY_ID_FILE_INFO_KEY_NAME] = self.key.key_id return file_info @@ -351,7 +354,9 @@ def from_response_headers(cls, headers): return EncryptionSetting(EncryptionMode.NONE) -SSE_NONE = EncryptionSetting(mode=EncryptionMode.NONE,) +SSE_NONE = EncryptionSetting( + mode=EncryptionMode.NONE, +) """ Commonly used "no encryption" setting """ diff --git a/b2sdk/_internal/encryption/types.py b/b2sdk/_internal/encryption/types.py index d095a22a2..811c3f5e7 100644 --- a/b2sdk/_internal/encryption/types.py +++ b/b2sdk/_internal/encryption/types.py @@ -30,18 +30,16 @@ class EncryptionMode(Enum): """Encryption mode.""" UNKNOWN = None #: unknown encryption mode (sdk doesn't know or used key has no rights to know) - NONE = "none" #: no encryption (plaintext) + NONE = 'none' #: no encryption (plaintext) SSE_B2 = 'SSE-B2' #: server-side encryption with key maintained by B2 SSE_C = 'SSE-C' #: server-side encryption with key provided by the client - #CLIENT = 'CLIENT' #: client-side encryption + # CLIENT = 'CLIENT' #: client-side encryption def can_be_set_as_bucket_default(self): return self in BUCKET_DEFAULT_ENCRYPTION_MODES -ENCRYPTION_MODES_WITH_MANDATORY_ALGORITHM = frozenset( - (EncryptionMode.SSE_B2, EncryptionMode.SSE_C) -) # yapf: off -ENCRYPTION_MODES_WITH_MANDATORY_KEY = frozenset((EncryptionMode.SSE_C,)) # yapf: off +ENCRYPTION_MODES_WITH_MANDATORY_ALGORITHM = frozenset((EncryptionMode.SSE_B2, EncryptionMode.SSE_C)) +ENCRYPTION_MODES_WITH_MANDATORY_KEY = frozenset((EncryptionMode.SSE_C,)) BUCKET_DEFAULT_ENCRYPTION_MODES = frozenset((EncryptionMode.NONE, EncryptionMode.SSE_B2)) diff --git a/b2sdk/_internal/exception.py b/b2sdk/_internal/exception.py index a42b21b5e..16764de1d 100644 --- a/b2sdk/_internal/exception.py +++ b/b2sdk/_internal/exception.py @@ -218,14 +218,7 @@ def __init__(self, dest_path, source_path, dest_prefix, source_prefix): self.source_prefix = source_prefix def __str__(self): - return 'source file is older than destination: {}{} with a time of {} cannot be synced to {}{} with a time of {}, unless a valid newer_file_mode is provided'.format( - self.source_prefix, - self.source_path.relative_path, - self.source_path.mod_time, - self.dest_prefix, - 
self.dest_path.relative_path, - self.dest_path.mod_time, - ) + return f'source file is older than destination: {self.source_prefix}{self.source_path.relative_path} with a time of {self.source_path.mod_time} cannot be synced to {self.dest_prefix}{self.dest_path.relative_path} with a time of {self.dest_path.mod_time}, unless a valid newer_file_mode is provided' def should_retry_http(self): return True @@ -281,7 +274,7 @@ class FileNameNotAllowed(NotAllowedByAppKeyError): class FileNotPresent(FileOrBucketNotFound): def __str__(self): # overridden to retain message across prev versions - return "File not present%s" % (': ' + self.file_id_or_name if self.file_id_or_name else "") + return 'File not present%s' % (': ' + self.file_id_or_name if self.file_id_or_name else '') class UnusableFileName(B2SimpleError): @@ -291,6 +284,7 @@ class UnusableFileName(B2SimpleError): Could possibly use InvalidUploadSource, but this is intended for the filename on the server, which could differ. https://www.backblaze.com/b2/docs/files.html. """ + pass @@ -309,11 +303,14 @@ def __init__(self, content_length, range_): self.range_ = range_ def __str__(self): - return 'A range of %d-%d was requested (size of %d), but cloud could only serve %d of that' % ( - self.range_[0], - self.range_[1], - self.range_[1] - self.range_[0] + 1, - self.content_length, + return ( + 'A range of %d-%d was requested (size of %d), but cloud could only serve %d of that' + % ( + self.range_[0], + self.range_[1], + self.range_[1] - self.range_[0] + 1, + self.content_length, + ) ) @@ -385,7 +382,7 @@ def __init__(self): super().__init__('') def __str__(self): - return 'Application key is restricted to a bucket that doesn\'t exist' + return "Application key is restricted to a bucket that doesn't exist" class MaxFileSizeExceeded(B2Error): @@ -415,7 +412,7 @@ class MissingPart(B2SimpleError): class NonExistentBucket(FileOrBucketNotFound): def __str__(self): # overridden to retain message across prev versions - return "No such bucket%s" % (': ' + self.bucket_name if self.bucket_name else "") + return 'No such bucket%s' % (': ' + self.bucket_name if self.bucket_name else '') class FileSha1Mismatch(B2SimpleError): @@ -496,7 +493,7 @@ class UnrecognizedBucketType(B2Error): class UnsatisfiableRange(B2Error): def __str__(self): - return "The range in the request is outside the size of the file" + return 'The range in the request is outside the size of the file' class UploadTokenUsedConcurrently(B2Error): @@ -505,23 +502,25 @@ def __init__(self, token): self.token = token def __str__(self): - return f"More than one concurrent upload using auth token {self.token}" + return f'More than one concurrent upload using auth token {self.token}' class AccessDenied(B2Error): def __str__(self): - return "This call with these parameters is not allowed for this auth token" + return 'This call with these parameters is not allowed for this auth token' class SSECKeyError(AccessDenied): def __str__(self): - return "Wrong or no SSE-C key provided when reading a file." + return 'Wrong or no SSE-C key provided when reading a file.' 
class RetentionWriteError(AccessDenied): def __str__(self): - return "Auth token not authorized to write retention or file already in 'compliance' mode or " \ - "bypassGovernance=true parameter missing" + return ( + "Auth token not authorized to write retention or file already in 'compliance' mode or " + 'bypassGovernance=true parameter missing' + ) class WrongEncryptionModeForBucketDefault(InvalidUserInput): @@ -530,7 +529,7 @@ def __init__(self, encryption_mode): self.encryption_mode = encryption_mode def __str__(self): - return f"{self.encryption_mode} cannot be used as default for a bucket." + return f'{self.encryption_mode} cannot be used as default for a bucket.' class CopyArgumentsMismatch(InvalidUserInput): @@ -539,17 +538,17 @@ class CopyArgumentsMismatch(InvalidUserInput): class DisablingFileLockNotSupported(B2Error): def __str__(self): - return "Disabling file lock is not supported" + return 'Disabling file lock is not supported' class SourceReplicationConflict(B2Error): def __str__(self): - return "Operation not supported for buckets with source replication" + return 'Operation not supported for buckets with source replication' class EnablingFileLockOnRestrictedBucket(B2Error): def __str__(self): - return "Turning on file lock for a restricted bucket is not allowed" + return 'Turning on file lock for a restricted bucket is not allowed' class InvalidJsonResponse(B2SimpleError): @@ -557,7 +556,7 @@ class InvalidJsonResponse(B2SimpleError): def __init__(self, content: bytes): self.content = content - message = self.content[:self.UP_TO_BYTES_COUNT].decode('utf-8', errors='replace') + message = self.content[: self.UP_TO_BYTES_COUNT].decode('utf-8', errors='replace') if len(self.content) > self.UP_TO_BYTES_COUNT: message += '...' @@ -617,23 +616,19 @@ def _event_type_invalid_error(code: str, message: str, **_) -> B2Error: valid_types = sorted(typing.get_args(EVENT_TYPE)) return EventTypeInvalidError( - f"Event Type error: {message!r}. Valid types: {sorted(valid_types)!r}", code + f'Event Type error: {message!r}. 
Valid types: {sorted(valid_types)!r}', code ) _error_handlers: dict[tuple[int, str | None], typing.Callable] = { - (400, "event_type_categories"): - lambda code, message, **_: EventTypeCategoriesError(message, code), - (400, "event_type_overlap"): - lambda code, message, **_: EventTypeOverlapError(message, code), - (400, "event_types_empty"): - lambda code, message, **_: EventTypesEmptyError(message, code), - (400, "event_type_invalid"): - _event_type_invalid_error, - (401, "email_not_verified"): - lambda code, message, **_: EmailNotVerified(message, code), - (401, "no_payment_history"): - lambda code, message, **_: NoPaymentHistory(message, code), + (400, 'event_type_categories'): lambda code, message, **_: EventTypeCategoriesError( + message, code + ), + (400, 'event_type_overlap'): lambda code, message, **_: EventTypeOverlapError(message, code), + (400, 'event_types_empty'): lambda code, message, **_: EventTypesEmptyError(message, code), + (400, 'event_type_invalid'): _event_type_invalid_error, + (401, 'email_not_verified'): lambda code, message, **_: EmailNotVerified(message, code), + (401, 'no_payment_history'): lambda code, message, **_: NoPaymentHistory(message, code), } @@ -643,7 +638,7 @@ def interpret_b2_error( code: str | None, message: str | None, response_headers: dict[str, Any], - post_params: dict[str, Any] | None = None + post_params: dict[str, Any] | None = None, ) -> B2Error: post_params = post_params or {} @@ -654,18 +649,17 @@ def interpret_b2_error( code=code, message=message, response_headers=response_headers, - post_params=post_params + post_params=post_params, ) if error: return error - if status == 400 and code == "already_hidden": + if status == 400 and code == 'already_hidden': return FileAlreadyHidden(post_params.get('fileName')) elif status == 400 and code == 'bad_json': return BadJson(message) - elif ( - (status == 400 and code in ("no_such_file", "file_not_present")) or - (status == 404 and code == "not_found") + elif (status == 400 and code in ('no_such_file', 'file_not_present')) or ( + status == 404 and code == 'not_found' ): # hide_file returns 400 and "no_such_file" # delete_file_version returns 400 and "file_not_present" @@ -680,22 +674,22 @@ def interpret_b2_error( # We should ideally only reach that case on programming error or outdated # sdk versions, but to prevent user confusion we omit the message param return ResourceNotFound() - elif status == 400 and code == "duplicate_bucket_name": + elif status == 400 and code == 'duplicate_bucket_name': return DuplicateBucketName(post_params.get('bucketName')) - elif status == 400 and code == "missing_part": + elif status == 400 and code == 'missing_part': return MissingPart(post_params.get('fileId')) - elif status == 400 and code == "part_sha1_mismatch": + elif status == 400 and code == 'part_sha1_mismatch': return PartSha1Mismatch(post_params.get('fileId')) - elif status == 400 and code == "bad_bucket_id": + elif status == 400 and code == 'bad_bucket_id': return BucketIdNotFound(post_params.get('bucketId')) - elif status == 400 and code == "auth_token_limit": + elif status == 400 and code == 'auth_token_limit': matcher = UPLOAD_TOKEN_USED_CONCURRENTLY_ERROR_MESSAGE_RE.match(message) - assert matcher is not None, f"unexpected error message: {message}" + assert matcher is not None, f'unexpected error message: {message}' token = matcher.group('token') return UploadTokenUsedConcurrently(token) - elif status == 400 and code == "source_too_large": + elif status == 400 and code == 'source_too_large': matcher = 
COPY_SOURCE_TOO_BIG_ERROR_MESSAGE_RE.match(message) - assert matcher is not None, f"unexpected error message: {message}" + assert matcher is not None, f'unexpected error message: {message}' size = int(matcher.group('size')) return CopySourceTooBig(message, code, size) elif status == 400 and code == 'file_lock_conflict': @@ -705,13 +699,18 @@ def interpret_b2_error( elif status == 400 and code == 'restricted_bucket_conflict': return EnablingFileLockOnRestrictedBucket() elif status == 400 and code == 'bad_request': - # it's "bad_request" on 2022-09-14, but will become 'disabling_file_lock_not_allowed' # TODO: cleanup after 2022-09-22 - if message == 'fileLockEnabled value of false is not allowed when bucket is already file lock enabled.': + if ( + message + == 'fileLockEnabled value of false is not allowed when bucket is already file lock enabled.' + ): return DisablingFileLockNotSupported() # it's "bad_request" on 2022-09-14, but will become 'source_replication_conflict' # TODO: cleanup after 2022-09-22 - if message == 'Turning on file lock for an existing bucket having source replication configuration is not allowed.': + if ( + message + == 'Turning on file lock for an existing bucket having source replication configuration is not allowed.' + ): return SourceReplicationConflict() # it's "bad_request" on 2022-09-14, but will become 'restricted_bucket_conflict' # TODO: cleanup after 2022-09-22 @@ -721,22 +720,22 @@ def interpret_b2_error( return BadRequest(message, code) elif status == 400: warnings.warn( - f"bad request exception with an unknown `code`. message={message}, code={code}" + f'bad request exception with an unknown `code`. message={message}, code={code}' ) return BadRequest(message, code) - elif status == 401 and code in ("bad_auth_token", "expired_auth_token"): + elif status == 401 and code in ('bad_auth_token', 'expired_auth_token'): return InvalidAuthToken(message, code) elif status == 401: return Unauthorized(message, code) - elif status == 403 and code == "storage_cap_exceeded": + elif status == 403 and code == 'storage_cap_exceeded': return StorageCapExceeded() - elif status == 403 and code == "transaction_cap_exceeded": + elif status == 403 and code == 'transaction_cap_exceeded': return TransactionCapExceeded() - elif status == 403 and code == "access_denied": + elif status == 403 and code == 'access_denied': return AccessDenied() elif status == 409: return Conflict() - elif status == 416 and code == "range_not_satisfiable": + elif status == 416 and code == 'range_not_satisfiable': return UnsatisfiableRange() elif status == 429: return TooManyRequests(retry_after_seconds=response_headers.get('retry-after')) diff --git a/b2sdk/_internal/file_lock.py b/b2sdk/_internal/file_lock.py index 7d09177de..5e95e846d 100644 --- a/b2sdk/_internal/file_lock.py +++ b/b2sdk/_internal/file_lock.py @@ -19,10 +19,11 @@ @enum.unique class RetentionMode(enum.Enum): """Enum class representing retention modes set in files and buckets""" - GOVERNANCE = "governance" #: retention settings for files in this mode can be modified by clients with appropriate application key capabilities - COMPLIANCE = "compliance" #: retention settings for files in this mode can only be modified by extending the retention dates by clients with appropriate application key capabilities + + GOVERNANCE = 'governance' #: retention settings for files in this mode can be modified by clients with appropriate application key capabilities + COMPLIANCE = 'compliance' #: retention settings for files in this mode can only be 
modified by extending the retention dates by clients with appropriate application key capabilities NONE = None #: retention not set - UNKNOWN = "unknown" #: the client is not authorized to read retention settings + UNKNOWN = 'unknown' #: the client is not authorized to read retention settings RETENTION_MODES_REQUIRING_PERIODS = frozenset({RetentionMode.COMPLIANCE, RetentionMode.GOVERNANCE}) @@ -30,6 +31,7 @@ class RetentionMode(enum.Enum): class RetentionPeriod: """Represent a time period (either in days or in years) that is used as a default for bucket retention""" + KNOWN_UNITS = ['days', 'years'] def __init__(self, years: int | None = None, days: int | None = None): @@ -59,8 +61,8 @@ def from_period_dict(cls, period_dict): def as_dict(self): return { - "duration": self.duration, - "unit": self.unit, + 'duration': self.duration, + 'unit': self.unit, } def __repr__(self): @@ -110,8 +112,8 @@ def from_file_version_dict(cls, file_version_dict: dict) -> FileRetentionSetting if 'fileRetention' not in file_version_dict: if file_version_dict['action'] not in ACTIONS_WITHOUT_LOCK_SETTINGS: raise UnexpectedCloudBehaviour( - 'No fileRetention provided for file version with action=%s' % - (file_version_dict['action']) + 'No fileRetention provided for file version with action=%s' + % (file_version_dict['action']) ) return NO_RETENTION_FILE_SETTING file_retention_dict = file_version_dict['fileRetention'] @@ -125,7 +127,6 @@ def from_file_version_dict(cls, file_version_dict: dict) -> FileRetentionSetting def from_file_retention_value_dict( cls, file_retention_value_dict: dict ) -> FileRetentionSetting: - mode = file_retention_value_dict['mode'] if mode is None: return NO_RETENTION_FILE_SETTING @@ -150,7 +151,8 @@ def from_response_headers(cls, headers) -> FileRetentionSetting: retain_until = None return cls(RetentionMode(headers[retention_mode_header]), retain_until) if 'X-Bz-Client-Unauthorized-To-Read' in headers and retention_mode_header in headers[ - 'X-Bz-Client-Unauthorized-To-Read'].split(','): + 'X-Bz-Client-Unauthorized-To-Read' + ].split(','): return UNKNOWN_FILE_RETENTION_SETTING return NO_RETENTION_FILE_SETTING # the bucket is not file-lock-enabled or the file is has no retention set @@ -161,8 +163,8 @@ def serialize_to_json_for_request(self): def as_dict(self): return { - "mode": self.mode.value, - "retainUntilTimestamp": self.retain_until, + 'mode': self.mode.value, + 'retainUntilTimestamp': self.retain_until, } def add_to_to_upload_headers(self, headers): @@ -208,8 +210,8 @@ def from_file_version_dict(cls, file_version_dict: dict) -> LegalHold: if 'legalHold' not in file_version_dict: if file_version_dict['action'] not in ACTIONS_WITHOUT_LOCK_SETTINGS: raise UnexpectedCloudBehaviour( - 'legalHold not provided for file version with action=%s' % - (file_version_dict['action']) + 'legalHold not provided for file version with action=%s' + % (file_version_dict['action']) ) return cls.UNSET if not file_version_dict['legalHold']['isClientAuthorizedToRead']: @@ -230,9 +232,12 @@ def from_response_headers(cls, headers) -> LegalHold: if legal_hold_header in headers: return cls(headers['X-Bz-File-Legal-Hold']) if 'X-Bz-Client-Unauthorized-To-Read' in headers and legal_hold_header in headers[ - 'X-Bz-Client-Unauthorized-To-Read'].split(','): + 'X-Bz-Client-Unauthorized-To-Read' + ].split(','): return cls.UNKNOWN - return cls.UNSET # the bucket is not file-lock-enabled or the header is missing for any other reason + return ( + cls.UNSET + ) # the bucket is not file-lock-enabled or the header is 
missing for any other reason def to_server(self) -> str: if self.is_unknown(): @@ -247,7 +252,7 @@ def add_to_upload_headers(self, headers): class BucketRetentionSetting: """Represent bucket's default file retention settings, i.e. whether the files should be retained, in which mode - and for how long""" + and for how long""" def __init__(self, mode: RetentionMode, period: RetentionPeriod | None = None): if mode in RETENTION_MODES_REQUIRING_PERIODS and period is None: @@ -347,17 +352,18 @@ def from_bucket_dict(cls, bucket_dict): def as_dict(self): return { - "defaultRetention": self.default_retention.as_dict(), - "isFileLockEnabled": self.is_file_lock_enabled, + 'defaultRetention': self.default_retention.as_dict(), + 'isFileLockEnabled': self.is_file_lock_enabled, } def __eq__(self, other): - return self.default_retention == other.default_retention and self.is_file_lock_enabled == other.is_file_lock_enabled + return ( + self.default_retention == other.default_retention + and self.is_file_lock_enabled == other.is_file_lock_enabled + ) def __repr__(self): - return '{}({}, {})'.format( - self.__class__.__name__, repr(self.default_retention), repr(self.is_file_lock_enabled) - ) + return f'{self.__class__.__name__}({repr(self.default_retention)}, {repr(self.is_file_lock_enabled)})' UNKNOWN_BUCKET_RETENTION = BucketRetentionSetting(RetentionMode.UNKNOWN) diff --git a/b2sdk/_internal/file_version.py b/b2sdk/_internal/file_version.py index ccc08ffb5..85e088635 100644 --- a/b2sdk/_internal/file_version.py +++ b/b2sdk/_internal/file_version.py @@ -36,6 +36,7 @@ class BaseFileVersion: :ivar size - size of the whole file (for "upload" markers) """ + __slots__ = [ 'id_', 'api', @@ -96,7 +97,7 @@ def __init__( @classmethod def _decode_content_sha1(cls, content_sha1): if content_sha1.startswith(UNVERIFIED_CHECKSUM_PREFIX): - return content_sha1[len(UNVERIFIED_CHECKSUM_PREFIX):], False + return content_sha1[len(UNVERIFIED_CHECKSUM_PREFIX) :], False return content_sha1, True @classmethod @@ -120,17 +121,19 @@ def _get_args_for_clone(self): 'file_name': self.file_name, 'size': self.size, 'content_type': self.content_type, - 'content_sha1': self._encode_content_sha1(self.content_sha1, self.content_sha1_verified), + 'content_sha1': self._encode_content_sha1( + self.content_sha1, self.content_sha1_verified + ), 'file_info': self.file_info, 'upload_timestamp': self.upload_timestamp, 'server_side_encryption': self.server_side_encryption, 'file_retention': self.file_retention, 'legal_hold': self.legal_hold, 'replication_status': self.replication_status, - } # yapf: disable + } def as_dict(self): - """ represents the object as a dict which looks almost exactly like the raw api output for upload/list """ + """represents the object as a dict which looks almost exactly like the raw api output for upload/list""" result = { 'fileId': self.id_, 'fileName': self.file_name, @@ -164,7 +167,7 @@ def __eq__(self, other): def __repr__(self): return '{}({})'.format( self.__class__.__name__, - ', '.join(repr(getattr(self, attr)) for attr in self._all_slots()) + ', '.join(repr(getattr(self, attr)) for attr in self._all_slots()), ) def _all_slots(self): @@ -207,7 +210,7 @@ def get_content_sha1(self) -> Sha1HexDigest | None: Get the file's content SHA1 hex digest from the header or, if its absent, from the file info. If both are missing, return None. 
""" - if self.content_sha1 and self.content_sha1 != "none": + if self.content_sha1 and self.content_sha1 != 'none': return self.content_sha1 elif LARGE_FILE_SHA1 in self.file_info: return Sha1HexDigest(self.file_info[LARGE_FILE_SHA1]) @@ -398,6 +401,7 @@ class DownloadVersion(BaseFileVersion): """ A structure which represents metadata of an initialized download """ + __slots__ = [ 'range_', 'content_disposition', @@ -536,8 +540,9 @@ def from_api_response(self, file_version_dict, force_action=None): into a :py:class:`b2sdk.v2.FileVersion` object. """ - assert file_version_dict.get('action') is None or force_action is None, \ - 'action was provided by both info_dict and function argument' + assert ( + file_version_dict.get('action') is None or force_action is None + ), 'action was provided by both info_dict and function argument' action = file_version_dict.get('action') or force_action file_name = file_version_dict['fileName'] id_ = file_version_dict['fileId'] @@ -557,8 +562,9 @@ def from_api_response(self, file_version_dict, force_action=None): legal_hold = LegalHold.from_file_version_dict(file_version_dict) replication_status_value = file_version_dict.get('replicationStatus') - replication_status = replication_status_value and ReplicationStatus[ - replication_status_value.upper()] + replication_status = ( + replication_status_value and ReplicationStatus[replication_status_value.upper()] + ) return self.FILE_VERSION_CLASS( self.api, @@ -654,11 +660,11 @@ def from_cancel_or_delete_response(cls, response): return cls(response['fileId'], response['fileName']) def as_dict(self): - """ represents the object as a dict which looks almost exactly like the raw api output for delete_file_version """ + """represents the object as a dict which looks almost exactly like the raw api output for delete_file_version""" return {'action': 'delete', 'fileId': self.file_id, 'fileName': self.file_name} def __eq__(self, other): - return (self.file_id == other.file_id and self.file_name == other.file_name) + return self.file_id == other.file_id and self.file_name == other.file_name def __repr__(self): return f'{self.__class__.__name__}({repr(self.file_id)}, {repr(self.file_name)})' diff --git a/b2sdk/_internal/filter.py b/b2sdk/_internal/filter.py index 0fc5a2eaf..c8c2a0a7b 100644 --- a/b2sdk/_internal/filter.py +++ b/b2sdk/_internal/filter.py @@ -16,8 +16,8 @@ class FilterType(Enum): - INCLUDE = "include" - EXCLUDE = "exclude" + INCLUDE = 'include' + EXCLUDE = 'exclude' @dataclass @@ -51,7 +51,7 @@ class FilterMatcher: def __init__(self, filters: Sequence[Filter]): if filters and all(filter_.type == FilterType.INCLUDE for filter_ in filters): - filters = [Filter(type=FilterType.EXCLUDE, pattern="*"), *filters] + filters = [Filter(type=FilterType.EXCLUDE, pattern='*'), *filters] self.filters = filters diff --git a/b2sdk/_internal/large_file/part.py b/b2sdk/_internal/large_file/part.py index c18beb927..ea4629dfa 100644 --- a/b2sdk/_internal/large_file/part.py +++ b/b2sdk/_internal/large_file/part.py @@ -38,10 +38,7 @@ def __init__(self, file_id, part_number, content_length, content_sha1): self.content_sha1 = content_sha1 def __repr__(self): - return '<{} {} {} {} {}>'.format( - self.__class__.__name__, self.file_id, self.part_number, self.content_length, - self.content_sha1 - ) + return f'<{self.__class__.__name__} {self.file_id} {self.part_number} {self.content_length} {self.content_sha1}>' def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ diff --git 
a/b2sdk/_internal/large_file/services.py b/b2sdk/_internal/large_file/services.py index a7ff0666a..6dd5dd35d 100644 --- a/b2sdk/_internal/large_file/services.py +++ b/b2sdk/_internal/large_file/services.py @@ -17,7 +17,6 @@ class LargeFileServices: - UNFINISHED_LARGE_FILE_CLASS = staticmethod(UnfinishedLargeFile) def __init__(self, services): diff --git a/b2sdk/_internal/progress.py b/b2sdk/_internal/progress.py index 43ed4fbba..1bf195564 100644 --- a/b2sdk/_internal/progress.py +++ b/b2sdk/_internal/progress.py @@ -84,8 +84,10 @@ def close(self) -> None: Must be called when you're done with the listener. In well-structured code, should be called only once. """ - #import traceback, sys; traceback.print_stack(file=sys.stdout) - assert self._closed is False, 'progress listener was closed twice! uncomment the line above to debug this' + # import traceback, sys; traceback.print_stack(file=sys.stdout) + assert ( + self._closed is False + ), 'progress listener was closed twice! uncomment the line above to debug this' self._closed = True def __enter__(self): diff --git a/b2sdk/_internal/raw_api.py b/b2sdk/_internal/raw_api.py index 55eb19c35..12bb36de8 100644 --- a/b2sdk/_internal/raw_api.py +++ b/b2sdk/_internal/raw_api.py @@ -86,7 +86,8 @@ @unique class MetadataDirectiveMode(Enum): - """ Mode of handling metadata when copying a file """ + """Mode of handling metadata when copying a file""" + COPY = 401 #: copy metadata from the source file REPLACE = 402 #: ignore the source file metadata and set it to provided values @@ -100,6 +101,7 @@ class LifecycleRule(TypedDict): .. _B2 Cloud Storage Lifecycle Rules: https://www.backblaze.com/docs/cloud-storage-lifecycle-rules """ + fileNamePrefix: str daysFromHidingToDeleting: NotRequired[PositiveInt | None] daysFromUploadingToHiding: NotRequired[PositiveInt | None] @@ -117,6 +119,7 @@ class NotificationTargetConfiguration(TypedDict): `hmacSha256SigningSecret`, if present, has to be a string of 32 alphanumeric characters. """ + # TODO: add URL to the documentation targetType: Literal['webhook'] @@ -126,16 +129,26 @@ class NotificationTargetConfiguration(TypedDict): EVENT_TYPE = Literal[ - 'b2:ObjectCreated:*', 'b2:ObjectCreated:Upload', 'b2:ObjectCreated:MultipartUpload', - 'b2:ObjectCreated:Copy', 'b2:ObjectCreated:Replica', 'b2:ObjectCreated:MultipartReplica', - 'b2:ObjectDeleted:*', 'b2:ObjectDeleted:Delete', 'b2:ObjectDeleted:LifecycleRule', - 'b2:HideMarkerCreated:*', 'b2:HideMarkerCreated:Hide', 'b2:HideMarkerCreated:LifecycleRule',] + 'b2:ObjectCreated:*', + 'b2:ObjectCreated:Upload', + 'b2:ObjectCreated:MultipartUpload', + 'b2:ObjectCreated:Copy', + 'b2:ObjectCreated:Replica', + 'b2:ObjectCreated:MultipartReplica', + 'b2:ObjectDeleted:*', + 'b2:ObjectDeleted:Delete', + 'b2:ObjectDeleted:LifecycleRule', + 'b2:HideMarkerCreated:*', + 'b2:HideMarkerCreated:Hide', + 'b2:HideMarkerCreated:LifecycleRule', +] class _NotificationRule(TypedDict): """ Notification Rule. """ + eventTypes: list[EVENT_TYPE] isEnabled: bool name: str @@ -150,6 +163,7 @@ class NotificationRule(_NotificationRule): When creating or modifying a notification rule, `isSuspended` and `suspensionReason` are ignored. 
""" + isSuspended: NotRequired[bool] @@ -232,8 +246,15 @@ def create_bucket( @abstractmethod def create_key( - self, api_url, account_auth_token, account_id, capabilities, key_name, - valid_duration_seconds, bucket_id, name_prefix + self, + api_url, + account_auth_token, + account_id, + capabilities, + key_name, + valid_duration_seconds, + bucket_id, + name_prefix, ): pass @@ -272,8 +293,9 @@ def get_download_authorization( pass @abstractmethod - def get_file_info_by_id(self, api_url: str, account_auth_token: str, - file_id: str) -> dict[str, Any]: + def get_file_info_by_id( + self, api_url: str, account_auth_token: str, file_id: str + ) -> dict[str, Any]: pass @abstractmethod @@ -337,7 +359,7 @@ def list_keys( account_auth_token, account_id, max_key_count=None, - start_application_key_id=None + start_application_key_id=None, ): pass @@ -428,7 +450,9 @@ def get_upload_file_headers( headers[FILE_INFO_HEADER_PREFIX + k] = b2_url_encode(v) if server_side_encryption is not None: assert server_side_encryption.mode in ( - EncryptionMode.NONE, EncryptionMode.SSE_B2, EncryptionMode.SSE_C + EncryptionMode.NONE, + EncryptionMode.SSE_B2, + EncryptionMode.SSE_C, ) server_side_encryption.add_to_upload_headers(headers) @@ -482,14 +506,18 @@ def get_download_url_by_name(self, download_url, bucket_name, file_name): @abstractmethod def set_bucket_notification_rules( - self, api_url: str, account_auth_token: str, bucket_id: str, - rules: Iterable[NotificationRule] + self, + api_url: str, + account_auth_token: str, + bucket_id: str, + rules: Iterable[NotificationRule], ) -> list[NotificationRuleResponse]: pass @abstractmethod - def get_bucket_notification_rules(self, api_url: str, account_auth_token: str, - bucket_id: str) -> list[NotificationRuleResponse]: + def get_bucket_notification_rules( + self, api_url: str, account_auth_token: str, bucket_id: str + ) -> list[NotificationRuleResponse]: pass @@ -533,7 +561,9 @@ def _get_json(self, base_url: str, endpoint: str, auth: str, **params) -> JSON: return self.b2_http.request_content_return_json('GET', url, headers, params=params) def authorize_account(self, realm_url, application_key_id, application_key): - auth = f"Basic {base64.b64encode(f'{application_key_id}:{application_key}'.encode()).decode()}" + auth = ( + f"Basic {base64.b64encode(f'{application_key_id}:{application_key}'.encode()).decode()}" + ) return self._post_json(realm_url, 'b2_authorize_account', auth) def cancel_large_file(self, api_url, account_auth_token, file_id): @@ -567,8 +597,9 @@ def create_bucket( if default_server_side_encryption is not None: if not default_server_side_encryption.mode.can_be_set_as_bucket_default(): raise WrongEncryptionModeForBucketDefault(default_server_side_encryption.mode) - kwargs['defaultServerSideEncryption' - ] = default_server_side_encryption.serialize_to_json_for_request() + kwargs['defaultServerSideEncryption'] = ( + default_server_side_encryption.serialize_to_json_for_request() + ) if is_file_lock_enabled is not None: kwargs['fileLockEnabled'] = is_file_lock_enabled if replication is not None: @@ -581,8 +612,15 @@ def create_bucket( ) def create_key( - self, api_url, account_auth_token, account_id, capabilities, key_name, - valid_duration_seconds, bucket_id, name_prefix + self, + api_url, + account_auth_token, + account_id, + capabilities, + key_name, + valid_duration_seconds, + bucket_id, + name_prefix, ): return self._post_json( api_url, @@ -602,7 +640,7 @@ def delete_bucket(self, api_url, account_auth_token, account_id, bucket_id): 'b2_delete_bucket', 
account_auth_token, accountId=account_id, - bucketId=bucket_id + bucketId=bucket_id, ) def delete_file_version( @@ -646,7 +684,9 @@ def download_file_from_url( if encryption is not None: assert encryption.mode in ( - EncryptionMode.NONE, EncryptionMode.SSE_B2, EncryptionMode.SSE_C + EncryptionMode.NONE, + EncryptionMode.SSE_B2, + EncryptionMode.SSE_C, ) encryption.add_to_download_headers(request_headers) @@ -663,7 +703,7 @@ def finish_large_file(self, api_url, account_auth_token, file_id, part_sha1_arra 'b2_finish_large_file', account_auth_token, fileId=file_id, - partSha1Array=part_sha1_array + partSha1Array=part_sha1_array, ) def get_download_authorization( @@ -675,11 +715,12 @@ def get_download_authorization( account_auth_token, bucketId=bucket_id, fileNamePrefix=file_name_prefix, - validDurationInSeconds=valid_duration_in_seconds + validDurationInSeconds=valid_duration_in_seconds, ) - def get_file_info_by_id(self, api_url: str, account_auth_token: str, - file_id: str) -> dict[str, Any]: + def get_file_info_by_id( + self, api_url: str, account_auth_token: str, file_id: str + ) -> dict[str, Any]: return self._post_json(api_url, 'b2_get_file_info', account_auth_token, fileId=file_id) def get_file_info_by_name( @@ -688,11 +729,11 @@ def get_file_info_by_name( download_url = self.get_download_url_by_name(download_url, bucket_name, file_name) try: response = self.b2_http.head_content( - download_url, headers={"Authorization": account_auth_token} + download_url, headers={'Authorization': account_auth_token} ) return response.headers except ResourceNotFound: - logger.debug("Resource Not Found: %s" % download_url) + logger.debug('Resource Not Found: %s' % download_url) raise FileOrBucketNotFound(bucket_name, file_name) def get_upload_url(self, api_url, account_auth_token, bucket_id): @@ -772,7 +813,7 @@ def list_keys( account_auth_token, account_id, max_key_count=None, - start_application_key_id=None + start_application_key_id=None, ): return self._post_json( api_url, @@ -790,7 +831,7 @@ def list_parts(self, api_url, account_auth_token, file_id, start_part_number, ma account_auth_token, fileId=file_id, startPartNumber=start_part_number, - maxPartCount=max_part_count + maxPartCount=max_part_count, ) def list_unfinished_large_files( @@ -828,7 +869,9 @@ def start_large_file( kwargs = {} if server_side_encryption is not None: assert server_side_encryption.mode in ( - EncryptionMode.NONE, EncryptionMode.SSE_B2, EncryptionMode.SSE_C + EncryptionMode.NONE, + EncryptionMode.SSE_B2, + EncryptionMode.SSE_C, ) kwargs['serverSideEncryption'] = server_side_encryption.serialize_to_json_for_request() @@ -852,7 +895,7 @@ def start_large_file( fileName=file_name, fileInfo=file_info, contentType=content_type, - **kwargs + **kwargs, ) def update_bucket( @@ -885,8 +928,9 @@ def update_bucket( if default_server_side_encryption is not None: if not default_server_side_encryption.mode.can_be_set_as_bucket_default(): raise WrongEncryptionModeForBucketDefault(default_server_side_encryption.mode) - kwargs['defaultServerSideEncryption' - ] = default_server_side_encryption.serialize_to_json_for_request() + kwargs['defaultServerSideEncryption'] = ( + default_server_side_encryption.serialize_to_json_for_request() + ) if default_retention is not None: kwargs['defaultRetention'] = default_retention.serialize_to_json_for_request() if replication is not None: @@ -902,7 +946,7 @@ def update_bucket( account_auth_token, accountId=account_id, bucketId=bucket_id, - **kwargs + **kwargs, ) def update_file_retention( @@ -924,7 +968,7 @@ 
def update_file_retention( fileId=file_id, fileName=file_name, bypassGovernance=bypass_governance, - **kwargs + **kwargs, ) except AccessDenied: raise RetentionWriteError() @@ -961,25 +1005,23 @@ def check_b2_filename(self, filename): encoded_name = filename.encode('utf-8') length_in_bytes = len(encoded_name) if length_in_bytes < 1: - raise UnusableFileName("Filename must be at least 1 character.") + raise UnusableFileName('Filename must be at least 1 character.') if length_in_bytes > 1024: - raise UnusableFileName("Filename is too long (can be at most 1024 bytes).") + raise UnusableFileName('Filename is too long (can be at most 1024 bytes).') lowest_unicode_value = ord(min(filename)) if lowest_unicode_value < 32: - message = "Filename \"{}\" contains code {} (hex {:02x}), less than 32.".format( - unprintable_to_hex(filename), lowest_unicode_value, lowest_unicode_value - ) + message = f'Filename "{unprintable_to_hex(filename)}" contains code {lowest_unicode_value} (hex {lowest_unicode_value:02x}), less than 32.' raise UnusableFileName(message) # No DEL for you. if '\x7f' in filename: - raise UnusableFileName("DEL character (0x7f) not allowed.") + raise UnusableFileName('DEL character (0x7f) not allowed.') if filename[0] == '/' or filename[-1] == '/': raise UnusableFileName("Filename may not start or end with '/'.") if '//' in filename: - raise UnusableFileName("Filename may not contain \"//\".") + raise UnusableFileName('Filename may not contain "//".') long_segment = max([len(segment.encode('utf-8')) for segment in filename.split('/')]) if long_segment > 250: - raise UnusableFileName("Filename segment too long (maximum 250 bytes in utf-8).") + raise UnusableFileName('Filename segment too long (maximum 250 bytes in utf-8).') def upload_file( self, @@ -1043,11 +1085,13 @@ def upload_part( 'Authorization': upload_auth_token, 'Content-Length': str(content_length), 'X-Bz-Part-Number': str(part_number), - 'X-Bz-Content-Sha1': content_sha1 + 'X-Bz-Content-Sha1': content_sha1, } if server_side_encryption is not None: assert server_side_encryption.mode in ( - EncryptionMode.NONE, EncryptionMode.SSE_B2, EncryptionMode.SSE_C + EncryptionMode.NONE, + EncryptionMode.SSE_B2, + EncryptionMode.SSE_C, ) server_side_encryption.add_to_upload_headers(headers) @@ -1097,14 +1141,18 @@ def copy_file( kwargs['destinationBucketId'] = destination_bucket_id if destination_server_side_encryption is not None: assert destination_server_side_encryption.mode in ( - EncryptionMode.NONE, EncryptionMode.SSE_B2, EncryptionMode.SSE_C + EncryptionMode.NONE, + EncryptionMode.SSE_B2, + EncryptionMode.SSE_C, + ) + kwargs['destinationServerSideEncryption'] = ( + destination_server_side_encryption.serialize_to_json_for_request() ) - kwargs['destinationServerSideEncryption' - ] = destination_server_side_encryption.serialize_to_json_for_request() if source_server_side_encryption is not None: assert source_server_side_encryption.mode == EncryptionMode.SSE_C - kwargs['sourceServerSideEncryption' - ] = source_server_side_encryption.serialize_to_json_for_request() + kwargs['sourceServerSideEncryption'] = ( + source_server_side_encryption.serialize_to_json_for_request() + ) if legal_hold is not None: kwargs['legalHold'] = legal_hold.to_server() @@ -1119,7 +1167,7 @@ def copy_file( account_auth_token, sourceFileId=source_file_id, fileName=new_file_name, - **kwargs + **kwargs, ) except AccessDenied: raise SSECKeyError() @@ -1142,16 +1190,22 @@ def copy_part( kwargs['range'] = range_dict['Range'] if destination_server_side_encryption is not 
None: assert destination_server_side_encryption.mode in ( - EncryptionMode.NONE, EncryptionMode.SSE_B2, EncryptionMode.SSE_C + EncryptionMode.NONE, + EncryptionMode.SSE_B2, + EncryptionMode.SSE_C, + ) + kwargs['destinationServerSideEncryption'] = ( + destination_server_side_encryption.serialize_to_json_for_request() ) - kwargs['destinationServerSideEncryption' - ] = destination_server_side_encryption.serialize_to_json_for_request() if source_server_side_encryption is not None: assert source_server_side_encryption.mode in ( - EncryptionMode.NONE, EncryptionMode.SSE_B2, EncryptionMode.SSE_C + EncryptionMode.NONE, + EncryptionMode.SSE_B2, + EncryptionMode.SSE_C, + ) + kwargs['sourceServerSideEncryption'] = ( + source_server_side_encryption.serialize_to_json_for_request() ) - kwargs['sourceServerSideEncryption' - ] = source_server_side_encryption.serialize_to_json_for_request() try: return self._post_json( api_url, @@ -1160,7 +1214,7 @@ def copy_part( sourceFileId=source_file_id, largeFileId=large_file_id, partNumber=part_number, - **kwargs + **kwargs, ) except AccessDenied: raise SSECKeyError() @@ -1176,10 +1230,11 @@ def set_bucket_notification_rules( 'bucketId': bucket_id, 'eventNotificationRules': rules, }, - )["eventNotificationRules"] + )['eventNotificationRules'] - def get_bucket_notification_rules(self, api_url: str, account_auth_token: str, - bucket_id: str) -> list[NotificationRuleResponse]: + def get_bucket_notification_rules( + self, api_url: str, account_auth_token: str, bucket_id: str + ) -> list[NotificationRuleResponse]: return self._get_json( api_url, 'b2_get_bucket_notification_rules', @@ -1187,7 +1242,7 @@ def get_bucket_notification_rules(self, api_url: str, account_auth_token: str, **{ 'bucketId': bucket_id, }, - )["eventNotificationRules"] + )['eventNotificationRules'] def _add_range_header(headers, range_): @@ -1195,4 +1250,4 @@ def _add_range_header(headers, range_): assert len(range_) == 2, range_ assert (range_[0] + 0) <= (range_[1] + 0), range_ # not strings assert range_[0] >= 0, range_ - headers['Range'] = "bytes=%d-%d" % range_ + headers['Range'] = 'bytes=%d-%d' % range_ diff --git a/b2sdk/_internal/raw_simulator.py b/b2sdk/_internal/raw_simulator.py index b5b3a0303..75623b363 100644 --- a/b2sdk/_internal/raw_simulator.py +++ b/b2sdk/_internal/raw_simulator.py @@ -73,7 +73,7 @@ def get_bytes_range(data_bytes, bytes_range): - """ Slice bytes array using bytes range """ + """Slice bytes array using bytes range""" if bytes_range is None: return data_bytes if bytes_range[0] > bytes_range[1]: @@ -82,7 +82,7 @@ def get_bytes_range(data_bytes, bytes_range): raise UnsatisfiableRange() if bytes_range[1] > len(data_bytes): raise UnsatisfiableRange() - return data_bytes[bytes_range[0]:bytes_range[1] + 1] + return data_bytes[bytes_range[0] : bytes_range[1] + 1] class KeySimulator: @@ -92,8 +92,16 @@ class KeySimulator: """ def __init__( - self, account_id, name, application_key_id, key, capabilities, expiration_timestamp_or_none, - bucket_id_or_none, bucket_name_or_none, name_prefix_or_none + self, + account_id, + name, + application_key_id, + key, + capabilities, + expiration_timestamp_or_none, + bucket_id_or_none, + bucket_name_or_none, + name_prefix_or_none, ): self.name = name self.account_id = account_id @@ -111,8 +119,8 @@ def as_key(self): bucketId=self.bucket_id_or_none, applicationKeyId=self.application_key_id, capabilities=self.capabilities, - expirationTimestamp=self.expiration_timestamp_or_none and - self.expiration_timestamp_or_none * 1000, + 
expirationTimestamp=self.expiration_timestamp_or_none + and self.expiration_timestamp_or_none * 1000, keyName=self.name, namePrefix=self.name_prefix_or_none, ) @@ -152,8 +160,8 @@ def as_list_parts_dict(self): fileId=self.file_id, partNumber=self.part_number, contentLength=self.content_length, - contentSha1=self.content_sha1 - ) # yapf: disable + contentSha1=self.content_sha1, + ) class FileSimulator: @@ -280,10 +288,12 @@ def as_download_headers( if self.server_side_encryption.mode == EncryptionMode.SSE_B2: headers['X-Bz-Server-Side-Encryption'] = self.server_side_encryption.algorithm.value elif self.server_side_encryption.mode == EncryptionMode.SSE_C: - headers['X-Bz-Server-Side-Encryption-Customer-Algorithm' - ] = self.server_side_encryption.algorithm.value - headers['X-Bz-Server-Side-Encryption-Customer-Key-Md5' - ] = self.server_side_encryption.key.key_md5() + headers['X-Bz-Server-Side-Encryption-Customer-Algorithm'] = ( + self.server_side_encryption.algorithm.value + ) + headers['X-Bz-Server-Side-Encryption-Customer-Key-Md5'] = ( + self.server_side_encryption.key.key_md5() + ) elif self.server_side_encryption.mode in (EncryptionMode.NONE, EncryptionMode.UNKNOWN): pass else: @@ -291,8 +301,10 @@ def as_download_headers( if range_ is not None: headers['Content-Range'] = 'bytes %d-%d/%d' % ( - range_[0], range_[0] + content_length - 1, len(self.data_bytes) - ) # yapf: disable + range_[0], + range_[0] + content_length - 1, + len(self.data_bytes), + ) return headers def as_upload_result(self, account_auth_token): @@ -308,10 +320,11 @@ def as_upload_result(self, account_auth_token): action=self.action, uploadTimestamp=self.upload_timestamp, replicationStatus=self.replication_status and self.replication_status.value, - ) # yapf: disable + ) if self.server_side_encryption is not None: - result['serverSideEncryption' - ] = self.server_side_encryption.serialize_to_json_for_request() + result['serverSideEncryption'] = ( + self.server_side_encryption.serialize_to_json_for_request() + ) result['fileRetention'] = self._file_retention_dict(account_auth_token) result['legalHold'] = self._legal_hold_dict(account_auth_token) return result @@ -329,10 +342,11 @@ def as_list_files_dict(self, account_auth_token): action=self.action, uploadTimestamp=self.upload_timestamp, replicationStatus=self.replication_status and self.replication_status.value, - ) # yapf: disable + ) if self.server_side_encryption is not None: - result['serverSideEncryption' - ] = self.server_side_encryption.serialize_to_json_for_request() + result['serverSideEncryption'] = ( + self.server_side_encryption.serialize_to_json_for_request() + ) result['fileRetention'] = self._file_retention_dict(account_auth_token) result['legalHold'] = self._legal_hold_dict(account_auth_token) return result @@ -353,10 +367,11 @@ def as_start_large_file_result(self, account_auth_token): fileInfo=self.file_info, uploadTimestamp=self.upload_timestamp, replicationStatus=self.replication_status and self.replication_status.value, - ) # yapf: disable + ) if self.server_side_encryption is not None: - result['serverSideEncryption' - ] = self.server_side_encryption.serialize_to_json_for_request() + result['serverSideEncryption'] = ( + self.server_side_encryption.serialize_to_json_for_request() + ) result['fileRetention'] = self._file_retention_dict(account_auth_token) result['legalHold'] = self._legal_hold_dict(account_auth_token) return result @@ -374,8 +389,9 @@ def _file_retention_dict(self, account_auth_token): else: file_lock_configuration['value'] = {'mode': 
self.file_retention.mode.value} if self.file_retention.retain_until is not None: - file_lock_configuration['value']['retainUntilTimestamp' - ] = self.file_retention.retain_until + file_lock_configuration['value']['retainUntilTimestamp'] = ( + self.file_retention.retain_until + ) return file_lock_configuration def _legal_hold_dict(self, account_auth_token): @@ -423,7 +439,8 @@ def list_parts(self, start_part_number, max_part_count): max_part_count = max_part_count or 100 parts = [ part.as_list_parts_dict() - for part in self.parts if part is not None and start_part_number <= part.part_number + for part in self.parts + if part is not None and start_part_number <= part.part_number ] if len(parts) <= max_part_count: next_part_number = None @@ -474,7 +491,7 @@ def tell(self): return self._position def read(self, size): - data = self.data_bytes[self._position:self._position + size] + data = self.data_bytes[self._position : self._position + size] self._position += len(data) return data @@ -486,7 +503,7 @@ def __init__(self, account_auth_token_or_none, file_sim, url, range_=None): self.url = url self.range_ = range_ if range_ is not None: - self.data_bytes = self.data_bytes[range_[0]:range_[1] + 1] + self.data_bytes = self.data_bytes[range_[0] : range_[1] + 1] @property def data_bytes(self): @@ -524,7 +541,6 @@ def __exit__(self, *args): class BucketSimulator: - # File IDs start at 9999 and count down, so they sort in the order # returned by list_file_versions. The IDs are strings. FIRST_FILE_NUMBER = 9999 @@ -575,8 +591,14 @@ def __init__( self.default_retention = NO_RETENTION_BUCKET_SETTING self.replication = replication if self.replication is not None: - assert self.replication.asReplicationSource is None or self.replication.asReplicationSource.rules - assert self.replication.asReplicationDestination is None or self.replication.asReplicationDestination.sourceToDestinationKeyMapping + assert ( + self.replication.asReplicationSource is None + or self.replication.asReplicationSource.rules + ) + assert ( + self.replication.asReplicationDestination is None + or self.replication.asReplicationDestination.sourceToDestinationKeyMapping + ) def get_file(self, file_id, file_name) -> FileSimulator: try: @@ -608,8 +630,9 @@ def bucket_dict(self, account_auth_token): default_sse['isClientAuthorizedToRead'] = True default_sse['value'] = {'mode': self.default_server_side_encryption.mode.value} if self.default_server_side_encryption.algorithm is not None: - default_sse['value']['algorithm' - ] = self.default_server_side_encryption.algorithm.value + default_sse['value']['algorithm'] = ( + self.default_server_side_encryption.algorithm.value + ) else: default_sse['value'] = {'mode': EncryptionMode.UNKNOWN.value} @@ -619,11 +642,13 @@ def bucket_dict(self, account_auth_token): 'value': { 'defaultRetention': { 'mode': self.default_retention.mode.value, - 'period': self.default_retention.period.as_dict() if self.default_retention.period else None, + 'period': self.default_retention.period.as_dict() + if self.default_retention.period + else None, }, 'isFileLockEnabled': self.is_file_lock_enabled, }, - } # yapf: disable + } else: file_lock_configuration = {'isClientAuthorizedToRead': False, 'value': None} @@ -656,8 +681,8 @@ def cancel_large_file(self, file_id): accountId=self.account_id, bucketId=self.bucket_id, fileId=file_id, - fileName=file_sim.name - ) # yapf: disable + fileName=file_sim.name, + ) def delete_file_version( self, account_auth_token, file_id, file_name, bypass_governance: bool = False @@ -700,8 
+725,9 @@ def download_file_by_name( range_=None, encryption: EncryptionSetting | None = None, ): - files = self.list_file_names(self.api.current_token, file_name, - 1)['files'] # token is not important here + files = self.list_file_names(self.api.current_token, file_name, 1)[ + 'files' + ] # token is not important here if len(files) == 0: raise FileNotPresent(file_id_or_name=file_name) @@ -736,7 +762,7 @@ def get_file_info_by_id(self, account_auth_token, file_id): def get_file_info_by_name(self, account_auth_token, file_name): # Sorting files by name and ID, so lower ID (newer upload) is returned first. - for ((name, id), file) in sorted(self.file_name_and_id_to_file.items()): + for (name, id), file in sorted(self.file_name_and_id_to_file.items()): if file_name == name: return file.as_download_headers(account_auth_token_or_none=account_auth_token) raise FileNotPresent(file_id_or_name=file_name, bucket_name=self.bucket_name) @@ -744,21 +770,33 @@ def get_file_info_by_name(self, account_auth_token, file_name): def get_upload_url(self, account_auth_token): upload_id = next(self.upload_url_counter) upload_url = 'https://upload.example.com/%s/%d/%s' % ( - self.bucket_id, upload_id, account_auth_token + self.bucket_id, + upload_id, + account_auth_token, ) return dict(bucketId=self.bucket_id, uploadUrl=upload_url, authorizationToken=upload_url) def get_upload_part_url(self, account_auth_token, file_id): upload_url = 'https://upload.example.com/part/%s/%d/%s' % ( - file_id, random.randint(1, 10**9), account_auth_token + file_id, + random.randint(1, 10**9), + account_auth_token, ) return dict(bucketId=self.bucket_id, uploadUrl=upload_url, authorizationToken=upload_url) def hide_file(self, account_auth_token, file_name): file_id = self._next_file_id() file_sim = self.FILE_SIMULATOR_CLASS( - self.account_id, self, file_id, 'hide', file_name, None, "none", {}, b'', - next(self.upload_timestamp_counter) + self.account_id, + self, + file_id, + 'hide', + file_name, + None, + 'none', + {}, + b'', + next(self.upload_timestamp_counter), ) self.file_id_to_file[file_id] = file_sim self.file_name_and_id_to_file[file_sim.sort_key()] = file_sim @@ -849,14 +887,16 @@ def copy_file( 'upload', new_file_name, file_sim.content_type, - hex_sha1_of_bytes(data_bytes), # we hash here again because bytes_range may not cover the full source + hex_sha1_of_bytes( + data_bytes + ), # we hash here again because bytes_range may not cover the full source file_sim.file_info, data_bytes, next(self.upload_timestamp_counter), server_side_encryption=sse, file_retention=file_retention, legal_hold=legal_hold, - ) # yapf: disable + ) destination_bucket.file_id_to_file[copy_file_sim.file_id] = copy_file_sim destination_bucket.file_name_and_id_to_file[copy_file_sim.sort_key()] = copy_file_sim @@ -865,14 +905,14 @@ def copy_file( copy_file_sim.file_info = file_info or file_sim.file_info ## long term storage of that file has action="upload", but here we need to return action="copy", just this once - #class TestFileVersionFactory(FileVersionFactory): + # class TestFileVersionFactory(FileVersionFactory): # FILE_VERSION_CLASS = self.FILE_SIMULATOR_CLASS - #file_version_dict = copy_file_sim.as_upload_result(account_auth_token) - #del file_version_dict['action'] - #print(file_version_dict) - #copy_file_sim_with_action_copy = TestFileVersionFactory(self.api).from_api_response(file_version_dict, force_action='copy') - #return copy_file_sim_with_action_copy + # file_version_dict = copy_file_sim.as_upload_result(account_auth_token) + # del 
file_version_dict['action'] + # print(file_version_dict) + # copy_file_sim_with_action_copy = TestFileVersionFactory(self.api).from_api_response(file_version_dict, force_action='copy') + # return copy_file_sim_with_action_copy # TODO: the code above cannot be used right now because FileSimulator.__init__ is incompatible with FileVersionFactory / FileVersion.__init__ - refactor is needed # for now we'll just return the newly constructed object with a copy action... @@ -899,8 +939,9 @@ def list_file_names( max_file_count=None, prefix=None, ): - assert prefix is None or start_file_name is None or start_file_name.startswith(prefix - ), locals() + assert ( + prefix is None or start_file_name is None or start_file_name.startswith(prefix) + ), locals() start_file_name = start_file_name or '' max_file_count = max_file_count or 100 result_files = [] @@ -931,8 +972,9 @@ def list_file_versions( max_file_count=None, prefix=None, ): - assert prefix is None or start_file_name is None or start_file_name.startswith(prefix - ), locals() + assert ( + prefix is None or start_file_name is None or start_file_name.startswith(prefix) + ), locals() start_file_name = start_file_name or '' start_file_id = start_file_id or '' max_file_count = max_file_count or 100 @@ -942,8 +984,8 @@ def list_file_versions( for key in sorted(self.file_name_and_id_to_file): (file_name, file_id) = key if (start_file_name < file_name) or ( - start_file_name == file_name and - (start_file_id == '' or int(start_file_id) <= int(file_id)) + start_file_name == file_name + and (start_file_id == '' or int(start_file_id) <= int(file_id)) ): file_sim = self.file_name_and_id_to_file[key] if prefix is not None and not file_name.startswith(prefix): @@ -965,9 +1007,11 @@ def list_unfinished_large_files( start_file_id = start_file_id or self.FIRST_FILE_ID max_file_count = max_file_count or 100 all_unfinished_ids = set( - k for (k, v) in self.file_id_to_file.items() - if v.action == 'start' and k <= start_file_id and - (prefix is None or v.name.startswith(prefix)) + k + for (k, v) in self.file_id_to_file.items() + if v.action == 'start' + and k <= start_file_id + and (prefix is None or v.name.startswith(prefix)) ) ids_in_order = sorted(all_unfinished_ids, reverse=True) @@ -976,7 +1020,7 @@ def list_unfinished_large_files( for file_sim in ( self.file_id_to_file[file_id] for file_id in ids_in_order[:max_file_count] ) - ] # yapf: disable + ] next_file_id = None if len(file_dict_list) == max_file_count: next_file_id = str(int(file_dict_list[-1]['fileId']) - 1) @@ -995,7 +1039,9 @@ def start_large_file( ): file_id = self._next_file_id() sse = server_side_encryption or self.default_server_side_encryption - if sse: # FIXME: remove this part when RawApi<->Encryption adapters are implemented properly + if ( + sse + ): # FIXME: remove this part when RawApi<->Encryption adapters are implemented properly file_info = sse.add_key_id_to_file_info(file_info) upload_timestamp = next(self.upload_timestamp_counter) @@ -1003,10 +1049,20 @@ def start_large_file( upload_timestamp = custom_upload_timestamp file_sim = self.FILE_SIMULATOR_CLASS( - self.account_id, self, file_id, 'start', file_name, content_type, 'none', - file_info, None, upload_timestamp, server_side_encryption=sse, - file_retention=file_retention, legal_hold=legal_hold, - ) # yapf: disable + self.account_id, + self, + file_id, + 'start', + file_name, + content_type, + 'none', + file_info, + None, + upload_timestamp, + server_side_encryption=sse, + file_retention=file_retention, + legal_hold=legal_hold, + ) 
self.file_id_to_file[file_id] = file_sim self.file_name_and_id_to_file[file_sim.sort_key()] = file_sim return file_sim.as_start_large_file_result(account_auth_token) @@ -1031,8 +1087,10 @@ def _update_bucket( raise DisablingFileLockNotSupported() if ( - not self.is_file_lock_enabled and is_file_lock_enabled and self.replication and - self.replication.is_source + not self.is_file_lock_enabled + and is_file_lock_enabled + and self.replication + and self.replication.is_source ): raise SourceReplicationConflict() @@ -1087,7 +1145,9 @@ def upload_file( file_id = self._next_file_id() encryption = server_side_encryption or self.default_server_side_encryption - if encryption: # FIXME: remove this part when RawApi<->Encryption adapters are implemented properly + if ( + encryption + ): # FIXME: remove this part when RawApi<->Encryption adapters are implemented properly file_info = encryption.add_key_id_to_file_info(file_info) upload_timestamp = next(self.upload_timestamp_counter) @@ -1141,7 +1201,7 @@ def upload_part( partNumber=part_number, contentLength=content_length, contentSha1=sha1_sum, - ) # yapf: disable + ) if server_side_encryption is not None: result['serverSideEncryption'] = server_side_encryption.serialize_to_json_for_request() return result @@ -1181,26 +1241,26 @@ def _next_file_id(self): def get_notification_rules(self) -> list[NotificationRule]: return self._notification_rules - def set_notification_rules(self, - rules: Iterable[NotificationRule]) -> list[NotificationRuleResponse]: - old_rules_by_name = {rule["name"]: rule for rule in self._notification_rules} + def set_notification_rules( + self, rules: Iterable[NotificationRule] + ) -> list[NotificationRuleResponse]: + old_rules_by_name = {rule['name']: rule for rule in self._notification_rules} new_rules: list[NotificationRuleResponse] = [] for rule in rules: - for field in ("isSuspended", "suspensionReason"): + for field in ('isSuspended', 'suspensionReason'): rule.pop(field, None) - old_rule = old_rules_by_name.get(rule["name"], {"targetConfiguration": {}}) + old_rule = old_rules_by_name.get(rule['name'], {'targetConfiguration': {}}) new_rule = { **{ - "isSuspended": False, - "suspensionReason": "", + 'isSuspended': False, + 'suspensionReason': '', }, **old_rule, **rule, - "targetConfiguration": - { - **old_rule.get("targetConfiguration", {}), - **rule.get("targetConfiguration", {}), - }, + 'targetConfiguration': { + **old_rule.get('targetConfiguration', {}), + **rule.get('targetConfiguration', {}), + }, } new_rules.append(new_rule) self._notification_rules = new_rules @@ -1210,11 +1270,11 @@ def simulate_notification_rule_suspension( self, rule_name: str, reason: str, is_suspended: bool | None = None ) -> None: for rule in self._notification_rules: - if rule["name"] == rule_name: - rule["isSuspended"] = bool(reason) if is_suspended is None else is_suspended - rule["suspensionReason"] = reason + if rule['name'] == rule_name: + rule['isSuspended'] = bool(reason) if is_suspended is None else is_suspended + rule['suspensionReason'] = reason return - raise ResourceNotFound(f"Rule {rule_name} not found") + raise ResourceNotFound(f'Rule {rule_name} not found') class RawSimulator(AbstractRawApi): @@ -1240,13 +1300,16 @@ class RawSimulator(AbstractRawApi): UPLOAD_PART_MATCHER = re.compile('https://upload.example.com/part/([^/]*)') UPLOAD_URL_MATCHER = re.compile(r'https://upload.example.com/([^/]*)/([^/]*)') DOWNLOAD_URL_MATCHER = re.compile( - DOWNLOAD_URL + '(?:' + '|'.join( + DOWNLOAD_URL + + '(?:' + + '|'.join( ( 
r'/b2api/v[0-9]+/b2_download_file_by_id\?fileId=(?P<file_id>[^/]+)', '/file/(?P<bucket_name>[^/]+)/(?P<file_name>.+)', ) - ) + ')$' - ) # yapf: disable + ) + + ')$' + ) def __init__(self, b2_http=None): # Map from application_key_id to KeySimulator. @@ -1561,16 +1624,14 @@ def get_download_authorization( bucket = self._get_bucket_by_id(bucket_id) self._assert_account_auth(api_url, account_auth_token, bucket.account_id, 'shareFiles') return { - 'bucketId': + 'bucketId': bucket_id, + 'fileNamePrefix': file_name_prefix, + 'authorizationToken': 'fake_download_auth_token_%s_%s_%d' + % ( bucket_id, - 'fileNamePrefix': - file_name_prefix, - 'authorizationToken': - 'fake_download_auth_token_%s_%s_%d' % ( - bucket_id, - b2_url_encode(file_name_prefix), - valid_duration_in_seconds, - ) + b2_url_encode(file_name_prefix), + valid_duration_in_seconds, + ), } def get_file_info_by_id(self, api_url, account_auth_token, file_id): @@ -1656,7 +1717,10 @@ def copy_part( destination_server_side_encryption: EncryptionSetting | None = None, source_server_side_encryption: EncryptionSetting | None = None, ): - if destination_server_side_encryption is not None and destination_server_side_encryption.mode == EncryptionMode.SSE_B2: + if ( + destination_server_side_encryption is not None + and destination_server_side_encryption.mode == EncryptionMode.SSE_B2 + ): raise ValueError( 'unsupported sse mode for copy_part!' ) # SSE-B2 is only to be marked in b2_start_large_file @@ -1702,7 +1766,8 @@ def list_buckets( ] bucket_list = [ bucket.bucket_dict(account_auth_token) - for bucket in sorted_buckets if self._bucket_matches(bucket, bucket_id, bucket_name) + for bucket in sorted_buckets + if self._bucket_matches(bucket, bucket_id, bucket_name) ] return dict(buckets=bucket_list) @@ -1712,9 +1777,8 @@ def _get_bucket_id_or_none_for_bucket_name(self, bucket_name): return bucket.bucket_id def _bucket_matches(self, bucket, bucket_id, bucket_name): - return ( - (bucket_id is None or bucket.bucket_id == bucket_id) and - (bucket_name is None or bucket.bucket_name == bucket_name) + return (bucket_id is None or bucket.bucket_id == bucket_id) and ( + bucket_name is None or bucket.bucket_name == bucket_name ) def list_file_names( @@ -1770,7 +1834,7 @@ def list_keys( account_auth_token, account_id, max_key_count=1000, - start_application_key_id=None + start_application_key_id=None, ): self._assert_account_auth(api_url, account_auth_token, account_id, 'listKeys') next_application_key_id = None @@ -1807,7 +1871,7 @@ def list_unfinished_large_files( bucket_id, start_file_id=None, max_file_count=None, - prefix=None + prefix=None, ): bucket = self._get_bucket_by_id(bucket_id) self._assert_account_auth( @@ -1864,7 +1928,15 @@ def update_bucket( replication: ReplicationConfiguration | None = None, is_file_lock_enabled: bool | None = None, ): - assert bucket_type or bucket_info or cors_rules or lifecycle_rules or default_server_side_encryption or replication or is_file_lock_enabled is not None + assert ( + bucket_type + or bucket_info + or cors_rules + or lifecycle_rules + or default_server_side_encryption + or replication + or is_file_lock_enabled is not None + ) bucket = self._get_bucket_by_id(bucket_id) self._assert_account_auth(api_url, account_auth_token, bucket.account_id, 'writeBuckets') return bucket._update_bucket( @@ -1893,11 +1965,12 @@ def get_upload_file_headers( legal_hold: LegalHold | None, custom_upload_timestamp: int | None = None, ) -> dict: - # fix to allow calculating headers on unknown key - only for simulation - if server_side_encryption is not None \ -
and server_side_encryption.mode == EncryptionMode.SSE_C \ - and server_side_encryption.key.secret is None: + if ( + server_side_encryption is not None + and server_side_encryption.mode == EncryptionMode.SSE_C + and server_side_encryption.key.secret is None + ): server_side_encryption.key.secret = b'secret' return super().get_upload_file_headers( @@ -1941,7 +2014,9 @@ def upload_file( bucket = self._get_bucket_by_id(bucket_id) if server_side_encryption is not None: assert server_side_encryption.mode in ( - EncryptionMode.NONE, EncryptionMode.SSE_B2, EncryptionMode.SSE_C + EncryptionMode.NONE, + EncryptionMode.SSE_B2, + EncryptionMode.SSE_C, ) file_info = server_side_encryption.add_key_id_to_file_info(file_info) @@ -2033,8 +2108,11 @@ def _get_bucket_by_name(self, bucket_name): return self.bucket_name_to_bucket[bucket_name] def set_bucket_notification_rules( - self, api_url: str, account_auth_token: str, bucket_id: str, - rules: Iterable[NotificationRule] + self, + api_url: str, + account_auth_token: str, + bucket_id: str, + rules: Iterable[NotificationRule], ): bucket = self._get_bucket_by_id(bucket_id) self._assert_account_auth( @@ -2042,8 +2120,9 @@ def set_bucket_notification_rules( ) return bucket.set_notification_rules(rules) - def get_bucket_notification_rules(self, api_url: str, account_auth_token: str, - bucket_id: str) -> list[NotificationRule]: + def get_bucket_notification_rules( + self, api_url: str, account_auth_token: str, bucket_id: str + ) -> list[NotificationRule]: bucket = self._get_bucket_by_id(bucket_id) self._assert_account_auth( api_url, account_auth_token, bucket.account_id, 'readBucketNotifications' diff --git a/b2sdk/_internal/replication/monitoring.py b/b2sdk/_internal/replication/monitoring.py index 2f6446a16..bae3a2a83 100644 --- a/b2sdk/_internal/replication/monitoring.py +++ b/b2sdk/_internal/replication/monitoring.py @@ -70,26 +70,20 @@ def from_files( source_file_version = source_file.selected_version params.update( { - 'source_replication_status': - source_file_version.replication_status, - 'source_has_hide_marker': - not source_file.is_visible(), - 'source_encryption_mode': - source_file_version.server_side_encryption.mode, - 'source_has_large_metadata': - source_file_version.has_large_header, - 'source_has_file_retention': - source_file_version.file_retention is not NO_RETENTION_FILE_SETTING, - 'source_has_legal_hold': - source_file_version.legal_hold is LegalHold.ON, + 'source_replication_status': source_file_version.replication_status, + 'source_has_hide_marker': not source_file.is_visible(), + 'source_encryption_mode': source_file_version.server_side_encryption.mode, + 'source_has_large_metadata': source_file_version.has_large_header, + 'source_has_file_retention': source_file_version.file_retention + is not NO_RETENTION_FILE_SETTING, + 'source_has_legal_hold': source_file_version.legal_hold is LegalHold.ON, } ) if destination_file: params.update( { - 'destination_replication_status': - destination_file.selected_version.replication_status, + 'destination_replication_status': destination_file.selected_version.replication_status, } ) @@ -99,11 +93,9 @@ def from_files( params.update( { - 'metadata_differs': - source_version.file_info != destination_version.file_info, - 'hash_differs': - (source_version.content_md5 != destination_version.content_md5) or - (source_version.content_sha1 != destination_version.content_sha1) + 'metadata_differs': source_version.file_info != destination_version.file_info, + 'hash_differs': (source_version.content_md5 != 
destination_version.content_md5) + or (source_version.content_sha1 != destination_version.content_sha1), } ) diff --git a/b2sdk/_internal/replication/setting.py b/b2sdk/_internal/replication/setting.py index 489b2a394..86691c565 100644 --- a/b2sdk/_internal/replication/setting.py +++ b/b2sdk/_internal/replication/setting.py @@ -44,7 +44,8 @@ def __post_init__(self): if not (self.MIN_PRIORITY <= self.priority <= self.MAX_PRIORITY): raise ValueError( - 'priority should be within [%d, %d] interval' % ( + 'priority should be within [%d, %d] interval' + % ( self.MIN_PRIORITY, self.MAX_PRIORITY, ) @@ -84,6 +85,7 @@ class ReplicationConfiguration: """ Hold information about bucket replication configuration """ + # configuration as source: rules: list[ReplicationRule] = field(default_factory=list) source_key_id: str | None = None @@ -92,13 +94,13 @@ class ReplicationConfiguration: def __post_init__(self): if self.rules and not self.source_key_id: - raise ValueError("source_key_id must not be empty") + raise ValueError('source_key_id must not be empty') for source, destination in self.source_to_destination_key_mapping.items(): if not source or not destination: raise ValueError( - f"source_to_destination_key_mapping must not contain \ - empty keys or values: ({source}, {destination})" + f'source_to_destination_key_mapping must not contain \ + empty keys or values: ({source}, {destination})' ) @property @@ -159,15 +161,17 @@ def as_dict(self) -> dict: """ result = { - 'asReplicationSource': - { - "replicationRules": [rule.as_dict() for rule in self.rules], - "sourceApplicationKeyId": self.source_key_id, - } if self.is_source else None, - 'asReplicationDestination': - { - 'sourceToDestinationKeyMapping': self.source_to_destination_key_mapping, - } if self.is_destination else None, + 'asReplicationSource': { + 'replicationRules': [rule.as_dict() for rule in self.rules], + 'sourceApplicationKeyId': self.source_key_id, + } + if self.is_source + else None, + 'asReplicationDestination': { + 'sourceToDestinationKeyMapping': self.source_to_destination_key_mapping, + } + if self.is_destination + else None, } return result diff --git a/b2sdk/_internal/replication/setup.py b/b2sdk/_internal/replication/setup.py index cb58a7872..b41502750 100644 --- a/b2sdk/_internal/replication/setup.py +++ b/b2sdk/_internal/replication/setup.py @@ -32,13 +32,15 @@ class ReplicationSetupHelper(metaclass=B2TraceMeta): - """ class with various methods that help with setting up repliction """ + """class with various methods that help with setting up repliction""" + PRIORITY_OFFSET: ClassVar[int] = 5 #: how far to to put the new rule from the existing rules - DEFAULT_PRIORITY: ClassVar[ - int - ] = ReplicationRule.DEFAULT_PRIORITY #: what priority to set if there are no preexisting rules - MAX_PRIORITY: ClassVar[ - int] = ReplicationRule.MAX_PRIORITY #: maximum allowed priority of a replication rule + DEFAULT_PRIORITY: ClassVar[int] = ( + ReplicationRule.DEFAULT_PRIORITY + ) #: what priority to set if there are no preexisting rules + MAX_PRIORITY: ClassVar[int] = ( + ReplicationRule.MAX_PRIORITY + ) #: maximum allowed priority of a replication rule DEFAULT_SOURCE_CAPABILITIES: ClassVar[tuple[str, ...]] = ( 'readFiles', 'readFileLegalHolds', @@ -60,7 +62,6 @@ def setup_both( prefix: str | None = None, include_existing_files: bool = False, ) -> tuple[Bucket, Bucket]: - # setup source key source_key = self._get_source_key( source_bucket, @@ -94,12 +95,17 @@ def setup_destination( ) -> Bucket: api: B2Api = destination_bucket.api - # 
yapf: disable - source_configuration = destination_bucket.replication.get_source_configuration_as_dict( - ) if destination_bucket.replication else {} + source_configuration = ( + destination_bucket.replication.get_source_configuration_as_dict() + if destination_bucket.replication + else {} + ) - destination_configuration = destination_bucket.replication.get_destination_configuration_as_dict( - ) if destination_bucket.replication else {'source_to_destination_key_mapping': {}} + destination_configuration = ( + destination_bucket.replication.get_destination_configuration_as_dict() + if destination_bucket.replication + else {'source_to_destination_key_mapping': {}} + ) keys_to_purge, destination_key = self._get_destination_key( api, @@ -107,12 +113,14 @@ def setup_destination( ) # note: no clean up of keys_to_purge is actually done - destination_configuration['source_to_destination_key_mapping'][source_key_id] = destination_key.id_ + destination_configuration['source_to_destination_key_mapping'][source_key_id] = ( + destination_key.id_ + ) new_replication_configuration = ReplicationConfiguration( **source_configuration, **destination_configuration, ) - # yapf: enable + return destination_bucket.update( if_revision_is=destination_bucket.revision, replication=new_replication_configuration, @@ -126,7 +134,9 @@ def _get_destination_key( ): keys_to_purge = [] if destination_bucket.replication is not None: - current_destination_key_ids = destination_bucket.replication.source_to_destination_key_mapping.values() # yapf: disable + current_destination_key_ids = ( + destination_bucket.replication.source_to_destination_key_mapping.values() + ) else: current_destination_key_ids = [] key = None @@ -138,20 +148,21 @@ def _get_destination_key( if current_destination_key is None: logger.debug( 'zombie key found in replication destination_configuration.source_to_destination_key_mapping: %s', - current_destination_key_id + current_destination_key_id, ) keys_to_purge.append(current_destination_key_id) continue - if current_destination_key.has_capabilities( - cls.DEFAULT_DESTINATION_CAPABILITIES - ) and not current_destination_key.name_prefix: + if ( + current_destination_key.has_capabilities(cls.DEFAULT_DESTINATION_CAPABILITIES) + and not current_destination_key.name_prefix + ): logger.debug('matching destination key found: %s', current_destination_key_id) key = current_destination_key # not breaking here since we want to fill the purge list else: logger.info('non-matching destination key found: %s', current_destination_key) if not key: - logger.debug("no matching key found, making a new one") + logger.debug('no matching key found, making a new one') key = cls._create_destination_key( name=destination_bucket.name[:91] + '-replidst', bucket=destination_bucket, @@ -170,11 +181,12 @@ def setup_source( include_existing_files: bool = False, ) -> Bucket: if prefix is None: - prefix = "" + prefix = '' if source_bucket.replication: current_source_rules = source_bucket.replication.rules - destination_configuration = source_bucket.replication.get_destination_configuration_as_dict( + destination_configuration = ( + source_bucket.replication.get_destination_configuration_as_dict() ) else: current_source_rules = [] diff --git a/b2sdk/_internal/requests/__init__.py b/b2sdk/_internal/requests/__init__.py index b9a71580f..133541785 100644 --- a/b2sdk/_internal/requests/__init__.py +++ b/b2sdk/_internal/requests/__init__.py @@ -52,7 +52,7 @@ def generate(): if self._content_consumed and isinstance(self._content, bool): raise 
StreamConsumedError() elif chunk_size is not None and not isinstance(chunk_size, int): - raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size)) + raise TypeError('chunk_size must be an int, it is instead a %s.' % type(chunk_size)) # simulate reading small chunks of the content reused_chunks = iter_slices(self._content, chunk_size) diff --git a/b2sdk/_internal/requests/included_source_meta.py b/b2sdk/_internal/requests/included_source_meta.py index 9c48a9fb0..34741f26b 100644 --- a/b2sdk/_internal/requests/included_source_meta.py +++ b/b2sdk/_internal/requests/included_source_meta.py @@ -10,15 +10,16 @@ from b2sdk._internal.included_sources import IncludedSourceMeta, add_included_source included_source_meta = IncludedSourceMeta( - 'requests', 'Included in a revised form', { - 'NOTICE': - """Requests + 'requests', + 'Included in a revised form', + { + 'NOTICE': """Requests Copyright 2019 Kenneth Reitz Copyright 2021 Backblaze Inc. Changes made to the original source: requests.models.Response.iter_content has been overridden to pass `decode_content=False` argument to `self.raw.stream` in order to NOT decompress data based on Content-Encoding header""" - } + }, ) add_included_source(included_source_meta) diff --git a/b2sdk/_internal/scan/exception.py b/b2sdk/_internal/scan/exception.py index ff000f8c4..40a0c4b84 100644 --- a/b2sdk/_internal/scan/exception.py +++ b/b2sdk/_internal/scan/exception.py @@ -31,13 +31,10 @@ def __init__(self, filename, encoding): self.encoding = encoding def __str__(self): - return """file name {} cannot be decoded with system encoding ({}). + return f"""file name {self.filename} cannot be decoded with system encoding ({self.encoding}). We think this is an environment error which you should workaround by setting your system encoding properly, for example like this: -export LANG=en_US.UTF-8""".format( - self.filename, - self.encoding, - ) +export LANG=en_US.UTF-8""" class InvalidArgument(B2Error): @@ -55,7 +52,7 @@ def __init__(self, parameter_name, message): self.message = message def __str__(self): - return f"{self.parameter_name} {self.message}" + return f'{self.parameter_name} {self.message}' class UnsupportedFilename(B2Error): @@ -73,12 +70,13 @@ def __init__(self, message, filename): self.message = message def __str__(self): - return f"{self.message}: {self.filename}" + return f'{self.message}: {self.filename}' @contextmanager -def check_invalid_argument(parameter_name: str, message: str, - *exceptions: type[Exception]) -> Iterator[None]: +def check_invalid_argument( + parameter_name: str, message: str, *exceptions: type[Exception] +) -> Iterator[None]: """Raise `InvalidArgument` in case of one of given exception was thrown.""" try: yield diff --git a/b2sdk/_internal/scan/folder.py b/b2sdk/_internal/scan/folder.py index d1a709637..62d4500d5 100644 --- a/b2sdk/_internal/scan/folder.py +++ b/b2sdk/_internal/scan/folder.py @@ -30,20 +30,20 @@ from .policies import DEFAULT_SCAN_MANAGER, ScanPoliciesManager from .report import ProgressReport -DRIVE_MATCHER = re.compile(r"^([A-Za-z]):([/\\])") -ABSOLUTE_PATH_MATCHER = re.compile(r"^(/)|^(\\)") +DRIVE_MATCHER = re.compile(r'^([A-Za-z]):([/\\])') +ABSOLUTE_PATH_MATCHER = re.compile(r'^(/)|^(\\)') RELATIVE_PATH_MATCHER = re.compile( - # "abc" and "xyz" represent anything, including "nothing" - r"^(\.\.[/\\])|" + # ../abc or ..\abc - r"^(\.[/\\])|" + # ./abc or .\abc - r"([/\\]\.\.[/\\])|" + # abc/../xyz or abc\..\xyz or abc\../xyz or abc/..\xyz - r"([/\\]\.[/\\])|" + # abc/./xyz or abc\.\xyz 
or abc\./xyz or abc/.\xyz - r"([/\\]\.\.)$|" + # abc/.. or abc\.. - r"([/\\]\.)$|" + # abc/. or abc\. - r"^(\.\.)$|" + # just ".." - r"([/\\][/\\])|" + # abc\/xyz or abc/\xyz or abc//xyz or abc\\xyz - r"^(\.)$" # just "." -) # yapf: disable + # "abc" and "xyz" represent anything, including "nothing" + r'^(\.\.[/\\])|' # ../abc or ..\abc + + r'^(\.[/\\])|' # ./abc or .\abc + + r'([/\\]\.\.[/\\])|' # abc/../xyz or abc\..\xyz or abc\../xyz or abc/..\xyz + + r'([/\\]\.[/\\])|' # abc/./xyz or abc\.\xyz or abc\./xyz or abc/.\xyz + + r'([/\\]\.\.)$|' # abc/.. or abc\.. + + r'([/\\]\.)$|' # abc/. or abc\. + + r'^(\.\.)$|' # just ".." + + r'([/\\][/\\])|' # abc\/xyz or abc/\xyz or abc//xyz or abc\\xyz + + r'^(\.)$' # just "." +) logger = logging.getLogger(__name__) @@ -59,8 +59,9 @@ class AbstractFolder(metaclass=ABCMeta): """ @abstractmethod - def all_files(self, reporter: ProgressReport | None, - policies_manager=DEFAULT_SCAN_MANAGER) -> Iterator[AbstractPath]: + def all_files( + self, reporter: ProgressReport | None, policies_manager=DEFAULT_SCAN_MANAGER + ) -> Iterator[AbstractPath]: """ Return an iterator over all of the files in the folder, in the order that B2 uses (lexicographic by object path). @@ -147,8 +148,9 @@ def folder_type(self): """ return 'local' - def all_files(self, reporter: ProgressReport | None, - policies_manager=DEFAULT_SCAN_MANAGER) -> Iterator[LocalPath]: + def all_files( + self, reporter: ProgressReport | None, policies_manager=DEFAULT_SCAN_MANAGER + ) -> Iterator[LocalPath]: """ Yield all files. @@ -183,7 +185,7 @@ def make_full_path(self, file_name): # Ensure the new full_path is inside the self.root directory if common_prefix != self.root: - raise UnsupportedFilename("illegal file name", full_path) + raise UnsupportedFilename('illegal file name', full_path) return full_path @@ -303,7 +305,7 @@ def _walk_relative_paths( absolute_path=self.make_full_path(str(relative_file_path)), relative_path=str(relative_file_path), mod_time=file_mod_time, - size=file_size + size=file_size, ) if policies_manager.should_exclude_local_path(local_scan_path): continue # Skip excluded files @@ -368,7 +370,7 @@ def __init__(self, bucket_name, folder_name, api): def all_files( self, reporter: ProgressReport | None, - policies_manager: ScanPoliciesManager = DEFAULT_SCAN_MANAGER + policies_manager: ScanPoliciesManager = DEFAULT_SCAN_MANAGER, ) -> Iterator[B2Path]: """ Yield all files. 
@@ -384,7 +386,7 @@ def all_files( assert file_version.file_name.startswith(self.prefix) if file_version.action == 'start': continue - file_name = file_version.file_name[len(self.prefix):] + file_name = file_version.file_name[len(self.prefix) :] if last_ignored_dir is not None and file_name.startswith(last_ignored_dir): continue @@ -405,7 +407,7 @@ def all_files( yield B2Path( relative_path=current_name, selected_version=current_versions[0], - all_versions=current_versions + all_versions=current_versions, ) current_versions = [] @@ -416,7 +418,7 @@ def all_files( yield B2Path( relative_path=current_name, selected_version=current_versions[0], - all_versions=current_versions + all_versions=current_versions, ) def get_file_versions(self): @@ -431,17 +433,17 @@ def _validate_file_name(self, file_name): # Do not allow relative paths in file names if RELATIVE_PATH_MATCHER.search(file_name): raise UnsupportedFilename( - "scan does not support file names that include relative paths", file_name + 'scan does not support file names that include relative paths', file_name ) # Do not allow absolute paths in file names if ABSOLUTE_PATH_MATCHER.search(file_name): raise UnsupportedFilename( - "scan does not support file names with absolute paths", file_name + 'scan does not support file names with absolute paths', file_name ) # On Windows, do not allow drive letters in file names - if platform.system() == "Windows" and DRIVE_MATCHER.search(file_name): + if platform.system() == 'Windows' and DRIVE_MATCHER.search(file_name): raise UnsupportedFilename( - "scan does not support file names with drive letters", file_name + 'scan does not support file names with drive letters', file_name ) def folder_type(self): diff --git a/b2sdk/_internal/scan/path.py b/b2sdk/_internal/scan/path.py index 58a9d0395..f6a4f8644 100644 --- a/b2sdk/_internal/scan/path.py +++ b/b2sdk/_internal/scan/path.py @@ -29,9 +29,7 @@ def is_visible(self) -> bool: """Is the path visible/not deleted on it's storage""" def __repr__(self): - return '{}({}, {}, {})'.format( - self.__class__.__name__, repr(self.relative_path), repr(self.mod_time), repr(self.size) - ) + return f'{self.__class__.__name__}({repr(self.relative_path)}, {repr(self.mod_time)}, {repr(self.size)})' class LocalPath(AbstractPath): @@ -46,9 +44,10 @@ def is_visible(self) -> bool: def __eq__(self, other): return ( - self.absolute_path == other.absolute_path and - self.relative_path == other.relative_path and self.mod_time == other.mod_time and - self.size == other.size + self.absolute_path == other.absolute_path + and self.relative_path == other.relative_path + and self.mod_time == other.mod_time + and self.size == other.size ) @@ -75,15 +74,17 @@ def size(self) -> int: def __repr__(self): return '{}({}, [{}])'.format( - self.__class__.__name__, self.relative_path, ', '.join( + self.__class__.__name__, + self.relative_path, + ', '.join( f'({repr(fv.id_)}, {repr(fv.mod_time_millis)}, {repr(fv.action)})' for fv in self.all_versions - ) + ), ) def __eq__(self, other): return ( - self.relative_path == other.relative_path and - self.selected_version == other.selected_version and - self.all_versions == other.all_versions + self.relative_path == other.relative_path + and self.selected_version == other.selected_version + and self.all_versions == other.all_versions ) diff --git a/b2sdk/_internal/scan/policies.py b/b2sdk/_internal/scan/policies.py index 93921cb58..41575346a 100644 --- a/b2sdk/_internal/scan/policies.py +++ b/b2sdk/_internal/scan/policies.py @@ -152,7 +152,7 @@ def 
__init__( if include_file_regexes and not exclude_file_regexes: raise InvalidArgument( 'include_file_regexes', - 'cannot be used without exclude_file_regexes at the same time' + 'cannot be used without exclude_file_regexes at the same time', ) with check_invalid_argument( diff --git a/b2sdk/_internal/scan/report.py b/b2sdk/_internal/scan/report.py index 1a408f216..c72c9a18a 100644 --- a/b2sdk/_internal/scan/report.py +++ b/b2sdk/_internal/scan/report.py @@ -137,8 +137,8 @@ def _update_progress(self): message = ' count: %d/%d %s' % ( self.count, self.total_count, - format_and_scale_number(rate, '/s') - ) # yapf: disable + format_and_scale_number(rate, '/s'), + ) self._print_line(message, False) @@ -238,6 +238,7 @@ def sample_report_run(): Generate a sample report. """ import sys + report = ProgressReport(sys.stdout, False) for i in range(20): diff --git a/b2sdk/_internal/scan/scan.py b/b2sdk/_internal/scan/scan.py index 342a2ae5b..bbfe22735 100644 --- a/b2sdk/_internal/scan/scan.py +++ b/b2sdk/_internal/scan/scan.py @@ -86,6 +86,7 @@ class AbstractScanReport(metaclass=ABCMeta): """ Aggregation of valuable information about files after scanning. """ + SCAN_RESULT_CLASS: ClassVar[type] = AbstractScanResult @abstractmethod @@ -99,6 +100,7 @@ class CountAndSampleScanReport(AbstractScanReport): Scan report which groups and counts files by their `AbstractScanResult` and also stores first and last seen examples of such files. """ + counter_by_status: Counter = field(default_factory=Counter) samples_by_status_first: dict[AbstractScanResult, tuple[FileVersion, ...]] = field( default_factory=dict diff --git a/b2sdk/_internal/session.py b/b2sdk/_internal/session.py index 000e723a9..9e47d5262 100644 --- a/b2sdk/_internal/session.py +++ b/b2sdk/_internal/session.py @@ -39,9 +39,10 @@ class TokenType(Enum): class B2Session: """ - A facade that supplies the correct api_url and account_auth_token - to methods of underlying raw_api and reauthorizes if necessary. + A facade that supplies the correct api_url and account_auth_token + to methods of underlying raw_api and reauthorizes if necessary. """ + SQLITE_ACCOUNT_INFO_CLASS = staticmethod(SqliteAccountInfo) B2HTTP_CLASS = staticmethod(B2Http) @@ -49,7 +50,7 @@ def __init__( self, account_info: AbstractAccountInfo | None = None, cache: AbstractCache | None = None, - api_config: B2HttpApiConfig = DEFAULT_HTTP_API_CONFIG + api_config: B2HttpApiConfig = DEFAULT_HTTP_API_CONFIG, ): """ Initialize Session using given account info. @@ -207,8 +208,10 @@ def finish_large_file(self, file_id, part_sha1_array): def get_download_authorization(self, bucket_id, file_name_prefix, valid_duration_in_seconds): return self._wrap_default_token( - self.raw_api.get_download_authorization, bucket_id, file_name_prefix, - valid_duration_in_seconds + self.raw_api.get_download_authorization, + bucket_id, + file_name_prefix, + valid_duration_in_seconds, ) def get_file_info_by_id(self, file_id: str) -> dict[str, Any]: diff --git a/b2sdk/_internal/stream/chained.py b/b2sdk/_internal/stream/chained.py index 796cf362c..90fcbb842 100644 --- a/b2sdk/_internal/stream/chained.py +++ b/b2sdk/_internal/stream/chained.py @@ -16,7 +16,7 @@ class ChainedStream(ReadOnlyStreamMixin, io.IOBase): - """ Chains multiple streams in single stream, sort of what :py:class:`itertools.chain` does for iterators. + """Chains multiple streams in single stream, sort of what :py:class:`itertools.chain` does for iterators. Cleans up buffers of underlying streams when closed. 
@@ -42,7 +42,7 @@ def __init__(self, stream_openers): @property def stream(self): - """ Return currently processed stream. """ + """Return currently processed stream.""" if self._current_stream is None: self._next_stream() return self._current_stream @@ -138,17 +138,17 @@ def close(self): class StreamOpener(metaclass=ABCMeta): - """ Abstract class to define stream opener with cleanup. """ + """Abstract class to define stream opener with cleanup.""" @abstractmethod def __call__(self): - """ Create or open the stream to read and return. + """Create or open the stream to read and return. Can be called multiple times, but streamed data may be cached and reused. """ def cleanup(self): - """ Clean up stream opener after chained stream closes. + """Clean up stream opener after chained stream closes. Can be used for cleaning cached data that are stored in memory to allow resetting chained stream without getting this data more than once, diff --git a/b2sdk/_internal/stream/hashing.py b/b2sdk/_internal/stream/hashing.py index d62c8df63..858774d7e 100644 --- a/b2sdk/_internal/stream/hashing.py +++ b/b2sdk/_internal/stream/hashing.py @@ -70,7 +70,7 @@ def read(self, size=None): if self.hash is not None: # The end of stream was reached, return hash now size = size or len(self.hash) - data += str.encode(self.hash[self.hash_read:self.hash_read + size]) + data += str.encode(self.hash[self.hash_read : self.hash_read + size]) self.hash_read += size return data diff --git a/b2sdk/_internal/sync/action.py b/b2sdk/_internal/sync/action.py index 2b9f3811f..7a7a01e00 100644 --- a/b2sdk/_internal/sync/action.py +++ b/b2sdk/_internal/sync/action.py @@ -57,7 +57,7 @@ def run(self, bucket: Bucket, reporter: ProgressReport, dry_run: bool = False): self.do_report(bucket, reporter) except Exception as e: logger.exception('an exception occurred in a sync action') - reporter.error(str(self) + ": " + repr(e) + ' ' + str(e)) + reporter.error(str(self) + ': ' + repr(e) + ' ' + str(e)) raise # Re-throw so we can identify failed actions @abstractmethod @@ -123,12 +123,12 @@ def get_bytes(self) -> int: @functools.cached_property def _upload_source(self) -> UploadSourceLocalFile: - """ Upload source if the file was to be uploaded in full """ + """Upload source if the file was to be uploaded in full""" # NOTE: We're caching this to ensure that sha1 is not recalculated. 
return UploadSourceLocalFile(self.local_full_path) def get_all_sources(self) -> list[OutboundTransferSource]: - """ Get list of sources required to complete this upload """ + """Get list of sources required to complete this upload""" return [self._upload_source] def do_action(self, bucket: Bucket, reporter: ProgressReport) -> None: @@ -205,8 +205,12 @@ def __init__( :param absolute_minimum_part_size: minimum file part size for large files """ super().__init__( - local_full_path, relative_name, b2_file_name, mod_time_millis, size, - encryption_settings_provider + local_full_path, + relative_name, + b2_file_name, + mod_time_millis, + size, + encryption_settings_provider, ) self.file_version = file_version self.absolute_minimum_part_size = absolute_minimum_part_size @@ -339,11 +343,11 @@ def do_report(self, bucket: Bucket, reporter: ProgressReport) -> None: reporter.print_completion('dnload ' + self.source_path.relative_path) def __str__(self) -> str: - return ( - 'b2_download(%s, %s, %s, %d)' % ( - self.b2_file_name, self.source_path.selected_version.id_, self.local_full_path, - self.source_path.mod_time - ) + return 'b2_download(%s, %s, %s, %d)' % ( + self.b2_file_name, + self.source_path.selected_version.id_, + self.local_full_path, + self.source_path.mod_time, ) @@ -429,11 +433,11 @@ def do_report(self, bucket: Bucket, reporter: ProgressReport) -> None: reporter.print_completion('copy ' + self.source_path.relative_path) def __str__(self) -> str: - return ( - 'b2_copy(%s, %s, %s, %d)' % ( - self.b2_file_name, self.source_path.selected_version.id_, self.dest_b2_file_name, - self.source_path.mod_time - ) + return 'b2_copy(%s, %s, %s, %d)' % ( + self.b2_file_name, + self.source_path.selected_version.id_, + self.dest_b2_file_name, + self.source_path.mod_time, ) @@ -475,7 +479,7 @@ def do_report(self, bucket: Bucket, reporter: SyncReport): :param reporter: a place to report errors """ reporter.update_transfer(1, 0) - reporter.print_completion(f"delete {escape_control_chars(self.relative_name)} {self.note}") + reporter.print_completion(f'delete {escape_control_chars(self.relative_name)} {self.note}') def __str__(self) -> str: return f'b2_delete({self.b2_file_name}, {self.file_id}, {self.note})' diff --git a/b2sdk/_internal/sync/policy.py b/b2sdk/_internal/sync/policy.py index 93b1263bb..724d09c42 100644 --- a/b2sdk/_internal/sync/policy.py +++ b/b2sdk/_internal/sync/policy.py @@ -40,7 +40,8 @@ @unique class NewerFileSyncMode(Enum): - """ Mode of handling files newer on destination than on source """ + """Mode of handling files newer on destination than on source""" + SKIP = 101 #: skip syncing such file REPLACE = 102 #: replace the file on the destination with the (older) file on source RAISE_ERROR = 103 #: raise a non-transient error, failing the sync operation @@ -48,7 +49,8 @@ class NewerFileSyncMode(Enum): @unique class CompareVersionMode(Enum): - """ Mode of comparing versions of files to determine what should be synced and what shouldn't """ + """Mode of comparing versions of files to determine what should be synced and what shouldn't""" + MODTIME = 201 #: use file modification time on source filesystem SIZE = 202 #: compare using file size NONE = 203 #: compare using file name only @@ -58,6 +60,7 @@ class AbstractFileSyncPolicy(metaclass=ABCMeta): """ Abstract policy class. 
""" + DESTINATION_PREFIX = NotImplemented SOURCE_PREFIX = NotImplemented @@ -72,8 +75,7 @@ def __init__( newer_file_mode: NewerFileSyncMode, compare_threshold: int, compare_version_mode: CompareVersionMode = CompareVersionMode.MODTIME, - encryption_settings_provider: - AbstractSyncEncryptionSettingsProvider = SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, + encryption_settings_provider: AbstractSyncEncryptionSettingsProvider = SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, upload_mode: UploadMode = UploadMode.FULL, absolute_minimum_part_size: int | None = None, ): @@ -242,6 +244,7 @@ class DownPolicy(AbstractFileSyncPolicy): """ File is synced down (from the cloud to disk). """ + DESTINATION_PREFIX = 'local://' SOURCE_PREFIX = 'b2://' @@ -258,6 +261,7 @@ class UpPolicy(AbstractFileSyncPolicy): """ File is synced up (from disk the cloud). """ + DESTINATION_PREFIX = 'b2://' SOURCE_PREFIX = 'local://' @@ -331,7 +335,7 @@ def _get_hide_delete_actions(self): ): yield LocalDeleteAction( self._dest_path.relative_path, - self._dest_folder.make_full_path(self._dest_path.relative_path) + self._dest_folder.make_full_path(self._dest_path.relative_path), ) @@ -339,6 +343,7 @@ class DownAndKeepDaysPolicy(DownPolicy): """ File is synced down (from the cloud to disk) and the keepDays flag is SET. """ + pass @@ -346,11 +351,11 @@ class CopyPolicy(AbstractFileSyncPolicy): """ File is copied (server-side). """ + DESTINATION_PREFIX = 'b2://' SOURCE_PREFIX = 'b2://' def _make_transfer_action(self): - return B2CopyAction( self._source_folder.make_full_path(self._source_path.relative_path), cast(B2Path, self._source_path), diff --git a/b2sdk/_internal/sync/report.py b/b2sdk/_internal/sync/report.py index 54433430b..7c6b5dd36 100644 --- a/b2sdk/_internal/sync/report.py +++ b/b2sdk/_internal/sync/report.py @@ -69,16 +69,16 @@ def _update_progress(self): self.compare_count, self.transfer_files, format_and_scale_number(self.transfer_bytes, 'B'), - format_and_scale_number(rate, 'B/s') - ) # yapf: disable + format_and_scale_number(rate, 'B/s'), + ) elif not self.compare_done: message = ' compare: %d/%d files updated: %d files %s %s' % ( self.compare_count, self.total_count, self.transfer_files, format_and_scale_number(self.transfer_bytes, 'B'), - format_and_scale_number(rate, 'B/s') - ) # yapf: disable + format_and_scale_number(rate, 'B/s'), + ) else: message = ' compare: %d/%d files updated: %d/%d files %s %s' % ( self.compare_count, @@ -86,8 +86,8 @@ def _update_progress(self): self.transfer_files, self.total_transfer_files, format_and_scale_fraction(self.transfer_bytes, self.total_transfer_bytes, 'B'), - format_and_scale_number(rate, 'B/s') - ) # yapf: disable + format_and_scale_number(rate, 'B/s'), + ) self._print_line(message, False) def update_compare(self, delta): @@ -176,6 +176,7 @@ def sample_sync_report_run(): Generate a sample report. 
""" import sys + sync_report = SyncReport(sys.stdout, False) for i in range(20): diff --git a/b2sdk/_internal/sync/sync.py b/b2sdk/_internal/sync/sync.py index 553d6dfc9..2ce4f72d5 100644 --- a/b2sdk/_internal/sync/sync.py +++ b/b2sdk/_internal/sync/sync.py @@ -49,7 +49,8 @@ def count_files(local_folder, reporter, policies_manager): @unique class KeepOrDeleteMode(Enum): - """ Mode of dealing with old versions of files on the destination """ + """Mode of dealing with old versions of files on the destination""" + DELETE = 301 #: delete the old version as soon as the new one has been uploaded KEEP_BEFORE_DELETE = 302 #: keep the old versions of the file for a configurable number of days before deleting them, always keeping the newest version NO_DELETE = 303 #: keep old versions of the file, do not delete anything @@ -118,7 +119,9 @@ def __init__( self.compare_threshold = compare_threshold or 0 self.dry_run = dry_run self.allow_empty_source = allow_empty_source - self.policies_manager = policies_manager # actually it should be called scan_policies_manager + self.policies_manager = ( + policies_manager # actually it should be called scan_policies_manager + ) self.sync_policy_manager = sync_policy_manager self.max_workers = max_workers self.upload_mode = upload_mode @@ -141,7 +144,10 @@ def _validate(self): 'must be one of :%s' % KeepOrDeleteMode.__members__, ) - if self.keep_days_or_delete == KeepOrDeleteMode.KEEP_BEFORE_DELETE and self.keep_days is None: + if ( + self.keep_days_or_delete == KeepOrDeleteMode.KEEP_BEFORE_DELETE + and self.keep_days is None + ): raise InvalidArgument( 'keep_days', 'is required when keep_days_or_delete is %s' % KeepOrDeleteMode.KEEP_BEFORE_DELETE, @@ -159,8 +165,7 @@ def sync_folders( dest_folder: AbstractFolder, now_millis: int, reporter: SyncReport | None, - encryption_settings_provider: - AbstractSyncEncryptionSettingsProvider = SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, + encryption_settings_provider: AbstractSyncEncryptionSettingsProvider = SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, ): """ Syncs two folders. 
Always ensures that every file in the @@ -234,8 +239,7 @@ def _make_folder_sync_actions( now_millis: int, reporter: SyncReport, policies_manager: ScanPoliciesManager = DEFAULT_SCAN_MANAGER, - encryption_settings_provider: - AbstractSyncEncryptionSettingsProvider = SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, + encryption_settings_provider: AbstractSyncEncryptionSettingsProvider = SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, ): """ Yield a sequence of actions that will sync the destination @@ -248,8 +252,10 @@ def _make_folder_sync_actions( :param policies_manager: object which decides which files to process :param encryption_settings_provider: encryption setting provider """ - if self.keep_days_or_delete == KeepOrDeleteMode.KEEP_BEFORE_DELETE and dest_folder.folder_type( - ) == 'local': + if ( + self.keep_days_or_delete == KeepOrDeleteMode.KEEP_BEFORE_DELETE + and dest_folder.folder_type() == 'local' + ): raise InvalidArgument('keep_days_or_delete', 'cannot be used for local files') source_type = source_folder.folder_type() @@ -306,8 +312,7 @@ def _make_file_sync_actions( source_folder: AbstractFolder, dest_folder: AbstractFolder, now_millis: int, - encryption_settings_provider: - AbstractSyncEncryptionSettingsProvider = SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, + encryption_settings_provider: AbstractSyncEncryptionSettingsProvider = SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, ): """ Yields the sequence of actions needed to sync the two files diff --git a/b2sdk/_internal/transfer/emerge/emerger.py b/b2sdk/_internal/transfer/emerge/emerger.py index 567e98e4d..2201655c4 100644 --- a/b2sdk/_internal/transfer/emerge/emerger.py +++ b/b2sdk/_internal/transfer/emerge/emerger.py @@ -100,8 +100,9 @@ def _emerge( all_write_intents = write_intents_iterable if check_first_intent_for_sha1: write_intents_iterator = iter(all_write_intents) - large_file_sha1_intents_for_check, all_write_intents = \ - iterator_peek(write_intents_iterator, 2) + large_file_sha1_intents_for_check, all_write_intents = iterator_peek( + write_intents_iterator, 2 + ) emerge_plan = emerge_function(planner, all_write_intents) diff --git a/b2sdk/_internal/transfer/emerge/exception.py b/b2sdk/_internal/transfer/emerge/exception.py index 29728024c..4b3fa1fe3 100644 --- a/b2sdk/_internal/transfer/emerge/exception.py +++ b/b2sdk/_internal/transfer/emerge/exception.py @@ -16,4 +16,5 @@ class UnboundStreamBufferTimeout(B2SimpleError): """ Raised when there is no space for a new buffer for a certain amount of time. """ + pass diff --git a/b2sdk/_internal/transfer/emerge/executor.py b/b2sdk/_internal/transfer/emerge/executor.py index f1b24516f..21596e295 100644 --- a/b2sdk/_internal/transfer/emerge/executor.py +++ b/b2sdk/_internal/transfer/emerge/executor.py @@ -335,7 +335,7 @@ def _find_matching_unfinished_file( :param custom_upload_timestamp: The custom timestamp for the upload, if any. :param check_file_info_without_large_file_sha1: A flag indicating whether the file information should be checked without the `large_file_sha1`. :param eager_mode: A flag indicating whether the first matching file should be returned. - + :return: A tuple of the best matching unfinished file and its finished parts. If no match is found, returns `None`. 
""" @@ -356,12 +356,13 @@ def _find_matching_unfinished_file( file_info_without_large_file_sha1 = self._get_file_info_without_large_file_sha1( file_info ) - if file_info_without_large_file_sha1 != self._get_file_info_without_large_file_sha1( - file_.file_info + if ( + file_info_without_large_file_sha1 + != self._get_file_info_without_large_file_sha1(file_.file_info) ): logger.debug( 'Rejecting %s: file info mismatch after dropping `large_file_sha1`', - file_.file_id + file_.file_id, ) continue else: @@ -384,14 +385,16 @@ def _find_matching_unfinished_file( logger.debug('Rejecting %s: retention mismatch', file_.file_id) continue - if custom_upload_timestamp is not None and file_.upload_timestamp != custom_upload_timestamp: + if ( + custom_upload_timestamp is not None + and file_.upload_timestamp != custom_upload_timestamp + ): logger.debug('Rejecting %s: custom_upload_timestamp mismatch', file_.file_id) continue finished_parts = {} for part in self.services.large_file.list_parts(file_.file_id): - emerge_part = emerge_parts_dict.get(part.part_number) if emerge_part is None: @@ -399,7 +402,8 @@ def _find_matching_unfinished_file( # so we can't resume this upload logger.debug( 'Rejecting %s: part %s not found in emerge parts, giving up.', - file_.file_id, part.part_number + file_.file_id, + part.part_number, ) finished_parts = None break @@ -451,11 +455,11 @@ def _find_unfinished_file_by_plan_id( """ Search for a matching unfinished large file by plan_id in the specified bucket. - This function aims to locate a matching unfinished large file using the plan_id and the supplied parameters. - It's used to resume an interrupted upload, centralizing the shared logic between `_find_unfinished_file_by_plan_id` + This function aims to locate a matching unfinished large file using the plan_id and the supplied parameters. + It's used to resume an interrupted upload, centralizing the shared logic between `_find_unfinished_file_by_plan_id` and `_match_unfinished_file_if_possible`. - In case a matching file is found but has inconsistencies (for example, mismatching file info or encryption settings), + In case a matching file is found but has inconsistencies (for example, mismatching file info or encryption settings), the function checks if 'plan_id' is in file_info, as this is a prerequisite. :param bucket_id: The identifier of the bucket where the unfinished file resides. @@ -466,7 +470,7 @@ def _find_unfinished_file_by_plan_id( :param file_retention: The retention settings for the file, if any. :param legal_hold: The legal hold status of the file, if any. :param custom_upload_timestamp: The custom timestamp for the upload, if any. - + :return: A tuple of the best matching unfinished file and its finished parts. If no match is found, it returns `None`. """ if 'plan_id' not in file_info: @@ -528,7 +532,7 @@ def _match_unfinished_file_if_possible( :param file_retention: The retention settings for the file, if applicable. :param legal_hold: The legal hold status of the file, if applicable. :param custom_upload_timestamp: The custom timestamp for the upload, if set. - + :return: A tuple of the best matching unfinished file and its finished parts. If no match is found, returns `None`. 
""" logger.debug('Checking for matching unfinished large files for %s...', file_name) @@ -582,7 +586,7 @@ def create_upload_execution_step(self, stream_opener, stream_length=None, stream self.emerge_execution, stream_opener, stream_length=stream_length, - stream_sha1=stream_sha1 + stream_sha1=stream_sha1, ) @@ -669,7 +673,7 @@ def __init__( part_number, large_file_id, large_file_upload_state, - finished_parts=None + finished_parts=None, ): super().__init__(emerge_execution) self.copy_source_range = copy_source_range @@ -728,7 +732,7 @@ def __init__( large_file_upload_state, stream_length=None, stream_sha1=None, - finished_parts=None + finished_parts=None, ): super().__init__(emerge_execution) self.stream_opener = stream_opener diff --git a/b2sdk/_internal/transfer/emerge/planner/part_definition.py b/b2sdk/_internal/transfer/emerge/planner/part_definition.py index 9bef7faa6..e46fff408 100644 --- a/b2sdk/_internal/transfer/emerge/planner/part_definition.py +++ b/b2sdk/_internal/transfer/emerge/planner/part_definition.py @@ -50,13 +50,8 @@ def __init__(self, upload_source: UnboundSourceBytes, relative_offset, length): def __repr__(self): return ( - '<{classname} upload_source={upload_source} relative_offset={relative_offset} ' - 'length={length}>' - ).format( - classname=self.__class__.__name__, - upload_source=repr(self.upload_source), - relative_offset=self.relative_offset, - length=self.length, + f'<{self.__class__.__name__} upload_source={repr(self.upload_source)} relative_offset={self.relative_offset} ' + f'length={self.length}>' ) def get_length(self): @@ -144,13 +139,8 @@ def __init__(self, copy_source, relative_offset, length): def __repr__(self): return ( - '<{classname} copy_source={copy_source} relative_offset={relative_offset} ' - 'length={length}>' - ).format( - classname=self.__class__.__name__, - copy_source=repr(self.copy_source), - relative_offset=self.relative_offset, - length=self.length, + f'<{self.__class__.__name__} copy_source={repr(self.copy_source)} relative_offset={self.relative_offset} ' + f'length={self.length}>' ) def get_length(self): diff --git a/b2sdk/_internal/transfer/emerge/planner/planner.py b/b2sdk/_internal/transfer/emerge/planner/planner.py index 4282d9100..2a932ec19 100644 --- a/b2sdk/_internal/transfer/emerge/planner/planner.py +++ b/b2sdk/_internal/transfer/emerge/planner/planner.py @@ -38,7 +38,7 @@ class UploadBuffer: - """ data container used by EmergePlanner for temporary storage of write intents """ + """data container used by EmergePlanner for temporary storage of write intents""" def __init__(self, start_offset, buff=None): self._start_offset = start_offset @@ -83,7 +83,7 @@ def get_slice(self, start_idx=None, end_idx=None, start_offset=None): if start_idx == 0: start_offset = self.start_offset else: - start_offset = self._buff[start_idx - 1:start_idx][0][1] + start_offset = self._buff[start_idx - 1 : start_idx][0][1] return self.__class__(start_offset, buff_slice) @@ -92,7 +92,7 @@ def _filter_out_none(*args): class EmergePlanner: - """ Creates a list of actions required for advanced creation of an object in the cloud from an iterator of write intent objects """ + """Creates a list of actions required for advanced creation of an object in the cloud from an iterator of write intent objects""" def __init__( self, @@ -102,9 +102,14 @@ def __init__( ): # ensure default values do not break min<=recommended<=max condition, # while respecting user input and not auto fixing if something was provided explicitly - self.min_part_size = min( - 
DEFAULT_MIN_PART_SIZE, *_filter_out_none(recommended_upload_part_size, max_part_size) - ) if min_part_size is None else min_part_size + self.min_part_size = ( + min( + DEFAULT_MIN_PART_SIZE, + *_filter_out_none(recommended_upload_part_size, max_part_size), + ) + if min_part_size is None + else min_part_size + ) self.recommended_upload_part_size = recommended_upload_part_size or max( DEFAULT_RECOMMENDED_UPLOAD_PART_SIZE, self.min_part_size ) @@ -113,11 +118,11 @@ def __init__( ) if self.min_part_size > self.recommended_upload_part_size: raise InvalidUserInput( - f"min_part_size value ({self.min_part_size}) exceeding recommended_upload_part_size value ({self.recommended_upload_part_size})" + f'min_part_size value ({self.min_part_size}) exceeding recommended_upload_part_size value ({self.recommended_upload_part_size})' ) if self.recommended_upload_part_size > self.max_part_size: raise InvalidUserInput( - f"recommended_upload_part_size value ({self.recommended_upload_part_size}) exceeding max_part_size value ({self.max_part_size})" + f'recommended_upload_part_size value ({self.recommended_upload_part_size}) exceeding max_part_size value ({self.max_part_size})' ) @classmethod @@ -126,7 +131,7 @@ def from_account_info( account_info: AbstractAccountInfo, min_part_size=None, recommended_upload_part_size=None, - max_part_size=None + max_part_size=None, ): if recommended_upload_part_size is None: recommended_upload_part_size = account_info.get_recommended_part_size() @@ -156,10 +161,12 @@ def get_emerge_plan(self, write_intents): min( ceil(1.5 * max_destination_offset / 10000), self.max_part_size, - ) + ), ) assert self.min_part_size <= self.recommended_upload_part_size <= self.max_part_size, ( - self.min_part_size, self.recommended_upload_part_size, self.max_part_size + self.min_part_size, + self.recommended_upload_part_size, + self.max_part_size, ) return self._get_emerge_plan(write_intents, EmergePlan) @@ -296,7 +303,7 @@ def _get_emerge_parts(self, intent_fragments_iterator): yield self._get_upload_part(upload_buffer_part) def _get_upload_part(self, upload_buffer): - """ Build emerge part from upload buffer. """ + """Build emerge part from upload buffer.""" if upload_buffer.intent_count() == 1 and upload_buffer.get_intent(0).is_upload(): intent = upload_buffer.get_intent(0) relative_offset = upload_buffer.start_offset - intent.destination_offset @@ -320,7 +327,7 @@ def _get_upload_part(self, upload_buffer): return EmergePart(definition) def _get_copy_parts(self, copy_intent, start_offset, end_offset): - """ Split copy intent to emerge parts. """ + """Split copy intent to emerge parts.""" fragment_length = end_offset - start_offset part_count = int(fragment_length / self.max_part_size) last_part_length = fragment_length % self.max_part_size @@ -347,7 +354,7 @@ def _get_copy_parts(self, copy_intent, start_offset, end_offset): relative_offset += part_size def _buff_split(self, upload_buffer): - """ Split upload buffer to parts candidates - smaller upload buffers. + """Split upload buffer to parts candidates - smaller upload buffers. :rtype iterator[b2sdk._internal.transfer.emerge.planner.planner.UploadBuffer]: """ @@ -365,7 +372,7 @@ def _buff_split(self, upload_buffer): yield head_buff def _buff_partition(self, upload_buffer): - """ Split upload buffer to two parts (smaller upload buffers). + """Split upload buffer to two parts (smaller upload buffers). In result left part cannot be split more, and nothing can be assumed about right part. 
@@ -390,7 +397,7 @@ def _buff_partition(self, upload_buffer): return left_buff, UploadBuffer(left_buff.end_offset) def _select_intent_fragments(self, write_intent_iterator): - """ Select overlapping write intent fragments to use. + """Select overlapping write intent fragments to use. To solve overlapping intents selection, intents can be split to smaller fragments. Those fragments are yielded as soon as decision can be made to use them, @@ -439,7 +446,10 @@ def _select_intent_fragments(self, write_intent_iterator): ) if incoming_intent is None: - yield None, None # lets yield sentinel for cleaner `_get_emerge_parts` implementation + yield ( + None, + None, + ) # lets yield sentinel for cleaner `_get_emerge_parts` implementation return if incoming_intent.is_upload(): upload_intents_state.add(incoming_intent) @@ -449,7 +459,7 @@ def _select_intent_fragments(self, write_intent_iterator): raise RuntimeError('This should not happen at all!') def _merge_intent_fragments(self, start_offset, upload_intents, copy_intents): - """ Select "competing" upload and copy fragments. + """Select "competing" upload and copy fragments. Upload and copy fragments may overlap so we need to choose right one to use - copy fragments are prioritized unless this fragment is unprotected @@ -486,7 +496,7 @@ def _merge_intent_fragments(self, start_offset, upload_intents, copy_intents): return def _validatation_iterator(self, write_intents): - """ Iterate over write intents and validate length and order. """ + """Iterate over write intents and validate length and order.""" last_offset = 0 for write_intent in write_intents: if write_intent.length is None: @@ -498,7 +508,7 @@ def _validatation_iterator(self, write_intents): class IntentsState: - """ Store and process state of incoming write intents to solve + """Store and process state of incoming write intents to solve overlapping intents selection in streaming manner. It does not check if intents are of the same kind (upload/copy), but the intention @@ -520,7 +530,7 @@ def __init__(self, protected_intent_length=0): self._next_intent_end = None def add(self, incoming_intent): - """ Add incoming intent to state. + """Add incoming intent to state. It has to called *after* ``IntentsState.state_update`` but it is not verified. """ @@ -536,7 +546,7 @@ def add(self, incoming_intent): self._set_next_intent(incoming_intent) def state_update(self, last_sent_offset, incoming_offset): - """ Update the state using incoming intent offset. + """Update the state using incoming intent offset. It has to be called *before* ``IntentsState.add`` and even if incoming intent would not be added to this intents state. 
It would yield a state of this stream @@ -561,9 +571,11 @@ def state_update(self, last_sent_offset, incoming_offset): return if ( - self._current_intent is None and self._next_intent is not None and ( - self._next_intent.destination_offset != effective_incoming_offset or - incoming_offset is None + self._current_intent is None + and self._next_intent is not None + and ( + self._next_intent.destination_offset != effective_incoming_offset + or incoming_offset is None ) ): self._set_current_intent(self._next_intent, last_sent_offset) @@ -571,8 +583,9 @@ def state_update(self, last_sent_offset, incoming_offset): # current and next can be both not None at this point only if they overlap if ( - self._current_intent is not None and self._next_intent is not None and - effective_incoming_offset > self._current_intent_end + self._current_intent is not None + and self._next_intent is not None + and effective_incoming_offset > self._current_intent_end ): # incoming intent does not overlap with current intent # so we switch to next because we are sure that we will have to use it anyway @@ -619,7 +632,7 @@ def _set_next_intent(self, intent): self._next_intent_end = None def _is_current_intent_protected(self): - """ States if current intent is protected. + """States if current intent is protected. Intent can be split to smaller fragments, but to choose upload over "small copy" we need to know for fragment if it is a "small copy" or not. In result of solving diff --git a/b2sdk/_internal/transfer/emerge/planner/upload_subpart.py b/b2sdk/_internal/transfer/emerge/planner/upload_subpart.py index cc2bf534d..218ff446d 100644 --- a/b2sdk/_internal/transfer/emerge/planner/upload_subpart.py +++ b/b2sdk/_internal/transfer/emerge/planner/upload_subpart.py @@ -26,13 +26,8 @@ def __init__(self, outbound_source, relative_offset, length): def __repr__(self): return ( - '<{classname} outbound_source={outbound_source} relative_offset={relative_offset} ' - 'length={length}>' - ).format( - classname=self.__class__.__name__, - outbound_source=repr(self.outbound_source), - relative_offset=self.relative_offset, - length=self.length, + f'<{self.__class__.__name__} outbound_source={repr(self.outbound_source)} relative_offset={self.relative_offset} ' + f'length={self.length}>' ) @abstractmethod diff --git a/b2sdk/_internal/transfer/emerge/unbound_write_intent.py b/b2sdk/_internal/transfer/emerge/unbound_write_intent.py index 306202a6a..68f1e9fd2 100644 --- a/b2sdk/_internal/transfer/emerge/unbound_write_intent.py +++ b/b2sdk/_internal/transfer/emerge/unbound_write_intent.py @@ -125,7 +125,12 @@ def __init__( :param queue_timeout_seconds: Iterator will wait at most this many seconds for an empty slot for a buffer. After that time it's considered an error. """ - assert queue_size >= 1 and read_size > 0 and buffer_size_bytes > 0 and queue_timeout_seconds > 0.0 + assert ( + queue_size >= 1 + and read_size > 0 + and buffer_size_bytes > 0 + and queue_timeout_seconds > 0.0 + ) self.read_only_source = read_only_source self.read_size = read_size diff --git a/b2sdk/_internal/transfer/emerge/write_intent.py b/b2sdk/_internal/transfer/emerge/write_intent.py index 6d8d4835b..99df58f4d 100644 --- a/b2sdk/_internal/transfer/emerge/write_intent.py +++ b/b2sdk/_internal/transfer/emerge/write_intent.py @@ -13,7 +13,7 @@ class WriteIntent: - """ Wrapper for outbound source that defines destination offset. 
""" + """Wrapper for outbound source that defines destination offset.""" def __init__(self, outbound_source, destination_offset=0): """ @@ -33,7 +33,7 @@ def __repr__(self): @property def length(self): - """ Length of the write intent. + """Length of the write intent. :rtype: int """ @@ -41,21 +41,21 @@ def length(self): @property def destination_end_offset(self): - """ Offset of source end in destination file. + """Offset of source end in destination file. :rtype: int """ return self.destination_offset + self.length def is_copy(self): - """ States if outbound source is remote source and requires copying. + """States if outbound source is remote source and requires copying. :rtype: bool """ return self.outbound_source.is_copy() def is_upload(self): - """ States if outbound source is local source and requires uploading. + """States if outbound source is local source and requires uploading. :rtype: bool """ @@ -76,7 +76,7 @@ def get_content_sha1(self) -> Sha1HexDigest | None: @classmethod def wrap_sources_iterator(cls, outbound_sources_iterator): - """ Helper that wraps outbound sources iterator with write intents. + """Helper that wraps outbound sources iterator with write intents. Can be used in cases similar to concatenate to automatically compute destination offsets diff --git a/b2sdk/_internal/transfer/inbound/download_manager.py b/b2sdk/_internal/transfer/inbound/download_manager.py index 0b21e4821..14909f38c 100644 --- a/b2sdk/_internal/transfer/inbound/download_manager.py +++ b/b2sdk/_internal/transfer/inbound/download_manager.py @@ -48,7 +48,7 @@ def __init__( write_buffer_size: int | None = None, check_hash: bool = True, max_download_streams_per_file: int | None = None, - **kwargs + **kwargs, ): """ Initialize the DownloadManager using the given services object. 
diff --git a/b2sdk/_internal/transfer/inbound/downloaded_file.py b/b2sdk/_internal/transfer/inbound/downloaded_file.py index ec23df13c..7242b19df 100644 --- a/b2sdk/_internal/transfer/inbound/downloaded_file.py +++ b/b2sdk/_internal/transfer/inbound/downloaded_file.py @@ -170,15 +170,19 @@ def __init__( self.check_hash = check_hash def _validate_download(self, bytes_read, actual_sha1): - if self.download_version.content_encoding is not None and self.download_version.api.api_config.decode_content: + if ( + self.download_version.content_encoding is not None + and self.download_version.api.api_config.decode_content + ): return if self.range_ is None: if bytes_read != self.download_version.content_length: raise TruncatedOutput(bytes_read, self.download_version.content_length) if ( - self.check_hash and self.download_version.content_sha1 != 'none' and - actual_sha1 != self.download_version.content_sha1 + self.check_hash + and self.download_version.content_sha1 != 'none' + and actual_sha1 != self.download_version.content_sha1 ): raise ChecksumMismatch( checksum_type='sha1', @@ -257,7 +261,8 @@ def save_to( if is_stdout and _IS_WINDOWS: if self.write_buffer_size and self.write_buffer_size not in ( - -1, io.DEFAULT_BUFFER_SIZE + -1, + io.DEFAULT_BUFFER_SIZE, ): logger.warning( 'Unable to set arbitrary write_buffer_size for stdout on Windows' diff --git a/b2sdk/_internal/transfer/inbound/downloader/abstract.py b/b2sdk/_internal/transfer/inbound/downloader/abstract.py index 35601b8b8..86c71d22d 100644 --- a/b2sdk/_internal/transfer/inbound/downloader/abstract.py +++ b/b2sdk/_internal/transfer/inbound/downloader/abstract.py @@ -64,20 +64,23 @@ def __init__( max_chunk_size: int | None = None, align_factor: int | None = None, check_hash: bool = True, - **kwargs + **kwargs, ): align_factor = align_factor or self.DEFAULT_ALIGN_FACTOR assert force_chunk_size is not None or ( - min_chunk_size is not None and max_chunk_size is not None and - 0 < min_chunk_size <= max_chunk_size and max_chunk_size >= align_factor + min_chunk_size is not None + and max_chunk_size is not None + and 0 < min_chunk_size <= max_chunk_size + and max_chunk_size >= align_factor ) self._min_chunk_size = min_chunk_size self._max_chunk_size = max_chunk_size self._forced_chunk_size = force_chunk_size self._align_factor = align_factor self._check_hash = check_hash - self._thread_pool = thread_pool if thread_pool is not None \ - else self.DEFAULT_THREAD_POOL_CLASS() + self._thread_pool = ( + thread_pool if thread_pool is not None else self.DEFAULT_THREAD_POOL_CLASS() + ) super().__init__(**kwargs) def _get_hasher(self): @@ -113,7 +116,11 @@ def is_suitable(self, download_version: DownloadVersion, allow_seeking: bool): """ if self.REQUIRES_SEEKING and not allow_seeking: return False - if not self.SUPPORTS_DECODE_CONTENT and download_version.content_encoding and download_version.api.api_config.decode_content: + if ( + not self.SUPPORTS_DECODE_CONTENT + and download_version.content_encoding + and download_version.api.api_config.decode_content + ): return False return True diff --git a/b2sdk/_internal/transfer/inbound/downloader/parallel.py b/b2sdk/_internal/transfer/inbound/downloader/parallel.py index cd3f0e74f..a3ae8c0f5 100644 --- a/b2sdk/_internal/transfer/inbound/downloader/parallel.py +++ b/b2sdk/_internal/transfer/inbound/downloader/parallel.py @@ -39,6 +39,7 @@ class ParallelDownloader(AbstractDownloader): Each part is downloaded by its own thread, while all writes are done by additional dedicated thread. 
This can increase performance even for a small file, as fetching & writing can be done in parallel. """ + # situations to consider: # # local file start local file end @@ -151,7 +152,7 @@ def _finish_hashing(self, first_part, file, hasher, content_length): if not data: break if current_offset + len(data) >= last_offset: - to_hash = data[:last_offset - current_offset] + to_hash = data[: last_offset - current_offset] stop = True else: to_hash = data @@ -161,8 +162,15 @@ def _finish_hashing(self, first_part, file, hasher, content_length): break def _get_parts( - self, response, session, writer, hasher, first_part, parts_to_download, chunk_size, - encryption + self, + response, + session, + writer, + hasher, + first_part, + parts_to_download, + chunk_size, + encryption, ): stream = self._thread_pool.submit( download_first_part, @@ -200,7 +208,7 @@ def _get_parts( for stream in streams_futures.done: stream.result() except Exception: - if platform.python_implementation() == "PyPy": + if platform.python_implementation() == 'PyPy': # Await all threads to avoid PyPy hanging bug. # https://github.com/pypy/pypy/issues/4994#issuecomment-2258962665 futures.wait(streams_futures.not_done) @@ -354,7 +362,7 @@ def download_first_part( predicted_bytes_read = bytes_read + len(data) if predicted_bytes_read > actual_part_size: - to_write = data[:actual_part_size - bytes_read] + to_write = data[: actual_part_size - bytes_read] part_not_completed = False else: to_write = data @@ -375,7 +383,11 @@ def download_first_part( cloud_range = starting_cloud_range.subrange(bytes_read, actual_part_size - 1) logger.debug( 'download part %s %s attempt: %i, bytes read already: %i. Getting range %s now.', - url, part_to_download, attempt, bytes_read, cloud_range + url, + part_to_download, + attempt, + bytes_read, + cloud_range, ) try: with session.download_file_from_url( @@ -403,8 +415,11 @@ def download_first_part( should_retry = e.should_retry_http() if isinstance(e, B2Error) else True if should_retry and attempt < max_attempts: logger.debug( - 'Download of %s %s attempt %d failed with %s, retrying', url, - part_to_download, attempt, e + 'Download of %s %s attempt %d failed with %s, retrying', + url, + part_to_download, + attempt, + e, ) else: raise @@ -413,8 +428,12 @@ def download_first_part( if bytes_read != actual_part_size: logger.error( - "Failed to download %s %s; Downloaded %d/%d after %d attempts", url, part_to_download, - bytes_read, actual_part_size, attempt + 'Failed to download %s %s; Downloaded %d/%d after %d attempts', + url, + part_to_download, + bytes_read, + actual_part_size, + attempt, ) raise TruncatedOutput( bytes_read=bytes_read, @@ -422,8 +441,12 @@ def download_first_part( ) else: logger.debug( - "Successfully downloaded %s %s; Downloaded %d/%d after %d attempts", url, - part_to_download, bytes_read, actual_part_size, attempt + 'Successfully downloaded %s %s; Downloaded %d/%d after %d attempts', + url, + part_to_download, + bytes_read, + actual_part_size, + attempt, ) @@ -462,8 +485,12 @@ def download_non_first_part( attempt += 1 cloud_range = starting_cloud_range.subrange(bytes_read, actual_part_size - 1) logger.debug( - 'download part %s %s attempt: %i, bytes read already: %i. Getting range %s now.', url, - part_to_download, attempt, bytes_read, cloud_range + 'download part %s %s attempt: %i, bytes read already: %i. 
Getting range %s now.', + url, + part_to_download, + attempt, + bytes_read, + cloud_range, ) with stats_collector.total: @@ -490,8 +517,11 @@ def download_non_first_part( should_retry = e.should_retry_http() if isinstance(e, B2Error) else True if should_retry and attempt < max_attempts: logger.debug( - 'Download of %s %s attempt %d failed with %s, retrying', url, - part_to_download, attempt, e + 'Download of %s %s attempt %d failed with %s, retrying', + url, + part_to_download, + attempt, + e, ) else: raise @@ -500,8 +530,12 @@ def download_non_first_part( if bytes_read != actual_part_size: logger.error( - "Failed to download %s %s; Downloaded %d/%d after %d attempts", url, part_to_download, - bytes_read, actual_part_size, attempt + 'Failed to download %s %s; Downloaded %d/%d after %d attempts', + url, + part_to_download, + bytes_read, + actual_part_size, + attempt, ) raise TruncatedOutput( bytes_read=bytes_read, @@ -509,8 +543,12 @@ def download_non_first_part( ) else: logger.debug( - "Successfully downloaded %s %s; Downloaded %d/%d after %d attempts", url, - part_to_download, bytes_read, actual_part_size, attempt + 'Successfully downloaded %s %s; Downloaded %d/%d after %d attempts', + url, + part_to_download, + bytes_read, + actual_part_size, + attempt, ) diff --git a/b2sdk/_internal/transfer/inbound/downloader/simple.py b/b2sdk/_internal/transfer/inbound/downloader/simple.py index a0597de27..87fabac05 100644 --- a/b2sdk/_internal/transfer/inbound/downloader/simple.py +++ b/b2sdk/_internal/transfer/inbound/downloader/simple.py @@ -24,7 +24,6 @@ class SimpleDownloader(AbstractDownloader): - REQUIRES_SEEKING = False SUPPORTS_DECODE_CONTENT = True @@ -51,7 +50,9 @@ def _download( bytes_read = response.raw.tell() response.close() - assert actual_size >= 1 # code below does `actual_size - 1`, but it should never reach that part with an empty file + assert ( + actual_size >= 1 + ) # code below does `actual_size - 1`, but it should never reach that part with an empty file # now, normally bytes_read == download_version.content_length, but sometimes there is a timeout # or something and the server closes connection, while neither tcp or http have a problem @@ -68,7 +69,10 @@ def _download( # but this is a very rare case and so it is not worth the optimization logger.debug( 're-download attempts remaining: %i, bytes read: %i (decoded: %i). 
Getting range %s now.', - retries_left, bytes_read, decoded_bytes_read, new_range + retries_left, + bytes_read, + decoded_bytes_read, + new_range, ) with session.download_file_from_url( response.request.url, diff --git a/b2sdk/_internal/transfer/inbound/downloader/stats_collector.py b/b2sdk/_internal/transfer/inbound/downloader/stats_collector.py index ba960d8de..e47262036 100644 --- a/b2sdk/_internal/transfer/inbound/downloader/stats_collector.py +++ b/b2sdk/_internal/transfer/inbound/downloader/stats_collector.py @@ -66,8 +66,9 @@ def report(self): if self.read.has_any_entry: logger.info('download stats | %s | TTFB: %.3f ms', self, self.read.latest_ms) logger.info( - 'download stats | %s | read() without TTFB: %.3f ms', self, - (self.read.sum_of_all_entries - self.read.latest_entry) / self.read.TO_MS + 'download stats | %s | read() without TTFB: %.3f ms', + self, + (self.read.sum_of_all_entries - self.read.latest_entry) / self.read.TO_MS, ) if self.other.has_any_entry: logger.info( @@ -76,9 +77,11 @@ def report(self): if self.write.has_any_entry: logger.info('download stats | %s | write() total: %.3f ms', self, self.write.sum_ms) if self.total.has_any_entry: - basic_operation_time = self.write.sum_of_all_entries \ - + self.other.sum_of_all_entries \ - + self.read.sum_of_all_entries + basic_operation_time = ( + self.write.sum_of_all_entries + + self.other.sum_of_all_entries + + self.read.sum_of_all_entries + ) overhead = self.total.sum_of_all_entries - basic_operation_time logger.info( 'download stats | %s | overhead: %.3f ms', self, overhead / self.total.TO_MS diff --git a/b2sdk/_internal/transfer/outbound/copy_manager.py b/b2sdk/_internal/transfer/outbound/copy_manager.py index b027b2b56..1eb7b2e33 100644 --- a/b2sdk/_internal/transfer/outbound/copy_manager.py +++ b/b2sdk/_internal/transfer/outbound/copy_manager.py @@ -110,7 +110,10 @@ def _copy_part( (``None`` if unknown) """ # b2_copy_part doesn't need SSE-B2. Large file encryption is decided on b2_start_large_file. 
- if destination_encryption is not None and destination_encryption.mode == EncryptionMode.SSE_B2: + if ( + destination_encryption is not None + and destination_encryption.mode == EncryptionMode.SSE_B2 + ): destination_encryption = None # Check if this part was uploaded before @@ -215,12 +218,18 @@ def establish_sse_c_file_metadata( source_key_id = None destination_key_id = None - if destination_server_side_encryption is not None and destination_server_side_encryption.key is not None and \ - destination_server_side_encryption.key.key_id is not None: + if ( + destination_server_side_encryption is not None + and destination_server_side_encryption.key is not None + and destination_server_side_encryption.key.key_id is not None + ): destination_key_id = destination_server_side_encryption.key.key_id - if source_server_side_encryption is not None and source_server_side_encryption.key is not None and \ - source_server_side_encryption.key.key_id is not None: + if ( + source_server_side_encryption is not None + and source_server_side_encryption.key is not None + and source_server_side_encryption.key.key_id is not None + ): source_key_id = source_server_side_encryption.key.key_id if source_key_id == destination_key_id: @@ -228,11 +237,9 @@ def establish_sse_c_file_metadata( if source_file_info is None or source_content_type is None: raise SSECKeyIdMismatchInCopy( - 'attempting to copy file using {} without providing source_file_info ' - 'and source_content_type for differing sse_c_key_ids: source="{}", ' - 'destination="{}"'.format( - MetadataDirectiveMode.COPY, source_key_id, destination_key_id - ) + f'attempting to copy file using {MetadataDirectiveMode.COPY} without providing source_file_info ' + f'and source_content_type for differing sse_c_key_ids: source="{source_key_id}", ' + f'destination="{destination_key_id}"' ) destination_file_info = source_file_info.copy() diff --git a/b2sdk/_internal/transfer/outbound/copy_source.py b/b2sdk/_internal/transfer/outbound/copy_source.py index 9bf960c36..f65005351 100644 --- a/b2sdk/_internal/transfer/outbound/copy_source.py +++ b/b2sdk/_internal/transfer/outbound/copy_source.py @@ -35,17 +35,8 @@ def __init__( def __repr__(self): return ( - '<{classname} file_id={file_id} offset={offset} length={length} id={id}, encryption={encryption},' - 'source_content_type={source_content_type}>, source_file_info={source_file_info}' - ).format( - classname=self.__class__.__name__, - file_id=self.file_id, - offset=self.offset, - length=self.length, - id=id(self), - encryption=self.encryption, - source_content_type=self.source_content_type, - source_file_info=self.source_file_info, + f'<{self.__class__.__name__} file_id={self.file_id} offset={self.offset} length={self.length} id={id(self)}, encryption={self.encryption},' + f'source_content_type={self.source_content_type}>, source_file_info={self.source_file_info}' ) def get_content_length(self): @@ -78,7 +69,7 @@ def get_copy_source_range(self, relative_offset, range_length): range_length, encryption=self.encryption, source_file_info=self.source_file_info, - source_content_type=self.source_content_type + source_content_type=self.source_content_type, ) def get_content_sha1(self): diff --git a/b2sdk/_internal/transfer/outbound/outbound_source.py b/b2sdk/_internal/transfer/outbound/outbound_source.py index bf4b5573d..a350c626f 100644 --- a/b2sdk/_internal/transfer/outbound/outbound_source.py +++ b/b2sdk/_internal/transfer/outbound/outbound_source.py @@ -15,7 +15,7 @@ class OutboundTransferSource(metaclass=ABCMeta): - """ 
Abstract class for defining outbound transfer sources. + """Abstract class for defining outbound transfer sources. Supported outbound transfer sources are: diff --git a/b2sdk/_internal/transfer/outbound/upload_manager.py b/b2sdk/_internal/transfer/outbound/upload_manager.py index 1005634e5..b6c8dabd0 100644 --- a/b2sdk/_internal/transfer/outbound/upload_manager.py +++ b/b2sdk/_internal/transfer/outbound/upload_manager.py @@ -33,7 +33,7 @@ if TYPE_CHECKING: from b2sdk._internal.transfer.outbound.upload_source import AbstractUploadSource - _TypeUploadSource = TypeVar("_TypeUploadSource", bound=AbstractUploadSource) + _TypeUploadSource = TypeVar('_TypeUploadSource', bound=AbstractUploadSource) class UploadManager(TransferManager, ThreadPoolMixin): @@ -231,8 +231,9 @@ def _upload_small_file( ) if content_sha1 == HEX_DIGITS_AT_END: content_sha1 = input_stream.hash - assert content_sha1 == 'do_not_verify' or content_sha1 == response[ - 'contentSha1'], '{} != {}'.format(content_sha1, response['contentSha1']) + assert ( + content_sha1 == 'do_not_verify' or content_sha1 == response['contentSha1'] + ), '{} != {}'.format(content_sha1, response['contentSha1']) return self.services.api.file_version_factory.from_api_response(response) except B2Error as e: diff --git a/b2sdk/_internal/transfer/outbound/upload_source.py b/b2sdk/_internal/transfer/outbound/upload_source.py index c10a38fb1..647336a21 100644 --- a/b2sdk/_internal/transfer/outbound/upload_source.py +++ b/b2sdk/_internal/transfer/outbound/upload_source.py @@ -35,7 +35,8 @@ @unique class UploadMode(Enum): - """ Mode of file uploads """ + """Mode of file uploads""" + FULL = auto() #: always upload the whole file INCREMENTAL = auto() #: use incremental uploads when possible @@ -90,8 +91,9 @@ def __init__( def __repr__(self) -> str: return '<{classname} data={data} id={id}>'.format( classname=self.__class__.__name__, - data=str(self.data_bytes[:20]) + - '...' if len(self.data_bytes) > 20 else self.data_bytes, + data=str(self.data_bytes[:20]) + '...' 
+ if len(self.data_bytes) > 20 + else self.data_bytes, id=id(self), ) @@ -134,14 +136,8 @@ def check_path_and_get_size(self) -> None: def __repr__(self) -> str: return ( - '<{classname} local_path={local_path} content_length={content_length} ' - 'content_sha1={content_sha1} id={id}>' - ).format( - classname=self.__class__.__name__, - local_path=self.local_path, - content_length=self.content_length, - content_sha1=self.content_sha1, - id=id(self), + f'<{self.__class__.__name__} local_path={self.local_path} content_length={self.content_length} ' + f'content_sha1={self.content_sha1} id={id(self)}>' ) def get_content_length(self) -> int: @@ -215,7 +211,7 @@ def get_incremental_sources( if not file_version: logger.debug( - "Fallback to full upload for %s -- no matching file on server", self.local_path + 'Fallback to full upload for %s -- no matching file on server', self.local_path ) return [self] @@ -223,15 +219,16 @@ def get_incremental_sources( if file_version.size < min_part_size: # existing file size below minimal large file part size logger.debug( - "Fallback to full upload for %s -- remote file is smaller than %i bytes", - self.local_path, min_part_size + 'Fallback to full upload for %s -- remote file is smaller than %i bytes', + self.local_path, + min_part_size, ) return [self] if self.get_content_length() < file_version.size: logger.debug( - "Fallback to full upload for %s -- local file is smaller than remote", - self.local_path + 'Fallback to full upload for %s -- local file is smaller than remote', + self.local_path, ) return [self] @@ -239,8 +236,8 @@ def get_incremental_sources( if not content_sha1: logger.debug( - "Fallback to full upload for %s -- remote file content SHA1 unknown", - self.local_path + 'Fallback to full upload for %s -- remote file content SHA1 unknown', + self.local_path, ) return [self] @@ -251,14 +248,14 @@ def get_incremental_sources( hex_digest = digester.update_from_stream(file_version.size) if hex_digest != content_sha1: logger.debug( - "Fallback to full upload for %s -- content in common range differs", + 'Fallback to full upload for %s -- content in common range differs', self.local_path, ) # Calculate SHA1 of the remainder of the file and set it. 
self.content_sha1 = digester.update_from_stream() return [self] - logger.debug("Incremental upload of %s is possible.", self.local_path) + logger.debug('Incremental upload of %s is possible.', self.local_path) if file_version.server_side_encryption and file_version.server_side_encryption.is_unknown(): source_encryption = None @@ -301,14 +298,8 @@ def __init__( def __repr__(self) -> str: return ( - '<{classname} stream_opener={stream_opener} content_length={content_length} ' - 'content_sha1={content_sha1} id={id}>' - ).format( - classname=self.__class__.__name__, - stream_opener=repr(self.stream_opener), - content_length=self._content_length, - content_sha1=self._content_sha1, - id=id(self), + f'<{self.__class__.__name__} stream_opener={repr(self.stream_opener)} content_length={self._content_length} ' + f'content_sha1={self._content_sha1} id={id(self)}>' ) def get_content_length(self) -> int: @@ -360,15 +351,8 @@ def __init__( def __repr__(self) -> str: return ( - '<{classname} stream_opener={stream_opener} offset={offset} ' - 'content_length={content_length} content_sha1={content_sha1} id={id}>' - ).format( - classname=self.__class__.__name__, - stream_opener=repr(self.stream_opener), - offset=self._offset, - content_length=self._content_length, - content_sha1=self._content_sha1, - id=id(self), + f'<{self.__class__.__name__} stream_opener={repr(self.stream_opener)} offset={self._offset} ' + f'content_length={self._content_length} content_sha1={self._content_sha1} id={id(self)}>' ) def open(self): diff --git a/b2sdk/_internal/types.py b/b2sdk/_internal/types.py index e25bacbb8..5e3dbbd3d 100644 --- a/b2sdk/_internal/types.py +++ b/b2sdk/_internal/types.py @@ -12,6 +12,7 @@ We use this module to support pydantic-less installs, as well as native typing module us on newer python versions. """ + import sys from annotated_types import Ge @@ -22,16 +23,16 @@ from typing import Annotated, NotRequired, TypedDict __all__ = [ # prevents linter from removing "unused imports" which we want to export - "NotRequired", - "PositiveInt", - "TypedDict", - "pydantic", + 'NotRequired', + 'PositiveInt', + 'TypedDict', + 'pydantic', ] try: import pydantic - if getattr(pydantic, "__version__", "") < "2": + if getattr(pydantic, '__version__', '') < '2': raise ImportError if sys.version_info < (3, 10): # https://github.com/pydantic/pydantic/issues/7873 diff --git a/b2sdk/_internal/utils/__init__.py b/b2sdk/_internal/utils/__init__.py index f4ae98ceb..ed409a188 100644 --- a/b2sdk/_internal/utils/__init__.py +++ b/b2sdk/_internal/utils/__init__.py @@ -23,7 +23,13 @@ from typing import Any, Iterator, NewType, TypeVar from urllib.parse import quote, unquote_plus -from logfury.v1 import DefaultTraceAbstractMeta, DefaultTraceMeta, limit_trace_arguments, disable_trace, trace_call +from logfury.v1 import ( + DefaultTraceAbstractMeta, + DefaultTraceMeta, + limit_trace_arguments, + disable_trace, + trace_call, +) logger = logging.getLogger(__name__) @@ -142,6 +148,7 @@ class IncrementalHexDigester: """ Calculates digest of a stream or parts of it. 
""" + stream: ReadOnlyStream digest: 'hashlib._Hash' = field( # noqa (_Hash is a dynamic object) default_factory=hashlib.sha1 @@ -247,7 +254,7 @@ def validate_b2_file_name(name): if '//' in name: raise ValueError("file names must not contain '//'") if chr(127) in name: - raise ValueError("file names must not contain DEL") + raise ValueError('file names must not contain DEL') if any(250 < len(segment) for segment in name_utf8.split(b'/')): raise ValueError("file names segments (between '/') can be at most 250 utf-8 bytes") @@ -273,8 +280,10 @@ def is_special_file(path: str | pathlib.Path) -> bool: """ path_str = str(path) return ( - path == os.devnull or path_str.startswith('/dev/') or - platform.system() == 'Windows' and path_str.upper() in ('CON', 'NUL') + path == os.devnull + or path_str.startswith('/dev/') + or platform.system() == 'Windows' + and path_str.upper() in ('CON', 'NUL') ) @@ -430,6 +439,7 @@ class B2TraceMeta(DefaultTraceMeta): """ Trace all public method calls, except for ones with names that begin with `get_`. """ + pass @@ -438,6 +448,7 @@ class B2TraceMetaAbstract(DefaultTraceAbstractMeta): Default class for tracers, to be set as a metaclass for abstract base classes. """ + pass @@ -458,6 +469,7 @@ def __init__(self, lock, token): def __enter__(self): if not self.lock.acquire(False): from b2sdk._internal.exception import UploadTokenUsedConcurrently + raise UploadTokenUsedConcurrently(self.token) def __exit__(self, exc_type, exc_val, exc_tb): diff --git a/b2sdk/_internal/utils/docs.py b/b2sdk/_internal/utils/docs.py index d860d2eb4..79fa1203c 100644 --- a/b2sdk/_internal/utils/docs.py +++ b/b2sdk/_internal/utils/docs.py @@ -49,7 +49,7 @@ def _extract_restructedtext_links(docstring: str) -> dict[str, str]: for line in docstring.splitlines(): line = line.strip() if line.startswith(_rest_link_prefix): - name, url = line[len(_rest_link_prefix):].split(': ', 1) + name, url = line[len(_rest_link_prefix) :].split(': ', 1) if name and url: links[name] = url return links diff --git a/b2sdk/_internal/utils/escape.py b/b2sdk/_internal/utils/escape.py index d988350a5..58dc4d253 100644 --- a/b2sdk/_internal/utils/escape.py +++ b/b2sdk/_internal/utils/escape.py @@ -26,7 +26,7 @@ def unprintable_to_hex(s: str) -> str: """ def hexify(match): - return rf"\x{ord(match.group()):02x}" + return rf'\x{ord(match.group()):02x}' if s: return UNPRINTABLE_PATTERN.sub(hexify, s) @@ -52,5 +52,5 @@ def substitute_control_chars(s: str) -> tuple[str, bool]: :param s: an arbitrary string, possibly with unprintable characters. 
:return: tuple of the string with � replacements made and boolean indicated if chars were replaced """ - new_value = UNPRINTABLE_PATTERN.sub("�", s) + new_value = UNPRINTABLE_PATTERN.sub('�', s) return new_value, new_value != s diff --git a/b2sdk/_internal/utils/filesystem.py b/b2sdk/_internal/utils/filesystem.py index eaf543659..1a7c26548 100644 --- a/b2sdk/_internal/utils/filesystem.py +++ b/b2sdk/_internal/utils/filesystem.py @@ -11,20 +11,19 @@ import platform import stat -_IS_WINDOWS = platform.system() == "Windows" +_IS_WINDOWS = platform.system() == 'Windows' def points_to_fifo(path: pathlib.Path) -> bool: """Check if the path points to a fifo.""" path = path.resolve() try: - return stat.S_ISFIFO(path.stat().st_mode) except OSError: return False -_STDOUT_FILENAME = "CON" if _IS_WINDOWS else "/dev/stdout" +_STDOUT_FILENAME = 'CON' if _IS_WINDOWS else '/dev/stdout' STDOUT_FILEPATH = pathlib.Path(_STDOUT_FILENAME) diff --git a/b2sdk/_internal/utils/range_.py b/b2sdk/_internal/utils/range_.py index e8bafa600..cf1d39b2a 100644 --- a/b2sdk/_internal/utils/range_.py +++ b/b2sdk/_internal/utils/range_.py @@ -22,6 +22,7 @@ class Range: """ HTTP ranges use an *inclusive* index at the end. """ + __slots__ = ['start', 'end'] start: int diff --git a/b2sdk/_internal/utils/thread_pool.py b/b2sdk/_internal/utils/thread_pool.py index d169eb7de..331c3b715 100644 --- a/b2sdk/_internal/utils/thread_pool.py +++ b/b2sdk/_internal/utils/thread_pool.py @@ -22,8 +22,7 @@ class DynamicThreadPoolExecutorProtocol(Protocol): - def submit(self, fn: Callable, *args, **kwargs) -> Future: - ... + def submit(self, fn: Callable, *args, **kwargs) -> Future: ... def set_size(self, max_workers: int) -> None: """Set the size of the thread pool.""" @@ -94,7 +93,8 @@ def __init__( """ self._thread_pool = ( thread_pool - if thread_pool is not None else self.DEFAULT_THREAD_POOL_CLASS(max_workers=max_workers) + if thread_pool is not None + else self.DEFAULT_THREAD_POOL_CLASS(max_workers=max_workers) ) self._max_workers = max_workers super().__init__(**kwargs) diff --git a/b2sdk/_internal/utils/typing.py b/b2sdk/_internal/utils/typing.py index 252cb95a9..f07126d5a 100644 --- a/b2sdk/_internal/utils/typing.py +++ b/b2sdk/_internal/utils/typing.py @@ -16,4 +16,4 @@ except ImportError: from typing import TypeAlias -JSON: TypeAlias = Union[Dict[str, "JSON"], List["JSON"], str, int, float, bool, None] +JSON: TypeAlias = Union[Dict[str, 'JSON'], List['JSON'], str, int, float, bool, None] diff --git a/b2sdk/_internal/version_utils.py b/b2sdk/_internal/version_utils.py index 582bad8de..b7d495b45 100644 --- a/b2sdk/_internal/version_utils.py +++ b/b2sdk/_internal/version_utils.py @@ -43,8 +43,8 @@ def __lt__(self, other): @classmethod def _parse_version(cls, version: str) -> tuple[int, ...]: - if "!" in version: # strip PEP 440 epoch - version = version.split("!", 1)[1] + if '!' in version: # strip PEP 440 epoch + version = version.split('!', 1)[1] return tuple(map(int, re.findall(r'\d+', version))) @@ -79,13 +79,9 @@ def __call__(self, func): The actual implementation of decorator. Needs self.source to be set before it's called. """ if self.cutoff_version and self.changed_version: - assert self.changed_version < self.cutoff_version, '{} decorator is set to start renaming {} {!r} starting at version {} and finishing in {}. 
It needs to start at a lower version and finish at a higher version.'.format( - self.__class__.__name__, - self.WHAT, - self.source, - self.changed_version, - self.cutoff_version, - ) + assert ( + self.changed_version < self.cutoff_version + ), f'{self.__class__.__name__} decorator is set to start renaming {self.WHAT} {self.source!r} starting at version {self.changed_version} and finishing in {self.cutoff_version}. It needs to start at a lower version and finish at a higher version.' class AbstractDeprecator(AbstractVersionDecorator): @@ -109,6 +105,7 @@ class rename_argument(AbstractDeprecator): 5 >>> """ + WHAT = 'argument' ALTERNATIVE_DECORATOR = 'discourage_argument' @@ -127,9 +124,9 @@ def __call__(self, func): @wraps(func) def wrapper(*args, **kwargs): if self.source in kwargs: - assert self.target not in kwargs, 'both argument names were provided: {!r} (deprecated) and {!r} (new)'.format( - self.source, self.target - ) + assert ( + self.target not in kwargs + ), f'both argument names were provided: {self.source!r} (deprecated) and {self.target!r} (new)' kwargs[self.target] = kwargs[self.source] del kwargs[self.source] info = f'{self.source!r} is a deprecated argument for {func.__name__!r} function/method - it was renamed to {self.target!r}' @@ -139,7 +136,7 @@ def wrapper(*args, **kwargs): info += f'. Support for the old name is going to be dropped in {self.cutoff_version}' warnings.warn( - f"{info}.", + f'{info}.', DeprecationWarning, ) return func(*args, **kwargs) @@ -162,6 +159,7 @@ class rename_function(AbstractDeprecator): >>> """ + WHAT = 'function' ALTERNATIVE_DECORATOR = 'discourage_function' @@ -177,13 +175,7 @@ def __call__(self, func): @wraps(func) def wrapper(*args, **kwargs): warnings.warn( - '{!r} is deprecated since version {} - it was moved to {!r}, please switch to use that. The proxy for the old name is going to be removed in {}.' - .format( - func.__name__, - self.changed_version, - self.target, - self.cutoff_version, - ), + f'{func.__name__!r} is deprecated since version {self.changed_version} - it was moved to {self.target!r}, please switch to use that. The proxy for the old name is going to be removed in {self.cutoff_version}.', DeprecationWarning, ) return func(*args, **kwargs) diff --git a/b2sdk/_v3/__init__.py b/b2sdk/_v3/__init__.py index 8418b5bc5..b0633d08f 100644 --- a/b2sdk/_v3/__init__.py +++ b/b2sdk/_v3/__init__.py @@ -12,12 +12,12 @@ # Set default logging handler to avoid "No handler found" warnings. 
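# Illustrative aside, not part of the patch: the rename_argument wrapper
# reformatted in b2sdk/_internal/version_utils.py above forwards a deprecated
# keyword argument to its new name and emits a DeprecationWarning. A minimal,
# self-contained sketch of that mechanism (simplified names, version checks
# omitted; this is not the library's actual decorator):
import functools
import warnings

def _rename_kwarg(source, target):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            if source in kwargs:
                # mirror the assertion and forwarding seen in the diff above
                assert target not in kwargs, 'both argument names were provided'
                kwargs[target] = kwargs.pop(source)
                warnings.warn(f'{source!r} was renamed to {target!r}.', DeprecationWarning)
            return func(*args, **kwargs)
        return wrapper
    return decorator

@_rename_kwarg('aaa', 'bbb')
def easy(bbb):
    return bbb

assert easy(aaa=5) == 5  # old keyword still accepted, with a DeprecationWarning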
import logging as _logging -_logging.getLogger("b2sdk").addHandler(_logging.NullHandler()) +_logging.getLogger('b2sdk').addHandler(_logging.NullHandler()) class UrllibWarningFilter: def filter(self, record): - return record.msg != "Connection pool is full, discarding connection: %s" + return record.msg != 'Connection pool is full, discarding connection: %s' _logging.getLogger('urllib3.connectionpool').addFilter(UrllibWarningFilter()) @@ -138,7 +138,11 @@ def filter(self, record): from b2sdk._internal.raw_api import B2RawHTTPApi from b2sdk._internal.raw_api import MetadataDirectiveMode from b2sdk._internal.raw_api import LifecycleRule -from b2sdk._internal.raw_api import NotificationRule, NotificationRuleResponse, notification_rule_response_to_request +from b2sdk._internal.raw_api import ( + NotificationRule, + NotificationRuleResponse, + notification_rule_response_to_request, +) # stream @@ -211,7 +215,9 @@ def filter(self, record): from b2sdk._internal.sync.encryption_provider import AbstractSyncEncryptionSettingsProvider from b2sdk._internal.sync.encryption_provider import BasicSyncEncryptionSettingsProvider from b2sdk._internal.sync.encryption_provider import ServerDefaultSyncEncryptionSettingsProvider -from b2sdk._internal.sync.encryption_provider import SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER +from b2sdk._internal.sync.encryption_provider import ( + SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, +) # scan @@ -271,7 +277,11 @@ def filter(self, record): ) from b2sdk._internal.session import B2Session from b2sdk._internal.utils.thread_pool import ThreadPoolMixin -from b2sdk._internal.utils.escape import unprintable_to_hex, escape_control_chars, substitute_control_chars +from b2sdk._internal.utils.escape import ( + unprintable_to_hex, + escape_control_chars, + substitute_control_chars, +) # filter from b2sdk._internal.filter import FilterType, Filter diff --git a/b2sdk/v0/__init__.py b/b2sdk/v0/__init__.py index 7db238f20..da07aef7d 100644 --- a/b2sdk/v0/__init__.py +++ b/b2sdk/v0/__init__.py @@ -10,7 +10,12 @@ from __future__ import annotations from b2sdk.v1 import * # noqa -from b2sdk.v0.account_info import AbstractAccountInfo, InMemoryAccountInfo, UrlPoolAccountInfo, SqliteAccountInfo +from b2sdk.v0.account_info import ( + AbstractAccountInfo, + InMemoryAccountInfo, + UrlPoolAccountInfo, + SqliteAccountInfo, +) from b2sdk.v0.api import B2Api from b2sdk.v0.bucket import Bucket from b2sdk.v0.bucket import BucketFactory diff --git a/b2sdk/v0/account_info.py b/b2sdk/v0/account_info.py index 42c77effc..9db39fd49 100644 --- a/b2sdk/v0/account_info.py +++ b/b2sdk/v0/account_info.py @@ -14,7 +14,7 @@ class OldAccountInfoMethods: - """ this class contains proxy methods for deprecated signatures renamed for consistency in mid-2019 """ + """this class contains proxy methods for deprecated signatures renamed for consistency in mid-2019""" def get_account_id_or_app_key_id(self): """ diff --git a/b2sdk/v0/exception.py b/b2sdk/v0/exception.py index a90a9cf40..c2271b492 100644 --- a/b2sdk/v0/exception.py +++ b/b2sdk/v0/exception.py @@ -19,11 +19,4 @@ # override to retain old style __str__ class DestFileNewer(v1DestFileNewer): def __str__(self): - return 'source file is older than destination: {}{} with a time of {} cannot be synced to {}{} with a time of {}, unless --skipNewer or --replaceNewer is provided'.format( - self.source_prefix, - self.source_file.name, - self.source_file.latest_version().mod_time, - self.dest_prefix, - self.dest_file.name, - 
self.dest_file.latest_version().mod_time, - ) + return f'source file is older than destination: {self.source_prefix}{self.source_file.name} with a time of {self.source_file.latest_version().mod_time} cannot be synced to {self.dest_prefix}{self.dest_file.name} with a time of {self.dest_file.latest_version().mod_time}, unless --skipNewer or --replaceNewer is provided' diff --git a/b2sdk/v0/sync.py b/b2sdk/v0/sync.py index 1fcf806f3..f2d3cca1b 100644 --- a/b2sdk/v0/sync.py +++ b/b2sdk/v0/sync.py @@ -21,7 +21,10 @@ from b2sdk.v1 import DEFAULT_SCAN_MANAGER from b2sdk.v1 import SyncReport from b2sdk.v1 import Synchronizer as SynchronizerV1 -from b2sdk.v1 import AbstractSyncEncryptionSettingsProvider, SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER +from b2sdk.v1 import ( + AbstractSyncEncryptionSettingsProvider, + SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, +) logger = logging.getLogger(__name__) @@ -113,8 +116,7 @@ def make_folder_sync_actions( now_millis, reporter, policies_manager=DEFAULT_SCAN_MANAGER, - encryption_settings_provider: - AbstractSyncEncryptionSettingsProvider = SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, + encryption_settings_provider: AbstractSyncEncryptionSettingsProvider = SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, ): """ This is deprecated. Use the new Synchronizer class. @@ -147,7 +149,7 @@ def make_folder_sync_actions( now_millis, reporter, policies_manager=policies_manager, - encryption_settings_provider=encryption_settings_provider + encryption_settings_provider=encryption_settings_provider, ) except InvalidArgument as e: raise CommandError(f'--{e.parameter_name} {e.message}') @@ -165,8 +167,7 @@ def sync_folders( policies_manager=DEFAULT_SCAN_MANAGER, dry_run=False, allow_empty_source=False, - encryption_settings_provider: - AbstractSyncEncryptionSettingsProvider = SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, + encryption_settings_provider: AbstractSyncEncryptionSettingsProvider = SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, ): """ This is deprecated. Use the new Synchronizer class. 
@@ -207,5 +208,5 @@ def sync_folders( dest_folder, now_millis, reporter, - encryption_settings_provider=encryption_settings_provider + encryption_settings_provider=encryption_settings_provider, ) diff --git a/b2sdk/v1/__init__.py b/b2sdk/v1/__init__.py index d51cb011d..ed4e9f3d1 100644 --- a/b2sdk/v1/__init__.py +++ b/b2sdk/v1/__init__.py @@ -11,24 +11,41 @@ from b2sdk.v2 import * # noqa from b2sdk.v1.account_info import ( - AbstractAccountInfo, InMemoryAccountInfo, UrlPoolAccountInfo, SqliteAccountInfo, StubAccountInfo + AbstractAccountInfo, + InMemoryAccountInfo, + UrlPoolAccountInfo, + SqliteAccountInfo, + StubAccountInfo, ) from b2sdk.v1.api import B2Api from b2sdk.v1.b2http import B2Http from b2sdk.v1.bucket import Bucket, BucketFactory from b2sdk.v1.cache import AbstractCache from b2sdk.v1.download_dest import ( - AbstractDownloadDestination, DownloadDestLocalFile, PreSeekedDownloadDest, DownloadDestBytes, - DownloadDestProgressWrapper + AbstractDownloadDestination, + DownloadDestLocalFile, + PreSeekedDownloadDest, + DownloadDestBytes, + DownloadDestProgressWrapper, ) from b2sdk.v1.exception import CommandError, DestFileNewer from b2sdk.v1.file_metadata import FileMetadata from b2sdk.v1.file_version import FileVersionInfo from b2sdk.v1.session import B2Session from b2sdk.v1.sync import ( - ScanPoliciesManager, DEFAULT_SCAN_MANAGER, zip_folders, Synchronizer, AbstractFolder, - LocalFolder, B2Folder, parse_sync_folder, SyncReport, File, B2File, FileVersion, - AbstractSyncEncryptionSettingsProvider + ScanPoliciesManager, + DEFAULT_SCAN_MANAGER, + zip_folders, + Synchronizer, + AbstractFolder, + LocalFolder, + B2Folder, + parse_sync_folder, + SyncReport, + File, + B2File, + FileVersion, + AbstractSyncEncryptionSettingsProvider, ) from b2sdk.v1.replication.monitoring import ReplicationMonitor diff --git a/b2sdk/v1/account_info.py b/b2sdk/v1/account_info.py index 54a98a741..c557519dd 100644 --- a/b2sdk/v1/account_info.py +++ b/b2sdk/v1/account_info.py @@ -48,7 +48,6 @@ def set_auth_data( application_key_id=None, s3_api_url=None, ): - if 's3_api_url' in inspect.getfullargspec(self._set_auth_data).args: s3_kwargs = dict(s3_api_url=s3_api_url) else: @@ -84,7 +83,7 @@ def _set_auth_data( realm, s3_api_url=None, allowed=None, - application_key_id=None + application_key_id=None, ): if 's3_api_url' in inspect.getfullargspec(super()._set_auth_data).args: s3_kwargs = dict(s3_api_url=s3_api_url) @@ -153,8 +152,17 @@ def get_minimum_part_size(self): @abstractmethod def _set_auth_data( - self, account_id, auth_token, api_url, download_url, minimum_part_size, application_key, - realm, s3_api_url, allowed, application_key_id + self, + account_id, + auth_token, + api_url, + download_url, + minimum_part_size, + application_key, + realm, + s3_api_url, + allowed, + application_key_id, ): """ Actually store the auth data. Can assume that 'allowed' is present and valid. 
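Aside, not part of the patch: the v1 compatibility shim reformatted above decides at runtime whether to forward s3_api_url by inspecting the signature of the target _set_auth_data. A hedged, self-contained sketch of that dispatch pattern, using hypothetical stand-in classes rather than the real account-info types:

import inspect

class LegacyInfo:
    # hypothetical stand-in for an old subclass whose _set_auth_data predates s3_api_url
    def _set_auth_data(self, account_id, auth_token):
        return ('legacy', account_id, auth_token)

class ModernInfo:
    def _set_auth_data(self, account_id, auth_token, s3_api_url=None):
        return ('modern', account_id, auth_token, s3_api_url)

def set_auth_data(info, account_id, auth_token, s3_api_url=None):
    # forward s3_api_url only if the target signature accepts it,
    # mirroring the inspect.getfullargspec check in the diff above
    if 's3_api_url' in inspect.getfullargspec(info._set_auth_data).args:
        s3_kwargs = dict(s3_api_url=s3_api_url)
    else:
        s3_kwargs = {}
    return info._set_auth_data(account_id, auth_token, **s3_kwargs)

assert set_auth_data(LegacyInfo(), 'acct', 'tok') == ('legacy', 'acct', 'tok')
assert set_auth_data(ModernInfo(), 'acct', 'tok', 's3.example.com')[3] == 's3.example.com'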
diff --git a/b2sdk/v1/api.py b/b2sdk/v1/api.py index 13feef019..fafbe7d88 100644 --- a/b2sdk/v1/api.py +++ b/b2sdk/v1/api.py @@ -17,7 +17,11 @@ from .account_info import AbstractAccountInfo from .bucket import Bucket, BucketFactory, download_file_and_return_info_dict from .cache import AbstractCache -from .file_version import FileVersionInfo, FileVersionInfoFactory, file_version_info_from_id_and_name +from .file_version import ( + FileVersionInfo, + FileVersionInfoFactory, + file_version_info_from_id_and_name, +) from .session import B2Session @@ -114,8 +118,7 @@ def download_file_by_id( progress_listener: v2.AbstractProgressListener | None = None, range_: tuple[int, int] | None = None, encryption: v2.EncryptionSetting | None = None, - ) -> dict: - ... + ) -> dict: ... @overload def download_file_by_id( @@ -124,8 +127,7 @@ def download_file_by_id( progress_listener: v2.AbstractProgressListener | None = None, range_: tuple[int, int] | None = None, encryption: v2.EncryptionSetting | None = None, - ) -> v2.DownloadedFile: - ... + ) -> v2.DownloadedFile: ... def download_file_by_id( self, @@ -184,7 +186,7 @@ def list_keys(self, start_application_key_id=None) -> dict: return self.session.list_keys( account_id, max_key_count=self.DEFAULT_LIST_KEY_COUNT, - start_application_key_id=start_application_key_id + start_application_key_id=start_application_key_id, ) def create_key( @@ -195,13 +197,17 @@ def create_key( bucket_id: str | None = None, name_prefix: str | None = None, ): - return super().create_key( - capabilities=capabilities, - key_name=key_name, - valid_duration_seconds=valid_duration_seconds, - bucket_id=bucket_id, - name_prefix=name_prefix, - ).as_dict() + return ( + super() + .create_key( + capabilities=capabilities, + key_name=key_name, + valid_duration_seconds=valid_duration_seconds, + bucket_id=bucket_id, + name_prefix=name_prefix, + ) + .as_dict() + ) def delete_key(self, application_key_id): return super().delete_key_by_id(application_key_id).as_dict() diff --git a/b2sdk/v1/b2http.py b/b2sdk/v1/b2http.py index 85d909152..9ce343ed2 100644 --- a/b2sdk/v1/b2http.py +++ b/b2sdk/v1/b2http.py @@ -56,6 +56,6 @@ def __init__(self, requests_module=None, install_clock_skew_hook=True, user_agen v2.B2HttpApiConfig( http_session_factory=(requests_module or requests).Session, install_clock_skew_hook=install_clock_skew_hook, - user_agent_append=user_agent_append + user_agent_append=user_agent_append, ) ) diff --git a/b2sdk/v1/bucket.py b/b2sdk/v1/bucket.py index e6d1a4b61..f2922128c 100644 --- a/b2sdk/v1/bucket.py +++ b/b2sdk/v1/bucket.py @@ -14,7 +14,11 @@ from .download_dest import AbstractDownloadDestination from .file_metadata import FileMetadata -from .file_version import FileVersionInfo, FileVersionInfoFactory, file_version_info_from_download_version +from .file_version import ( + FileVersionInfo, + FileVersionInfoFactory, + file_version_info_from_download_version, +) from b2sdk import v2 from b2sdk._internal.utils import validate_b2_file_name from b2sdk._internal.raw_api import LifecycleRule @@ -156,8 +160,7 @@ def download_file_by_id( progress_listener: v2.AbstractProgressListener | None = None, range_: tuple[int, int] | None = None, encryption: v2.EncryptionSetting | None = None, - ) -> dict: - ... + ) -> dict: ... @overload def download_file_by_id( @@ -166,8 +169,7 @@ def download_file_by_id( progress_listener: v2.AbstractProgressListener | None = None, range_: tuple[int, int] | None = None, encryption: v2.EncryptionSetting | None = None, - ) -> v2.DownloadedFile: - ... 
+ ) -> v2.DownloadedFile: ... def download_file_by_id( self, @@ -222,7 +224,7 @@ def update( default_server_side_encryption: v2.EncryptionSetting | None = None, default_retention: v2.BucketRetentionSetting | None = None, is_file_lock_enabled: bool | None = None, - **kwargs + **kwargs, ): """ Update various bucket parameters. @@ -241,7 +243,9 @@ def update( with suppress(KeyError): del kwargs['replication'] self.replication = None - assert not kwargs # after we get rid of everything we don't support in this apiver, this should be empty + assert ( + not kwargs + ) # after we get rid of everything we don't support in this apiver, this should be empty account_id = self.api.account_info.get_account_id() return self.api.session.update_bucket( @@ -263,7 +267,7 @@ def ls( show_versions: bool = False, recursive: bool = False, fetch_count: int | None = 10000, - **kwargs + **kwargs, ): """ Pretend that folders exist and yields the information about the files in a folder. @@ -292,8 +296,9 @@ def ls( def download_file_and_return_info_dict( - downloaded_file: v2.DownloadedFile, download_dest: AbstractDownloadDestination, - range_: tuple[int, int] | None + downloaded_file: v2.DownloadedFile, + download_dest: AbstractDownloadDestination, + range_: tuple[int, int] | None, ): with download_dest.make_file_context( file_id=downloaded_file.download_version.id_, diff --git a/b2sdk/v1/download_dest.py b/b2sdk/v1/download_dest.py index cd78db94e..f2c18f7bb 100644 --- a/b2sdk/v1/download_dest.py +++ b/b2sdk/v1/download_dest.py @@ -25,9 +25,11 @@ class AbstractDownloadDestination(metaclass=B2TraceMetaAbstract): """ @abstractmethod - @limit_trace_arguments(skip=[ - 'content_sha1', - ]) + @limit_trace_arguments( + skip=[ + 'content_sha1', + ] + ) def make_file_context( self, file_id, @@ -37,7 +39,7 @@ def make_file_context( content_sha1, file_info, mod_time_millis, - range_=None + range_=None, ): """ Return a context manager that yields a binary file-like object to use for @@ -60,6 +62,7 @@ class DownloadDestLocalFile(AbstractDownloadDestination): """ Store a downloaded file into a local file and sets its modification time. """ + MODE = 'wb+' def __init__(self, local_file_path): @@ -74,7 +77,7 @@ def make_file_context( content_sha1, file_info, mod_time_millis, - range_=None + range_=None, ): self.file_id = file_id self.file_name = file_name @@ -112,6 +115,7 @@ class PreSeekedDownloadDest(DownloadDestLocalFile): Does not truncate the target file, seeks to a given offset just after opening a descriptor. 
""" + MODE = 'rb+' def __init__(self, local_file_path, seek_target): @@ -142,7 +146,7 @@ def make_file_context( content_sha1, file_info, mod_time_millis, - range_=None + range_=None, ): self.file_id = file_id self.file_name = file_name @@ -194,21 +198,40 @@ def make_file_context( content_sha1, file_info, mod_time_millis, - range_=None + range_=None, ): return self.write_file_and_report_progress_context( - file_id, file_name, content_length, content_type, content_sha1, file_info, - mod_time_millis, range_ + file_id, + file_name, + content_length, + content_type, + content_sha1, + file_info, + mod_time_millis, + range_, ) @contextmanager def write_file_and_report_progress_context( - self, file_id, file_name, content_length, content_type, content_sha1, file_info, - mod_time_millis, range_ + self, + file_id, + file_name, + content_length, + content_type, + content_sha1, + file_info, + mod_time_millis, + range_, ): with self.download_dest.make_file_context( - file_id, file_name, content_length, content_type, content_sha1, file_info, - mod_time_millis, range_ + file_id, + file_name, + content_length, + content_type, + content_sha1, + file_info, + mod_time_millis, + range_, ) as file_: total_bytes = content_length if range_ is not None: diff --git a/b2sdk/v1/exception.py b/b2sdk/v1/exception.py index 99cce5d60..2be0b15bb 100644 --- a/b2sdk/v1/exception.py +++ b/b2sdk/v1/exception.py @@ -10,6 +10,7 @@ from __future__ import annotations from b2sdk.v2.exception import * # noqa + v2DestFileNewer = DestFileNewer @@ -38,11 +39,4 @@ def __init__(self, dest_file, source_file, dest_prefix, source_prefix): self.source_prefix = source_prefix def __str__(self): - return 'source file is older than destination: {}{} with a time of {} cannot be synced to {}{} with a time of {}, unless a valid newer_file_mode is provided'.format( - self.source_prefix, - self.source_file.name, - self.source_file.latest_version().mod_time, - self.dest_prefix, - self.dest_file.name, - self.dest_file.latest_version().mod_time, - ) + return f'source file is older than destination: {self.source_prefix}{self.source_file.name} with a time of {self.source_file.latest_version().mod_time} cannot be synced to {self.dest_prefix}{self.dest_file.name} with a time of {self.dest_file.latest_version().mod_time}, unless a valid newer_file_mode is provided' diff --git a/b2sdk/v1/file_metadata.py b/b2sdk/v1/file_metadata.py index e11b0e263..615acda25 100644 --- a/b2sdk/v1/file_metadata.py +++ b/b2sdk/v1/file_metadata.py @@ -16,6 +16,7 @@ class FileMetadata: """ Hold information about a file which is being downloaded. 
""" + UNVERIFIED_CHECKSUM_PREFIX = 'unverified:' def __init__( @@ -47,7 +48,7 @@ def as_info_dict(self): @classmethod def _decode_content_sha1(cls, content_sha1): if content_sha1.startswith(cls.UNVERIFIED_CHECKSUM_PREFIX): - return content_sha1[len(cls.UNVERIFIED_CHECKSUM_PREFIX):], False + return content_sha1[len(cls.UNVERIFIED_CHECKSUM_PREFIX) :], False return content_sha1, True @classmethod diff --git a/b2sdk/v1/file_version.py b/b2sdk/v1/file_version.py index 02502eb34..ea8bf6b4c 100644 --- a/b2sdk/v1/file_version.py +++ b/b2sdk/v1/file_version.py @@ -24,7 +24,9 @@ class FileVersionInfo(v2.FileVersion): __slots__ = ['_api'] - LS_ENTRY_TEMPLATE = '%83s %6s %10s %8s %9d %s' # order is file_id, action, date, time, size, name + LS_ENTRY_TEMPLATE = ( + '%83s %6s %10s %8s %9d %s' # order is file_id, action, date, time, size, name + ) def __init__( self, @@ -44,7 +46,7 @@ def __init__( legal_hold: v2.LegalHold | None = None, api: v1api.B2Api | None = None, cache_control: str | None = None, - **kwargs + **kwargs, ): self.id_ = id_ self.file_name = file_name @@ -70,7 +72,9 @@ def __init__( with suppress(KeyError): del kwargs['replication_status'] self.replication_status = None - assert not kwargs # after we get rid of everything we don't support in this apiver, this should be empty + assert ( + not kwargs + ) # after we get rid of everything we don't support in this apiver, this should be empty if v2.SRC_LAST_MODIFIED_MILLIS in self.file_info: self.mod_time_millis = int(self.file_info[v2.SRC_LAST_MODIFIED_MILLIS]) @@ -148,11 +152,9 @@ def inner(*a, **kw): # override to return old style FileVersionInfo class FileVersionInfoFactory(v2.FileVersionFactory): - from_api_response = translate_single_file_version(v2.FileVersionFactory.from_api_response) def from_response_headers(self, headers): - file_info = v2.DownloadVersionFactory.file_info_from_headers(headers) return FileVersionInfo( api=self.api, diff --git a/b2sdk/v1/replication/monitoring.py b/b2sdk/v1/replication/monitoring.py index 93e69149b..8c54ebf21 100644 --- a/b2sdk/v1/replication/monitoring.py +++ b/b2sdk/v1/replication/monitoring.py @@ -19,7 +19,6 @@ @dataclass class ReplicationMonitor(v2.ReplicationMonitor): - # when passing in v1 Bucket objects to ReplicationMonitor, # the latter should use v1 B2Folder to correctly use # v1 Bucket's interface diff --git a/b2sdk/v1/session.py b/b2sdk/v1/session.py index a99929171..673605c3b 100644 --- a/b2sdk/v1/session.py +++ b/b2sdk/v1/session.py @@ -24,7 +24,7 @@ def __init__( account_info=None, cache=None, raw_api: v2.B2RawHTTPApi = None, - api_config: v2.B2HttpApiConfig | None = None + api_config: v2.B2HttpApiConfig | None = None, ): if raw_api is not None and api_config is not None: raise InvalidArgument( @@ -66,5 +66,5 @@ def authorize_account(self, realm, application_key_id, application_key): realm=realm, s3_api_url=response['s3ApiUrl'], allowed=allowed, - application_key_id=application_key_id + application_key_id=application_key_id, ) diff --git a/b2sdk/v1/sync/encryption_provider.py b/b2sdk/v1/sync/encryption_provider.py index f051d4420..31e3a1389 100644 --- a/b2sdk/v1/sync/encryption_provider.py +++ b/b2sdk/v1/sync/encryption_provider.py @@ -23,7 +23,7 @@ def __init__(self, provider): self.provider = provider def __repr__(self): - return f"{self.__class__.__name__}({self.provider})" + return f'{self.__class__.__name__}({self.provider})' def get_setting_for_upload( self, diff --git a/b2sdk/v1/sync/file.py b/b2sdk/v1/sync/file.py index d991cddb6..7c761707f 100644 --- a/b2sdk/v1/sync/file.py 
+++ b/b2sdk/v1/sync/file.py @@ -93,13 +93,7 @@ def __init__(self, id_, file_name, mod_time, action, size): self.size = size def __repr__(self): - return '{}({}, {}, {}, {})'.format( - self.__class__.__name__, - repr(self.id_), - repr(self.name), - repr(self.mod_time), - repr(self.action), - ) + return f'{self.__class__.__name__}({repr(self.id_)}, {repr(self.name)}, {repr(self.mod_time)}, {repr(self.action)})' class B2FileVersion(FileVersion): diff --git a/b2sdk/v1/sync/file_to_path_translator.py b/b2sdk/v1/sync/file_to_path_translator.py index 87fd56805..e3b51d642 100644 --- a/b2sdk/v1/sync/file_to_path_translator.py +++ b/b2sdk/v1/sync/file_to_path_translator.py @@ -50,8 +50,9 @@ def _translate_local_path_to_file(path: v2.LocalSyncPath) -> File: # The goal is to create v2.SyncPath objects from v1.File objects -def make_paths_from_files(dest_file: File, source_file: File, - sync_type: str) -> tuple[v2.AbstractSyncPath, v2.AbstractSyncPath]: +def make_paths_from_files( + dest_file: File, source_file: File, sync_type: str +) -> tuple[v2.AbstractSyncPath, v2.AbstractSyncPath]: assert sync_type in ('b2-to-b2', 'b2-to-local', 'local-to-b2') sync_type_split = sync_type.split('-') @@ -77,7 +78,7 @@ def _translate_local_file_to_path(file: File) -> v2.AbstractSyncPath: absolute_path=file.latest_version().id_, relative_path=file.name, mod_time=file.latest_version().mod_time, - size=file.latest_version().size + size=file.latest_version().size, ) diff --git a/b2sdk/v1/sync/scan_policies.py b/b2sdk/v1/sync/scan_policies.py index d94c78c07..4626feb0a 100644 --- a/b2sdk/v1/sync/scan_policies.py +++ b/b2sdk/v1/sync/scan_policies.py @@ -65,7 +65,7 @@ def __init__( if include_file_regexes and not exclude_file_regexes: raise v2_exception.InvalidArgument( 'include_file_regexes', - 'cannot be used without exclude_file_regexes at the same time' + 'cannot be used without exclude_file_regexes at the same time', ) self._exclude_dir_set = v2.RegexSet(exclude_dir_regexes) @@ -132,7 +132,7 @@ def __init__(self, scan_policies_manager: ScanPoliciesManager): self.exclude_all_symlinks = scan_policies_manager.exclude_all_symlinks def __repr__(self): - return f"{self.__class__.__name__}({self.scan_policies_manager})" + return f'{self.__class__.__name__}({self.scan_policies_manager})' def should_exclude_relative_path(self, relative_path: str): self.scan_policies_manager.should_exclude_file(relative_path) diff --git a/b2sdk/v1/sync/sync.py b/b2sdk/v1/sync/sync.py index feca84447..53c8324fb 100644 --- a/b2sdk/v1/sync/sync.py +++ b/b2sdk/v1/sync/sync.py @@ -63,9 +63,12 @@ def make_folder_sync_actions( encryption_settings_provider=v2.SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, ): return super()._make_folder_sync_actions( - source_folder, dest_folder, now_millis, reporter, + source_folder, + dest_folder, + now_millis, + reporter, scan_wrap_if_necessary(policies_manager), - encryption_wrap_if_necessary(encryption_settings_provider) + encryption_wrap_if_necessary(encryption_settings_provider), ) # override to retain a public method @@ -77,8 +80,7 @@ def make_file_sync_actions( source_folder, dest_folder, now_millis, - encryption_settings_provider: v2.AbstractSyncEncryptionSettingsProvider = v2. 
- SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, + encryption_settings_provider: v2.AbstractSyncEncryptionSettingsProvider = v2.SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, ): """ Yields the sequence of actions needed to sync the two files @@ -111,8 +113,7 @@ def _make_file_sync_actions( source_folder, dest_folder, now_millis, - encryption_settings_provider: v2.AbstractSyncEncryptionSettingsProvider = v2. - SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, + encryption_settings_provider: v2.AbstractSyncEncryptionSettingsProvider = v2.SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER, ): """ Yields the sequence of actions needed to sync the two files diff --git a/b2sdk/v2/__init__.py b/b2sdk/v2/__init__.py index ecedf98df..0537cda12 100644 --- a/b2sdk/v2/__init__.py +++ b/b2sdk/v2/__init__.py @@ -13,7 +13,11 @@ from b2sdk._v3 import parse_folder as parse_sync_folder from b2sdk._v3 import AbstractPath as AbstractSyncPath from b2sdk._v3 import LocalPath as LocalSyncPath -from b2sdk._internal.utils.escape import unprintable_to_hex, escape_control_chars, substitute_control_chars +from b2sdk._internal.utils.escape import ( + unprintable_to_hex, + escape_control_chars, + substitute_control_chars, +) from .account_info import AbstractAccountInfo from .api import B2Api diff --git a/b2sdk/v2/api.py b/b2sdk/v2/api.py index 349153285..aef3052a9 100644 --- a/b2sdk/v2/api.py +++ b/b2sdk/v2/api.py @@ -55,4 +55,4 @@ def authorize_account(self, realm, application_key_id, application_key): application_key_id=application_key_id, application_key=application_key, realm=realm, - ) \ No newline at end of file + ) diff --git a/b2sdk/v2/bucket.py b/b2sdk/v2/bucket.py index 4cbec26b6..66a35d76f 100644 --- a/b2sdk/v2/bucket.py +++ b/b2sdk/v2/bucket.py @@ -26,7 +26,6 @@ # Overridden to raise old style BucketIdNotFound exception class Bucket(v3.Bucket): - FILE_VERSION_FACTORY_CLASS = staticmethod(FileVersionFactory) def get_fresh_state(self) -> Bucket: @@ -50,7 +49,7 @@ def upload_bytes( custom_upload_timestamp: int | None = None, cache_control: str | None = None, *args, - **kwargs + **kwargs, ): return super().upload_bytes( data_bytes, @@ -85,7 +84,7 @@ def upload_local_file( custom_upload_timestamp: int | None = None, cache_control: str | None = None, *args, - **kwargs + **kwargs, ): return super().upload_local_file( local_file, @@ -114,7 +113,7 @@ def ls( with_wildcard: bool = False, filters: typing.Sequence[Filter] = (), folder_to_list_can_be_a_file: bool = False, - **kwargs + **kwargs, ) -> typing.Iterable[tuple[FileVersion, str]]: """ Pretend that folders exist and yields the information about the files in a folder. @@ -148,9 +147,12 @@ def ls( .. note:: In case of `recursive=True`, folder_name is not returned. 
""" - if not folder_to_list_can_be_a_file and folder_to_list and not folder_to_list.endswith( - '/' - ) and not with_wildcard: + if ( + not folder_to_list_can_be_a_file + and folder_to_list + and not folder_to_list.endswith('/') + and not with_wildcard + ): folder_to_list += '/' yield from super().ls( path=folder_to_list, @@ -159,7 +161,7 @@ def ls( fetch_count=fetch_count, with_wildcard=with_wildcard, filters=filters, - **kwargs + **kwargs, ) diff --git a/b2sdk/v2/raw_api.py b/b2sdk/v2/raw_api.py index d839d9a13..5003f133c 100644 --- a/b2sdk/v2/raw_api.py +++ b/b2sdk/v2/raw_api.py @@ -32,7 +32,7 @@ def get_upload_file_headers( custom_upload_timestamp: int | None = None, cache_control: str | None = None, *args, - **kwargs + **kwargs, ) -> dict: if cache_control is not None: file_info['b2-cache-control'] = cache_control @@ -71,7 +71,7 @@ def upload_file( custom_upload_timestamp: int | None = None, cache_control: str | None = None, *args, - **kwargs + **kwargs, ): if cache_control is not None: file_info['b2-cache-control'] = cache_control diff --git a/b2sdk/v2/raw_simulator.py b/b2sdk/v2/raw_simulator.py index e5495d018..95f18ecd6 100644 --- a/b2sdk/v2/raw_simulator.py +++ b/b2sdk/v2/raw_simulator.py @@ -31,7 +31,7 @@ def upload_file( custom_upload_timestamp: int | None = None, cache_control: str | None = None, *args, - **kwargs + **kwargs, ): if cache_control is not None: file_info['b2-cache-control'] = cache_control @@ -70,7 +70,7 @@ def get_upload_file_headers( custom_upload_timestamp: int | None = None, cache_control: str | None = None, *args, - **kwargs + **kwargs, ) -> dict: if cache_control is not None: file_info['b2-cache-control'] = cache_control @@ -106,7 +106,7 @@ def upload_file( custom_upload_timestamp: int | None = None, cache_control: str | None = None, *args, - **kwargs + **kwargs, ): if cache_control is not None: file_info['b2-cache-control'] = cache_control diff --git a/b2sdk/v2/session.py b/b2sdk/v2/session.py index 137b7df83..d62baab18 100644 --- a/b2sdk/v2/session.py +++ b/b2sdk/v2/session.py @@ -26,7 +26,7 @@ def __init__( self, account_info: _abstract.AbstractAccountInfo | None = None, cache: _cache.AbstractCache | None = None, - api_config: _api_config.B2HttpApiConfig = _api_config.DEFAULT_HTTP_API_CONFIG + api_config: _api_config.B2HttpApiConfig = _api_config.DEFAULT_HTTP_API_CONFIG, ): if account_info is not None and cache is None: # preserve legacy behavior https://github.com/Backblaze/b2-sdk-python/issues/497#issuecomment-2147461352 @@ -49,7 +49,7 @@ def upload_file( custom_upload_timestamp: int | None = None, cache_control: str | None = None, *args, - **kwargs + **kwargs, ): if cache_control is not None: file_info['b2-cache-control'] = cache_control diff --git a/b2sdk/v2/version_utils.py b/b2sdk/v2/version_utils.py index 0fcc6adf8..527997db7 100644 --- a/b2sdk/v2/version_utils.py +++ b/b2sdk/v2/version_utils.py @@ -15,22 +15,13 @@ class _OldAbstractDeprecatorMixin: def __call__(self, *args, **kwargs): if self.cutoff_version: - assert self.current_version < self.cutoff_version, '{} decorator is still used in version {} when old {} name {!r} was scheduled to be dropped in {}. It is time to remove the mapping.'.format( - self.__class__.__name__, - self.current_version, - self.WHAT, - self.source, - self.cutoff_version, - ) + assert ( + self.current_version < self.cutoff_version + ), f'{self.__class__.__name__} decorator is still used in version {self.current_version} when old {self.WHAT} name {self.source!r} was scheduled to be dropped in {self.cutoff_version}. 
It is time to remove the mapping.' ret = super().__call__(*args, **kwargs) - assert self.changed_version <= self.current_version, '{} decorator indicates that the replacement of {} {!r} should take place in the future version {}, while the current version is {}. It looks like should be _discouraged_ at this point and not _deprecated_ yet. Consider using {!r} decorator instead.'.format( - self.__class__.__name__, - self.WHAT, - self.source, - self.changed_version, - self.cutoff_version, - self.ALTERNATIVE_DECORATOR, - ) + assert ( + self.changed_version <= self.current_version + ), f'{self.__class__.__name__} decorator indicates that the replacement of {self.WHAT} {self.source!r} should take place in the future version {self.changed_version}, while the current version is {self.cutoff_version}. It looks like should be _discouraged_ at this point and not _deprecated_ yet. Consider using {self.ALTERNATIVE_DECORATOR!r} decorator instead.' return ret diff --git a/b2sdk/version.py b/b2sdk/version.py index d734eba93..241bce2ec 100644 --- a/b2sdk/version.py +++ b/b2sdk/version.py @@ -13,13 +13,13 @@ from sys import version_info as _version_info __all__ = [ - "VERSION", - "PYTHON_VERSION", - "USER_AGENT", + 'VERSION', + 'PYTHON_VERSION', + 'USER_AGENT', ] -VERSION = _version("b2sdk") +VERSION = _version('b2sdk') -PYTHON_VERSION = ".".join(map(str, _version_info[:3])) # something like: 3.9.1 +PYTHON_VERSION = '.'.join(map(str, _version_info[:3])) # something like: 3.9.1 -USER_AGENT = f"backblaze-b2/{VERSION} python/{PYTHON_VERSION}" +USER_AGENT = f'backblaze-b2/{VERSION} python/{PYTHON_VERSION}' diff --git a/doc/source/conf.py b/doc/source/conf.py index 77f1c0960..3329cf45f 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -22,15 +22,14 @@ # # All configuration values have a default; values that are commented out # serve to show the default. - # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # - import datetime import os import sys + sys.path.append(os.path.abspath('../..')) from b2sdk.version import VERSION # noqa: E402 @@ -71,10 +70,10 @@ master_doc = 'index' # General information about the project. -project = u'b2-sdk-python' +project = 'b2-sdk-python' -year = datetime.date.today().strftime("%Y") -author = u'Backblaze' +year = datetime.date.today().strftime('%Y') +author = 'Backblaze' copyright = f'{year}, {author}' # The version info for the project you're documenting, acts as replacement for @@ -107,12 +106,12 @@ # -- Options for HTML output ---------------------------------------------- html_context = { - "display_github": True, # Add 'Edit on Github' link instead of 'View page source' - "github_user": "Backblaze", - "github_repo": project, - "github_version": "master", - "conf_py_path": "/doc/source/", - "source_suffix": source_suffix, + 'display_github': True, # Add 'Edit on Github' link instead of 'View page source' + 'github_user': 'Backblaze', + 'github_repo': project, + 'github_version': 'master', + 'conf_py_path': '/doc/source/', + 'source_suffix': source_suffix, } # The theme to use for HTML and HTML Help pages. 
See the documentation for @@ -134,14 +133,14 @@ 'exclude-members': '__weakref__, _abc_cache, _abc_negative_cache, _abc_negative_cache_version, _abc_registry, _abc_impl', 'members': True, 'undoc-members': True, -} # yapf: disable +} always_document_param_types = True # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -#html_static_path = ['_static'] +# html_static_path = ['_static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. @@ -149,14 +148,13 @@ # This is required for the alabaster theme # refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars html_sidebars = { - '**': - [ - 'about.html', - 'navigation.html', - 'relations.html', # needs 'show_related': True theme option to display - 'searchbox.html', - 'donate.html', - ] + '**': [ + 'about.html', + 'navigation.html', + 'relations.html', # needs 'show_related': True theme option to display + 'searchbox.html', + 'donate.html', + ] } # -- Options for HTMLHelp output ------------------------------------------ @@ -170,15 +168,12 @@ # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. # # 'preamble': '', - # Latex figure (float) alignment # # 'figure_align': 'htbp', @@ -188,14 +183,14 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'B2_Python_SDK.tex', u'B2\\_Python\\_SDK', u'Backblaze', 'manual'), + (master_doc, 'B2_Python_SDK.tex', 'B2\\_Python\\_SDK', 'Backblaze', 'manual'), ] # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
-man_pages = [(master_doc, 'b2_python_sdk', u'B2 Python SDK Documentation', [author], 1)] +man_pages = [(master_doc, 'b2_python_sdk', 'B2 Python SDK Documentation', [author], 1)] # -- Options for Texinfo output ------------------------------------------- @@ -204,8 +199,13 @@ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, 'B2_Python_SDK', u'B2 Python SDK Documentation', author, 'B2_Python_SDK', - 'Backblaze Python SDK', 'Miscellaneous' + master_doc, + 'B2_Python_SDK', + 'B2 Python SDK Documentation', + author, + 'B2_Python_SDK', + 'Backblaze Python SDK', + 'Miscellaneous', ), ] diff --git a/noxfile.py b/noxfile.py index fa29ece9a..166f57758 100644 --- a/noxfile.py +++ b/noxfile.py @@ -20,38 +20,42 @@ UPSTREAM_REPO_URL = 'git@github.com:Backblaze/b2-sdk-python.git' # Required for PDM to use nox's virtualenvs -os.environ.update({"PDM_IGNORE_SAVED_PYTHON": "1"}) +os.environ.update({'PDM_IGNORE_SAVED_PYTHON': '1'}) CI = os.environ.get('CI') is not None NOX_PYTHONS = os.environ.get('NOX_PYTHONS') _NOX_EXTRAS = os.environ.get('NOX_EXTRAS') NOX_EXTRAS = [[]] if _NOX_EXTRAS is None else list(filter(None, [_NOX_EXTRAS.split(',')])) -PYTHON_VERSIONS = [ - 'pypy3.9', - 'pypy3.10', - '3.7', - '3.8', - '3.9', - '3.10', - '3.11', - '3.12', - '3.13', -] if NOX_PYTHONS is None else NOX_PYTHONS.split(',') +PYTHON_VERSIONS = ( + [ + 'pypy3.9', + 'pypy3.10', + '3.7', + '3.8', + '3.9', + '3.10', + '3.11', + '3.12', + '3.13', + ] + if NOX_PYTHONS is None + else NOX_PYTHONS.split(',') +) def _detect_python_nox_id() -> str: major, minor, *_ = platform.python_version_tuple() - python_nox_id = f"{major}.{minor}" + python_nox_id = f'{major}.{minor}' if platform.python_implementation() == 'PyPy': - python_nox_id = f"pypy{python_nox_id}" + python_nox_id = f'pypy{python_nox_id}' return python_nox_id if CI and not NOX_PYTHONS: # this is done to allow it to work even if `nox -p` was passed to nox PYTHON_VERSIONS = [_detect_python_nox_id()] - print(f"CI job mode; using provided interpreter only; PYTHON_VERSIONS={PYTHON_VERSIONS!r}") + print(f'CI job mode; using provided interpreter only; PYTHON_VERSIONS={PYTHON_VERSIONS!r}') PYTHON_DEFAULT_VERSION = PYTHON_VERSIONS[-2] if len(PYTHON_VERSIONS) > 1 else PYTHON_VERSIONS[0] @@ -117,7 +121,7 @@ def lint(session): @nox.session(python=PYTHON_VERSIONS) -@nox.parametrize("extras", NOX_EXTRAS) +@nox.parametrize('extras', NOX_EXTRAS) def unit(session, extras): """Run unit tests.""" pdm_install(session, 'test', *extras) @@ -137,7 +141,7 @@ def unit(session, extras): @nox.session(python=PYTHON_VERSIONS) -@nox.parametrize("extras", NOX_EXTRAS) +@nox.parametrize('extras', NOX_EXTRAS) def integration(session, extras): """Run integration tests.""" pdm_install(session, 'test', *extras) @@ -199,7 +203,14 @@ def doc(session): session.notify('doc_cover') else: sphinx_args[-2:-2] = [ - '-E', '--open-browser', '--watch', '../b2sdk', '--ignore', '*.pyc', '--ignore', '*~' + '-E', + '--open-browser', + '--watch', + '../b2sdk', + '--ignore', + '*.pyc', + '--ignore', + '*~', ] session.run('sphinx-autobuild', *sphinx_args) @@ -257,12 +268,14 @@ def make_release_commit(session): ) -def load_allowed_change_types(project_toml: pathlib.Path = pathlib.Path('./pyproject.toml') - ) -> set[str]: +def load_allowed_change_types( + project_toml: pathlib.Path = pathlib.Path('./pyproject.toml'), +) -> set[str]: """ Load the list of allowed change types from the pyproject.toml file. 
""" import tomllib + configuration = tomllib.loads(project_toml.read_text()) return set(entry['directory'] for entry in configuration['tool']['towncrier']['type']) @@ -279,7 +292,7 @@ def is_changelog_filename_valid(filename: str, allowed_change_types: set[str]) - description, change_type, extension = filename.rsplit('.', maxsplit=2) except ValueError: # Not enough values to unpack. - return False, "Doesn't follow the \"..md\" pattern." + return False, 'Doesn\'t follow the "..md" pattern.' # Check whether the filename ends with .md. if extension != wanted_extension: diff --git a/pyproject.toml b/pyproject.toml index 48855358b..4ddae3eaa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -84,10 +84,14 @@ ignore = [ "D100", "D105", "D107", "D200", "D202", "D203", "D205", "D212", "D400", "D401", "D415", "D101", "D102","D103", "D104", # TODO remove once we have docstring for all public methods "E501", # TODO: remove E501 once docstrings are formatted + "UP031", ] line-length = 100 target-version = "py37" +[tool.ruff.format] +quote-style = "single" + [tool.ruff.per-file-ignores] "__init__.py" = ["I", "F401"] "b2sdk/_v3/__init__.py" = ["E402"] diff --git a/test/integration/base.py b/test/integration/base.py index 99e7a7553..a67baf969 100644 --- a/test/integration/base.py +++ b/test/integration/base.py @@ -9,25 +9,24 @@ ###################################################################### from __future__ import annotations +import pytest + +from b2sdk.v2 import B2Api, current_time_millis +from b2sdk.v2.exception import DuplicateBucketName from test.integration.bucket_cleaner import BucketCleaner from test.integration.helpers import ( BUCKET_CREATED_AT_MILLIS, random_bucket_name, ) -import pytest - -from b2sdk.v2 import B2Api, current_time_millis -from b2sdk.v2.exception import DuplicateBucketName - -@pytest.mark.usefixtures("cls_setup") +@pytest.mark.usefixtures('cls_setup') class IntegrationTestBase: b2_api: B2Api this_run_bucket_name_prefix: str bucket_cleaner: BucketCleaner - @pytest.fixture(autouse=True, scope="class") + @pytest.fixture(autouse=True, scope='class') def cls_setup(self, request, b2_api, b2_auth_data, bucket_name_prefix, bucket_cleaner): cls = request.cls cls.b2_auth_data = b2_auth_data @@ -60,7 +59,7 @@ def create_bucket(self): bucket = self.b2_api.create_bucket( bucket_name, 'allPublic', - bucket_info={BUCKET_CREATED_AT_MILLIS: str(current_time_millis())} + bucket_info={BUCKET_CREATED_AT_MILLIS: str(current_time_millis())}, ) except DuplicateBucketName: self._duplicated_bucket_name_debug_info(bucket_name) diff --git a/test/integration/bucket_cleaner.py b/test/integration/bucket_cleaner.py index 522f9e720..909941ca8 100644 --- a/test/integration/bucket_cleaner.py +++ b/test/integration/bucket_cleaner.py @@ -43,8 +43,10 @@ def _should_remove_bucket(self, bucket: Bucket): return False if bucket.name.startswith(GENERAL_BUCKET_NAME_PREFIX): if BUCKET_CREATED_AT_MILLIS in bucket.bucket_info: - if int(bucket.bucket_info[BUCKET_CREATED_AT_MILLIS] - ) < current_time_millis() - ONE_HOUR_MILLIS: + if ( + int(bucket.bucket_info[BUCKET_CREATED_AT_MILLIS]) + < current_time_millis() - ONE_HOUR_MILLIS + ): return True return False @@ -76,11 +78,16 @@ def cleanup_bucket(self, bucket: Bucket): 'Removing retention from file version: %s', file_version_info.id_ ) b2_api.update_file_retention( - file_version_info.id_, file_version_info.file_name, - NO_RETENTION_FILE_SETTING, True + file_version_info.id_, + file_version_info.file_name, + NO_RETENTION_FILE_SETTING, + True, ) elif 
file_version_info.file_retention.mode == RetentionMode.COMPLIANCE: - if file_version_info.file_retention.retain_until > current_time_millis(): # yapf: disable + if ( + file_version_info.file_retention.retain_until + > current_time_millis() + ): logger.info( 'File version: %s cannot be removed due to compliance mode retention', file_version_info.id_, diff --git a/test/integration/cleanup_buckets.py b/test/integration/cleanup_buckets.py index 220aaf570..520f02199 100755 --- a/test/integration/cleanup_buckets.py +++ b/test/integration/cleanup_buckets.py @@ -17,5 +17,6 @@ if __name__ == '__main__': cleanup_old_buckets() - BucketCleaner(dont_cleanup_old_buckets=False, - b2_api=authorize(get_b2_auth_data())[0]).cleanup_buckets() + BucketCleaner( + dont_cleanup_old_buckets=False, b2_api=authorize(get_b2_auth_data())[0] + ).cleanup_buckets() diff --git a/test/integration/conftest.py b/test/integration/conftest.py index aacb348cd..9e84a3fdd 100644 --- a/test/integration/conftest.py +++ b/test/integration/conftest.py @@ -13,6 +13,10 @@ import http.client import os import secrets + +import pytest + +from b2sdk._internal.utils import current_time_millis from test.integration import get_b2_auth_data from test.integration.bucket_cleaner import BucketCleaner from test.integration.helpers import ( @@ -22,32 +26,28 @@ random_bucket_name, ) -import pytest - -from b2sdk._internal.utils import current_time_millis - def pytest_addoption(parser): """Add a flag for not cleaning up old buckets""" parser.addoption( - "--dont-cleanup-old-buckets", - action="store_true", + '--dont-cleanup-old-buckets', + action='store_true', default=False, ) -@pytest.fixture(scope="session") +@pytest.fixture(scope='session') def dont_cleanup_old_buckets(request): - return request.config.getoption("--dont-cleanup-old-buckets") + return request.config.getoption('--dont-cleanup-old-buckets') -@pytest.fixture(autouse=True, scope="session") +@pytest.fixture(autouse=True, scope='session') def set_http_debug(): - if os.environ.get("B2_DEBUG_HTTP"): + if os.environ.get('B2_DEBUG_HTTP'): http.client.HTTPConnection.debuglevel = 1 -@pytest.fixture(scope="session") +@pytest.fixture(scope='session') def b2_auth_data(): try: return get_b2_auth_data() @@ -55,18 +55,18 @@ def b2_auth_data(): pytest.fail(ex.args[0]) -@pytest.fixture(scope="session") +@pytest.fixture(scope='session') def bucket_name_prefix(): return get_bucket_name_prefix(8) -@pytest.fixture(scope="session") +@pytest.fixture(scope='session') def _b2_api(b2_auth_data): b2_api, _ = authorize(b2_auth_data) return b2_api -@pytest.fixture(scope="session") +@pytest.fixture(scope='session') def bucket_cleaner(bucket_name_prefix, dont_cleanup_old_buckets, _b2_api): cleaner = BucketCleaner( dont_cleanup_old_buckets, @@ -77,7 +77,7 @@ def bucket_cleaner(bucket_name_prefix, dont_cleanup_old_buckets, _b2_api): cleaner.cleanup_buckets() -@pytest.fixture(scope="session") +@pytest.fixture(scope='session') def b2_api(_b2_api, bucket_cleaner): return _b2_api @@ -86,9 +86,9 @@ def b2_api(_b2_api, bucket_cleaner): def bucket(b2_api, bucket_name_prefix, bucket_cleaner): bucket = b2_api.create_bucket( random_bucket_name(bucket_name_prefix), - "allPrivate", + 'allPrivate', bucket_info={ - "created_by": "b2-sdk integration test", + 'created_by': 'b2-sdk integration test', BUCKET_CREATED_AT_MILLIS: str(current_time_millis()), }, ) @@ -98,5 +98,5 @@ def bucket(b2_api, bucket_name_prefix, bucket_cleaner): @pytest.fixture def b2_subfolder(bucket, request): - subfolder_name = 
f"{request.node.name}_{secrets.token_urlsafe(4)}" - return f"b2://{bucket.name}/{subfolder_name}" + subfolder_name = f'{request.node.name}_{secrets.token_urlsafe(4)}' + return f'b2://{bucket.name}/{subfolder_name}' diff --git a/test/integration/test_bucket.py b/test/integration/test_bucket.py index e9b46d3c5..2e18487d0 100644 --- a/test/integration/test_bucket.py +++ b/test/integration/test_bucket.py @@ -7,10 +7,10 @@ # License https://www.backblaze.com/using_b2_code.html # ###################################################################### -from test.helpers import assert_dict_equal_ignore_extra - import pytest +from test.helpers import assert_dict_equal_ignore_extra + def test_bucket_notification_rules(bucket, b2_api): if 'writeBucketNotifications' not in b2_api.account_info.get_allowed()['capabilities']: @@ -20,37 +20,33 @@ def test_bucket_notification_rules(bucket, b2_api): assert bucket.get_notification_rules() == [] notification_rule = { - "eventTypes": ["b2:ObjectCreated:*"], - "isEnabled": True, - "name": "test-rule", - "objectNamePrefix": "", - "targetConfiguration": - { - "customHeaders": [], - "targetType": "webhook", - "url": "https://example.com/webhook", - "hmacSha256SigningSecret": "stringOf32AlphaNumericCharacters", - } + 'eventTypes': ['b2:ObjectCreated:*'], + 'isEnabled': True, + 'name': 'test-rule', + 'objectNamePrefix': '', + 'targetConfiguration': { + 'customHeaders': [], + 'targetType': 'webhook', + 'url': 'https://example.com/webhook', + 'hmacSha256SigningSecret': 'stringOf32AlphaNumericCharacters', + }, } set_notification_rules = bucket.set_notification_rules([notification_rule]) assert set_notification_rules == bucket.get_notification_rules() assert_dict_equal_ignore_extra( set_notification_rules, - [{ - **notification_rule, "isSuspended": False, - "suspensionReason": "" - }], + [{**notification_rule, 'isSuspended': False, 'suspensionReason': ''}], ) assert bucket.set_notification_rules([]) == [] def test_bucket_update__lifecycle_rules(bucket, b2_api): lifecycle_rule = { - "daysFromHidingToDeleting": 1, - "daysFromUploadingToHiding": 1, - "daysFromStartingToCancelingUnfinishedLargeFiles": 1, - "fileNamePrefix": "", + 'daysFromHidingToDeleting': 1, + 'daysFromUploadingToHiding': 1, + 'daysFromStartingToCancelingUnfinishedLargeFiles': 1, + 'fileNamePrefix': '', } old_rules_list = bucket.lifecycle_rules diff --git a/test/integration/test_download.py b/test/integration/test_download.py index ef02ac802..ee7a8fafa 100644 --- a/test/integration/test_download.py +++ b/test/integration/test_download.py @@ -45,9 +45,8 @@ def test_large_file(self): max_chunk_size=download_manager.MAX_CHUNK_SIZE, thread_pool=download_manager._thread_pool, ) - ] + ], ): - # let's check that small file downloads do not fail with these settings small_file_version = bucket.upload_bytes(b'0', 'a_single_char') with io.BytesIO() as io_: @@ -57,14 +56,17 @@ def test_large_file(self): f, sha1 = self._file_helper(bucket) if small_file_version._type() != 'large': # if we are here, that's not the production server! 
- assert f.download_version.content_sha1_verified # large files don't have sha1, lets not check + assert ( + f.download_version.content_sha1_verified + ) # large files don't have sha1, lets not check file_info = f.download_version.file_info assert LARGE_FILE_SHA1 in file_info assert file_info[LARGE_FILE_SHA1] == sha1 - def _file_helper(self, bucket, sha1_sum=None, - bytes_to_write: int | None = None) -> tuple[DownloadVersion, Sha1HexDigest]: + def _file_helper( + self, bucket, sha1_sum=None, bytes_to_write: int | None = None + ) -> tuple[DownloadVersion, Sha1HexDigest]: bytes_to_write = bytes_to_write or int(self.info.get_absolute_minimum_part_size()) * 2 + 1 with tempfile.TemporaryDirectory() as temp_dir: temp_dir = pathlib.Path(temp_dir) @@ -98,7 +100,7 @@ def test_small_unverified(self): assert not f.download_version.content_sha1_verified -@pytest.mark.parametrize("size_multiplier", [1, 100]) +@pytest.mark.parametrize('size_multiplier', [1, 100]) def test_gzip(b2_auth_data, bucket, tmp_path, b2_api, size_multiplier): """Test downloading gzipped files of varius sizes with and without content-encoding.""" source_file = tmp_path / 'compressed_file.gz' @@ -159,12 +161,12 @@ def binary_cap(request): For Windows we need capsys as capfd fails, while on any other (i.e. POSIX systems) we need capfd. This is sadly tied directly to how .save_to() is implemented, as Windows required special handling. """ - cap = request.getfixturevalue("capsysbinary" if _IS_WINDOWS else "capfdbinary") + cap = request.getfixturevalue('capsysbinary' if _IS_WINDOWS else 'capfdbinary') yield cap def test_download_to_stdout(bucket, source_file, uploaded_source_file_version, binary_cap): - output_file = "CON" if _IS_WINDOWS else "/dev/stdout" + output_file = 'CON' if _IS_WINDOWS else '/dev/stdout' bucket.download_file_by_id(file_id=uploaded_source_file_version.id_).save_to(output_file) diff --git a/test/integration/test_file_version_attributes.py b/test/integration/test_file_version_attributes.py index 4f137eebf..f128b05c8 100644 --- a/test/integration/test_file_version_attributes.py +++ b/test/integration/test_file_version_attributes.py @@ -32,8 +32,8 @@ def test_file_info_b2_attributes(self): 'content_language': 'en', } kwargs = { - **expected_attributes, 'expires': - dt.datetime(2105, 10, 21, 7, 28, tzinfo=dt.timezone.utc) + **expected_attributes, + 'expires': dt.datetime(2105, 10, 21, 7, 28, tzinfo=dt.timezone.utc), } file_version = bucket.upload_bytes(b'0', 'file', **kwargs) @@ -49,12 +49,8 @@ def test_file_info_b2_attributes(self): file_version.id_, 'file_copy', content_type='text/plain', - **{ - **kwargs, 'content_language': 'de' - } + **{**kwargs, 'content_language': 'de'}, ) self._assert_object_has_attributes( - copied_version, { - **expected_attributes, 'content_language': 'de' - } + copied_version, {**expected_attributes, 'content_language': 'de'} ) diff --git a/test/integration/test_raw_api.py b/test/integration/test_raw_api.py index 12b48f1ed..2ed205196 100644 --- a/test/integration/test_raw_api.py +++ b/test/integration/test_raw_api.py @@ -16,7 +16,6 @@ import sys import time import traceback -from test.helpers import assert_dict_equal_ignore_extra, type_validator_factory from typing import List import pytest @@ -44,6 +43,7 @@ from b2sdk._internal.replication.setting import ReplicationConfiguration, ReplicationRule from b2sdk._internal.replication.types import ReplicationStatus from b2sdk._internal.utils import hex_sha1_of_stream +from test.helpers import assert_dict_equal_ignore_extra, 
type_validator_factory # TODO: rewrite to separate test cases after introduction of reusable bucket @@ -116,12 +116,12 @@ def raw_api_test_helper(raw_api, should_cleanup_old_buckets): 'writeBucketNotifications', } missing_capabilities = ( - set(ALL_CAPABILITIES) - {'readBuckets', 'listAllBucketNames'} - preview_feature_caps - - set(auth_dict['allowed']['capabilities']) - ) - assert not missing_capabilities, 'it appears that the raw_api integration test is being run with a non-full key. Missing capabilities: {}'.format( - missing_capabilities, + set(ALL_CAPABILITIES) + - {'readBuckets', 'listAllBucketNames'} + - preview_feature_caps + - set(auth_dict['allowed']['capabilities']) ) + assert not missing_capabilities, f'it appears that the raw_api integration test is being run with a non-full key. Missing capabilities: {missing_capabilities}' account_id = auth_dict['accountId'] account_auth_token = auth_dict['authorizationToken'] @@ -155,11 +155,13 @@ def raw_api_test_helper(raw_api, should_cleanup_old_buckets): # other accounts. print('b2_create_bucket') bucket_name = 'test-raw-api-%s-%d-%d' % ( - account_id, int(time.time()), random.randint(1000, 9999) + account_id, + int(time.time()), + random.randint(1000, 9999), ) # very verbose http debug - #import http.client; http.client.HTTPConnection.debuglevel = 1 + # import http.client; http.client.HTTPConnection.debuglevel = 1 bucket_dict = raw_api.create_bucket( api_url, @@ -197,7 +199,9 @@ def raw_api_test_helper(raw_api, should_cleanup_old_buckets): try: # in order to test replication, we need to create a second bucket replication_source_bucket_name = 'test-raw-api-%s-%d-%d' % ( - account_id, int(time.time()), random.randint(1000, 9999) + account_id, + int(time.time()), + random.randint(1000, 9999), ) replication_source_bucket_dict = raw_api.create_bucket( api_url, @@ -220,25 +224,22 @@ def raw_api_test_helper(raw_api, should_cleanup_old_buckets): assert 'replicationConfiguration' in replication_source_bucket_dict assert replication_source_bucket_dict['replicationConfiguration'] == { 'isClientAuthorizedToRead': True, - 'value': - { - "asReplicationSource": + 'value': { + 'asReplicationSource': { + 'replicationRules': [ { - "replicationRules": - [ - { - "destinationBucketId": bucket_id, - "fileNamePrefix": "", - "includeExistingFiles": True, - "isEnabled": True, - "priority": 128, - "replicationRuleName": "test-rule" - }, - ], - "sourceApplicationKeyId": replication_source_key, + 'destinationBucketId': bucket_id, + 'fileNamePrefix': '', + 'includeExistingFiles': True, + 'isEnabled': True, + 'priority': 128, + 'replicationRuleName': 'test-rule', }, - "asReplicationDestination": None, + ], + 'sourceApplicationKeyId': replication_source_key, }, + 'asReplicationDestination': None, + }, } # 3) upload test file and check replication status @@ -259,8 +260,9 @@ def raw_api_test_helper(raw_api, should_cleanup_old_buckets): io.BytesIO(file_contents), ) - assert ReplicationStatus[file_dict['replicationStatus'].upper() - ] == ReplicationStatus.PENDING + assert ( + ReplicationStatus[file_dict['replicationStatus'].upper()] == ReplicationStatus.PENDING + ) finally: raw_api.delete_key(api_url, account_auth_token, replication_source_key) @@ -294,17 +296,14 @@ def raw_api_test_helper(raw_api, should_cleanup_old_buckets): ) assert bucket_dict['replicationConfiguration'] == { 'isClientAuthorizedToRead': True, - 'value': - { - 'asReplicationDestination': - { - 'sourceToDestinationKeyMapping': - { - replication_source_key: replication_destination_key, - }, - }, - 
'asReplicationSource': None, + 'value': { + 'asReplicationDestination': { + 'sourceToDestinationKeyMapping': { + replication_source_key: replication_destination_key, + }, }, + 'asReplicationSource': None, + }, } finally: raw_api.delete_key( @@ -345,7 +344,7 @@ def raw_api_test_helper(raw_api, should_cleanup_old_buckets): for encryption_setting, default_retention in [ ( sse_none, - BucketRetentionSetting(mode=RetentionMode.GOVERNANCE, period=RetentionPeriod(days=1)) + BucketRetentionSetting(mode=RetentionMode.GOVERNANCE, period=RetentionPeriod(days=1)), ), (sse_b2_aes, None), (sse_b2_aes, BucketRetentionSetting(RetentionMode.NONE)), @@ -363,7 +362,7 @@ def raw_api_test_helper(raw_api, should_cleanup_old_buckets): # b2_list_buckets print('b2_list_buckets') bucket_list_dict = raw_api.list_buckets(api_url, account_auth_token, account_id) - #print(bucket_list_dict) + # print(bucket_list_dict) # b2_get_upload_url print('b2_get_upload_url') @@ -383,17 +382,14 @@ def raw_api_test_helper(raw_api, should_cleanup_old_buckets): len(file_contents), 'text/plain', file_sha1, - { - 'color': 'blue', - 'b2-cache-control': 'private, max-age=2222' - }, + {'color': 'blue', 'b2-cache-control': 'private, max-age=2222'}, io.BytesIO(file_contents), server_side_encryption=sse_b2_aes, - #custom_upload_timestamp=12345, + # custom_upload_timestamp=12345, file_retention=FileRetentionSetting( RetentionMode.GOVERNANCE, int(time.time() + 100) * 1000, - ) + ), ) file_id = file_dict['fileId'] @@ -518,8 +514,12 @@ def raw_api_test_helper(raw_api, should_cleanup_old_buckets): part_contents = b'hello part' part_sha1 = hex_sha1_of_stream(io.BytesIO(part_contents), len(part_contents)) raw_api.upload_part( - upload_part_url, upload_path_auth, 1, len(part_contents), part_sha1, - io.BytesIO(part_contents) + upload_part_url, + upload_path_auth, + 1, + len(part_contents), + part_sha1, + io.BytesIO(part_contents), ) # b2_copy_part @@ -607,12 +607,11 @@ def _subtest_bucket_notification_rules(raw_api, auth_dict, api_url, account_auth 'isEnabled': False, 'name': 'test-notification-rule', 'objectNamePrefix': 'test/object/prefix/', - 'targetConfiguration': - { - 'targetType': 'webhook', - 'url': 'https://example.com/webhook', - 'hmacSha256SigningSecret': 'a' * 32, - }, + 'targetConfiguration': { + 'targetType': 'webhook', + 'url': 'https://example.com/webhook', + 'hmacSha256SigningSecret': 'a' * 32, + }, } notification_rules_response_list = raw_api.set_bucket_notification_rules( @@ -624,14 +623,14 @@ def _subtest_bucket_notification_rules(raw_api, auth_dict, api_url, account_auth notification_rule_response_list_validate(notification_rules_response_list) expected_notification_rule_response_list = [ { - **notification_rule, 'isSuspended': False, + **notification_rule, + 'isSuspended': False, 'suspensionReason': '', - 'targetConfiguration': - { - **notification_rule['targetConfiguration'], - 'customHeaders': None, - 'hmacSha256SigningSecret': 'a' * 32, - } + 'targetConfiguration': { + **notification_rule['targetConfiguration'], + 'customHeaders': None, + 'hmacSha256SigningSecret': 'a' * 32, + }, } ] assert_dict_equal_ignore_extra( @@ -676,15 +675,18 @@ def _clean_and_delete_bucket(raw_api, api_url, account_auth_token, account_id, b action = version_dict['action'] if action in ['hide', 'upload']: print('b2_delete_file', file_name, action) - if action == 'upload' and version_dict[ - 'fileRetention'] and version_dict['fileRetention']['value']['mode'] is not None: + if ( + action == 'upload' + and version_dict['fileRetention'] + and 
version_dict['fileRetention']['value']['mode'] is not None + ): raw_api.update_file_retention( api_url, account_auth_token, file_id, file_name, NO_RETENTION_FILE_SETTING, - bypass_governance=True + bypass_governance=True, ) raw_api.delete_file_version(api_url, account_auth_token, file_id, file_name) else: diff --git a/test/integration/test_sync.py b/test/integration/test_sync.py index 7366e4f52..b74701998 100644 --- a/test/integration/test_sync.py +++ b/test/integration/test_sync.py @@ -25,13 +25,13 @@ @pytest.fixture def local_folder_with_files(tmp_path): - folder = tmp_path / "test" + folder = tmp_path / 'test' folder.mkdir() - (folder / "a").mkdir() - (folder / "a" / "foo").write_bytes(b"foo") + (folder / 'a').mkdir() + (folder / 'a' / 'foo').write_bytes(b'foo') # space in the name is important as it influences lexicographical sorting used by B2 - (folder / "a b").mkdir() - (folder / "a b" / "bar").write_bytes(b"bar") + (folder / 'a b').mkdir() + (folder / 'a b' / 'bar').write_bytes(b'bar') return folder diff --git a/test/unit/account_info/fixtures.py b/test/unit/account_info/fixtures.py index 45d7b928e..d3efebfec 100644 --- a/test/unit/account_info/fixtures.py +++ b/test/unit/account_info/fixtures.py @@ -83,18 +83,22 @@ def sqlite_account_info(sqlite_account_info_factory): return sqlite_account_info_factory() -@pytest.fixture(params=[ - lf('in_memory_account_info_factory'), - lf('sqlite_account_info_factory'), -]) +@pytest.fixture( + params=[ + lf('in_memory_account_info_factory'), + lf('sqlite_account_info_factory'), + ] +) def account_info_factory(request): return request.param -@pytest.fixture(params=[ - lf('in_memory_account_info'), - lf('sqlite_account_info'), -]) +@pytest.fixture( + params=[ + lf('in_memory_account_info'), + lf('sqlite_account_info'), + ] +) def account_info(request): return request.param diff --git a/test/unit/account_info/test_account_info.py b/test/unit/account_info/test_account_info.py index b332ffd71..143122e83 100644 --- a/test/unit/account_info/test_account_info.py +++ b/test/unit/account_info/test_account_info.py @@ -101,7 +101,7 @@ def test_is_same_account(self, account_id, realm, expected): @pytest.mark.parametrize( 's3_api_url', - ('https://s3.us-east-123.backblazeb2.com', 'https://s3.us-west-321.backblazeb2.com') + ('https://s3.us-east-123.backblazeb2.com', 'https://s3.us-west-321.backblazeb2.com'), ) def test_s3_api_url(self, s3_api_url): account_info = self.account_info_factory() @@ -199,10 +199,12 @@ def test_set_auth_data_compatibility(self, account_info_default_data): capabilities=['readFiles'], namePrefix=None, ) - account_info.set_auth_data(**{ - **account_info_default_data, - 'allowed': allowed, - }) + account_info.set_auth_data( + **{ + **account_info_default_data, + 'allowed': allowed, + } + ) assert allowed == account_info.get_allowed() def test_clear_bucket_upload_data(self): @@ -234,10 +236,12 @@ def test_bucket(self): assert 'bucket-0' == account_info.get_bucket_id_or_none_from_bucket_name('my-bucket') assert 'my-bucket' == account_info.get_bucket_name_or_none_from_bucket_id('bucket-0') if self.PERSISTENCE: - assert 'bucket-0' == self._make_info( - ).get_bucket_id_or_none_from_bucket_name('my-bucket') - assert 'my-bucket' == self._make_info( - ).get_bucket_name_or_none_from_bucket_id('bucket-0') + assert 'bucket-0' == self._make_info().get_bucket_id_or_none_from_bucket_name( + 'my-bucket' + ) + assert 'my-bucket' == self._make_info().get_bucket_name_or_none_from_bucket_id( + 'bucket-0' + ) assert ('my-bucket', 'bucket-0') in 
account_info.list_bucket_names_ids() account_info.remove_bucket_name('my-bucket') assert account_info.get_bucket_id_or_none_from_bucket_name('my-bucket') is None @@ -270,7 +274,7 @@ def test_account_info_up_to_v1(self): 100, 'app_key', 'realm', - application_key_id='key_id' + application_key_id='key_id', ) object_instances = [account_info] @@ -348,7 +352,8 @@ def setUp(self, request): yield for cleanup_method in [ - lambda: os.unlink(self.db_path), lambda: shutil.rmtree(self.test_home) + lambda: os.unlink(self.db_path), + lambda: shutil.rmtree(self.test_home), ]: try: cleanup_method() @@ -363,7 +368,9 @@ def test_permissions(self): """ Test that a new database won't be readable by just any user """ - SqliteAccountInfo(file_name=self.db_path,) + SqliteAccountInfo( + file_name=self.db_path, + ) mode = os.stat(self.db_path).st_mode assert stat.filemode(mode) == '-rw-------' @@ -379,7 +386,7 @@ def test_corrupted(self): @pytest.mark.skipif( platform.system() == 'Windows', - reason='it fails to upgrade on Windows, not worth to fix it anymore' + reason='it fails to upgrade on Windows, not worth to fix it anymore', ) def test_convert_from_json(self): """ @@ -393,7 +400,7 @@ def test_convert_from_json(self): application_key='application_key', download_url='download_url', minimum_part_size=5000, - realm='production' + realm='production', ) with open(self.db_path, 'wb') as f: f.write(json.dumps(data).encode('utf-8')) diff --git a/test/unit/account_info/test_sqlite_account_info.py b/test/unit/account_info/test_sqlite_account_info.py index bea09b91c..d7ed1f4a8 100644 --- a/test/unit/account_info/test_sqlite_account_info.py +++ b/test/unit/account_info/test_sqlite_account_info.py @@ -60,7 +60,7 @@ def test_migrate_to_4(self): with new_account_info._get_connection() as conn: sizes = conn.execute( - "SELECT recommended_part_size, absolute_minimum_part_size from account" + 'SELECT recommended_part_size, absolute_minimum_part_size from account' ).fetchone() assert (100, 5000000) == sizes @@ -71,7 +71,7 @@ def setup(self, monkeypatch, tmpdir): monkeypatch.setenv( 'HOME', str(tmpdir) ) # this affects .expanduser() and protects the real HOME folder - monkeypatch.setenv("USERPROFILE", str(tmpdir)) # same as HOME, but for Windows + monkeypatch.setenv('USERPROFILE', str(tmpdir)) # same as HOME, but for Windows monkeypatch.delenv(B2_ACCOUNT_INFO_ENV_VAR, raising=False) monkeypatch.delenv(XDG_CONFIG_HOME_ENV_VAR, raising=False) diff --git a/test/unit/api/test_api.py b/test/unit/api/test_api.py index 0cafb0456..22aeeacb9 100644 --- a/test/unit/api/test_api.py +++ b/test/unit/api/test_api.py @@ -82,7 +82,7 @@ def test_get_file_info(self): self._authorize_account() bucket = self.api.create_bucket('bucket1', 'allPrivate') created_file = bucket.upload_bytes( - b'hello world', 'file', cache_control="private, max-age=3600" + b'hello world', 'file', cache_control='private, max-age=3600' ) result = self.api.get_file_info(created_file.id_) @@ -103,19 +103,9 @@ def test_get_file_info(self): 'b2-cache-control': 'private, max-age=3600', }, 'fileName': 'file', - 'fileRetention': { - 'isClientAuthorizedToRead': True, - 'value': { - 'mode': None - } - }, - 'legalHold': { - 'isClientAuthorizedToRead': True, - 'value': None - }, - 'serverSideEncryption': { - 'mode': 'none' - }, + 'fileRetention': {'isClientAuthorizedToRead': True, 'value': {'mode': None}}, + 'legalHold': {'isClientAuthorizedToRead': True, 'value': None}, + 'serverSideEncryption': {'mode': 'none'}, 'uploadTimestamp': 5000, } else: @@ -135,14 +125,9 @@ def 
test_get_file_info_by_name(self): 'fileId': '9999', 'fileName': 'file', 'fileInfo': {}, - 'serverSideEncryption': { - 'mode': 'none' - }, + 'serverSideEncryption': {'mode': 'none'}, 'legalHold': None, - 'fileRetention': { - 'mode': None, - 'retainUntilTimestamp': None - }, + 'fileRetention': {'mode': None, 'retainUntilTimestamp': None}, 'size': 11, 'uploadTimestamp': 5000, 'contentType': 'b2/x-auto', @@ -151,11 +136,13 @@ def test_get_file_info_by_name(self): } if apiver_deps.V <= 1: - expected_result.update({ - 'accountId': None, - 'action': 'upload', - 'bucketId': None, - }) + expected_result.update( + { + 'accountId': None, + 'action': 'upload', + 'bucketId': None, + } + ) assert result.as_dict() == expected_result @@ -173,14 +160,9 @@ def test_get_hidden_file_info_by_name(self): 'fileId': '9998', 'fileName': 'hidden-file.txt', 'fileInfo': {}, - 'serverSideEncryption': { - 'mode': 'none' - }, + 'serverSideEncryption': {'mode': 'none'}, 'legalHold': None, - 'fileRetention': { - 'mode': None, - 'retainUntilTimestamp': None - }, + 'fileRetention': {'mode': None, 'retainUntilTimestamp': None}, 'size': 0, 'uploadTimestamp': 5001, 'contentSha1': 'none', @@ -188,11 +170,13 @@ def test_get_hidden_file_info_by_name(self): } if apiver_deps.V <= 1: - expected_result.update({ - 'accountId': None, - 'action': 'upload', - 'bucketId': None, - }) + expected_result.update( + { + 'accountId': None, + 'action': 'upload', + 'bucketId': None, + } + ) assert result.as_dict() == expected_result @@ -216,7 +200,7 @@ def test_get_file_info_by_name_with_properties(self): 'file', encryption=encr_setting, legal_hold=lh_setting, - file_retention=retention_setting + file_retention=retention_setting, ) result = self.api.get_file_info_by_name('bucket1', 'file') @@ -241,27 +225,19 @@ def test_get_file_info_by_name_with_properties(self): 'lifecycleRules': [], 'options': set(), 'revision': 1, - 'defaultServerSideEncryption': - { - 'isClientAuthorizedToRead': True, - 'value': { - 'mode': 'none' - }, - }, - 'fileLockConfiguration': - { - 'isClientAuthorizedToRead': True, - 'value': - { - 'defaultRetention': { - 'mode': None, - 'period': None - }, - 'isFileLockEnabled': None - } + 'defaultServerSideEncryption': { + 'isClientAuthorizedToRead': True, + 'value': {'mode': 'none'}, + }, + 'fileLockConfiguration': { + 'isClientAuthorizedToRead': True, + 'value': { + 'defaultRetention': {'mode': None, 'period': None}, + 'isFileLockEnabled': None, }, + }, }, - marks=pytest.mark.apiver(to_ver=0) + marks=pytest.mark.apiver(to_ver=0), ), ], ) @@ -285,7 +261,7 @@ def test_list_buckets_with_name(self): @pytest.mark.apiver(from_ver=3) def test_list_buckets_from_cache(self): - bucket = type("bucket", (), {"name": "bucket", "id_": "ID-0"}) + bucket = type('bucket', (), {'name': 'bucket', 'id_': 'ID-0'}) self._authorize_account() self.cache.set_bucket_name_cache([bucket]) @@ -294,10 +270,10 @@ def list_buckets(*args, **kwargs): return [(b.name, b.id_) for b in buckets] assert list_buckets(use_cache=True) == [('bucket', 'ID-0')] - assert list_buckets(bucket_name="bucket", use_cache=True) == [('bucket', 'ID-0')] - assert list_buckets(bucket_name="bucket2", use_cache=True) == [] - assert list_buckets(bucket_id="ID-0", use_cache=True) == [('bucket', 'ID-0')] - assert list_buckets(bucket_id="ID-2", use_cache=True) == [] + assert list_buckets(bucket_name='bucket', use_cache=True) == [('bucket', 'ID-0')] + assert list_buckets(bucket_name='bucket2', use_cache=True) == [] + assert list_buckets(bucket_id='ID-0', use_cache=True) == [('bucket', 'ID-0')] 
+ assert list_buckets(bucket_id='ID-2', use_cache=True) == [] assert self.api.list_buckets() == [] def test_buckets_with_encryption(self): @@ -306,8 +282,12 @@ def test_buckets_with_encryption(self): mode=EncryptionMode.SSE_B2, algorithm=EncryptionAlgorithm.AES256, ) - no_encryption = EncryptionSetting(mode=EncryptionMode.NONE,) - unknown_encryption = EncryptionSetting(mode=EncryptionMode.UNKNOWN,) + no_encryption = EncryptionSetting( + mode=EncryptionMode.NONE, + ) + unknown_encryption = EncryptionSetting( + mode=EncryptionMode.UNKNOWN, + ) b1 = self.api.create_bucket( 'bucket1', @@ -359,7 +339,9 @@ def _verify_if_bucket_is_encrypted(self, bucket, should_be_encrypted): mode=EncryptionMode.SSE_B2, algorithm=EncryptionAlgorithm.AES256, ) - no_encryption = EncryptionSetting(mode=EncryptionMode.NONE,) + no_encryption = EncryptionSetting( + mode=EncryptionMode.NONE, + ) if not should_be_encrypted: assert bucket.default_server_side_encryption == no_encryption else: @@ -528,9 +510,13 @@ def test_cancel_large_file_v1(self): @pytest.mark.apiver(to_ver=1) def test_provide_raw_api_v1(self): from apiver_deps import B2RawApi # test for legacy name + old_style_api = B2Api(raw_api=B2RawApi(B2Http(user_agent_append='test append'))) new_style_api = B2Api(api_config=B2HttpApiConfig(user_agent_append='test append')) - assert old_style_api.session.raw_api.b2_http.user_agent == new_style_api.session.raw_api.b2_http.user_agent + assert ( + old_style_api.session.raw_api.b2_http.user_agent + == new_style_api.session.raw_api.b2_http.user_agent + ) with pytest.raises(InvalidArgument): B2Api( raw_api=B2RawApi(B2Http(user_agent_append='test append')), @@ -572,8 +558,11 @@ def test_create_and_delete_key_v2(self): assert create_result.key_name == 'testkey' assert create_result.capabilities == ['readFiles'] assert create_result.account_id == self.account_info.get_account_id() - assert (now + 100 - - 10) * 1000 < create_result.expiration_timestamp_millis < (now + 100 + 10) * 1000 + assert ( + (now + 100 - 10) * 1000 + < create_result.expiration_timestamp_millis + < (now + 100 + 10) * 1000 + ) assert create_result.bucket_id == bucket.id_ assert create_result.name_prefix == 'name' # assert create_result.options == ... 
TODO @@ -600,7 +589,10 @@ def assertDeleteAndCreateResult(self, create_result, delete_result): assert delete_result.key_name == create_result.key_name assert delete_result.capabilities == create_result.capabilities assert delete_result.account_id == create_result.account_id - assert delete_result.expiration_timestamp_millis == create_result.expiration_timestamp_millis + assert ( + delete_result.expiration_timestamp_millis + == create_result.expiration_timestamp_millis + ) assert delete_result.bucket_id == create_result.bucket_id assert delete_result.name_prefix == create_result.name_prefix @@ -621,7 +613,8 @@ def test_list_keys_v1(self): 'expirationTimestamp': None, 'keyName': f'testkey{ind}', 'namePrefix': None, - } for ind in [ + } + for ind in [ 0, 1, 10, @@ -691,8 +684,7 @@ def test_delete_file_version_bypass_governance(self): created_file = bucket.upload_bytes( b'hello world', 'file', - file_retention=FileRetentionSetting(RetentionMode.GOVERNANCE, - int(time.time()) + 100), + file_retention=FileRetentionSetting(RetentionMode.GOVERNANCE, int(time.time()) + 100), ) with pytest.raises(AccessDenied): diff --git a/test/unit/b2http/test_b2http.py b/test/unit/b2http/test_b2http.py index 40079ea46..7f3020c51 100644 --- a/test/unit/b2http/test_b2http.py +++ b/test/unit/b2http/test_b2http.py @@ -62,7 +62,7 @@ def test_broken_pipe(self): def fcn(): raise requests.ConnectionError( requests.packages.urllib3.exceptions.ProtocolError( - "dummy", OSError(20, 'Broken pipe') + 'dummy', OSError(20, 'Broken pipe') ) ) @@ -108,7 +108,9 @@ def test_too_many_requests(self): response = MagicMock() response.status_code = 429 response.headers = {'retry-after': 1} - response.content = b'{"status": 429, "code": "Too Many requests", "message": "retry after some time"}' + response.content = ( + b'{"status": 429, "code": "Too Many requests", "message": "retry after some time"}' + ) with pytest.raises(TooManyRequests): B2Http._translate_errors(lambda: response) @@ -121,7 +123,7 @@ def test_invalid_json(self): with pytest.raises(BadRequest) as exc_info: B2Http._translate_errors(lambda: response) - assert str(exc_info.value) == f"{response.content.decode()} (non_json_response)" + assert str(exc_info.value) == f'{response.content.decode()} (non_json_response)' def test_potential_s3_endpoint_passed_as_realm(self): response = MagicMock() @@ -218,7 +220,8 @@ def test_never_works(self): fcn.side_effect = [ ServiceError('a'), ServiceError('a'), - ServiceError('a'), self.response + ServiceError('a'), + self.response, ] with self.assertRaises(ServiceError): B2Http._translate_and_retry(fcn, 3) @@ -272,14 +275,14 @@ def test_too_many_requests_retry_header_combination_two(self): class TestB2Http(TestBase): - URL = 'http://example.com' UA_APPEND = None HEADERS = dict(my_header='my_value') EXPECTED_HEADERS = {'my_header': 'my_value', 'User-Agent': USER_AGENT} EXPECTED_JSON_HEADERS = { - **EXPECTED_HEADERS, 'Content-Type': 'application/json', - 'Accept': 'application/json' + **EXPECTED_HEADERS, + 'Content-Type': 'application/json', + 'Accept': 'application/json', } PARAMS = dict(fileSize=100) PARAMS_JSON_BYTES = b'{"fileSize": 100}' @@ -300,7 +303,7 @@ def setUp(self): B2HttpApiConfig( requests.Session, install_clock_skew_hook=False, - user_agent_append=self.UA_APPEND + user_agent_append=self.UA_APPEND, ) ) @@ -350,7 +353,7 @@ def test_get_content(self): def test_head_content(self): self.session.request.return_value = self.response self.response.status_code = 200 - self.response.headers = {"color": "blue"} + self.response.headers = 
{'color': 'blue'} response = self.b2_http.head_content(self.URL, self.HEADERS) @@ -361,11 +364,11 @@ def test_head_content(self): class TestB2HttpUserAgentAppend(TestB2Http): - UA_APPEND = 'ua_extra_string' EXPECTED_HEADERS = {**TestB2Http.EXPECTED_HEADERS, 'User-Agent': f'{USER_AGENT} {UA_APPEND}'} EXPECTED_JSON_HEADERS = { - **TestB2Http.EXPECTED_JSON_HEADERS, 'User-Agent': EXPECTED_HEADERS['User-Agent'] + **TestB2Http.EXPECTED_JSON_HEADERS, + 'User-Agent': EXPECTED_HEADERS['User-Agent'], } diff --git a/test/unit/bucket/test_bucket.py b/test/unit/bucket/test_bucket.py index 92639e5ce..62d100a38 100644 --- a/test/unit/bucket/test_bucket.py +++ b/test/unit/bucket/test_bucket.py @@ -21,7 +21,6 @@ import unittest.mock as mock from contextlib import suppress from io import BytesIO -from test.helpers import NonSeekableIO, assert_dict_equal_ignore_extra import apiver_deps import pytest @@ -49,13 +48,18 @@ UnsatisfiableRange, ) +from test.helpers import NonSeekableIO, assert_dict_equal_ignore_extra + from ..test_base import TestBase, create_key if apiver_deps.V <= 1: from apiver_deps import DownloadDestBytes, PreSeekedDownloadDest from apiver_deps import FileVersionInfo as VFileVersionInfo else: - DownloadDestBytes, PreSeekedDownloadDest = None, None # these classes are not present, thus not needed, in v2 + DownloadDestBytes, PreSeekedDownloadDest = ( + None, + None, + ) # these classes are not present, thus not needed, in v2 from apiver_deps import FileVersion as VFileVersionInfo from apiver_deps import ( LARGE_FILE_SHA1, @@ -147,8 +151,8 @@ ], source_key_id='10053d55ae26b790000000006', source_to_destination_key_mapping={ - "10053d55ae26b790000000045": "10053d55ae26b790000000004", - "10053d55ae26b790000000046": "10053d55ae26b790030000004", + '10053d55ae26b790000000045': '10053d55ae26b790000000004', + '10053d55ae26b790000000046': '10053d55ae26b790030000004', }, ) @@ -269,9 +273,12 @@ def test_bucket_ls__matches_exact_filename(bucket, exact_filename_match_ls_setup # hidden file should not be returned unless latest_only is False assert len(list(bucket.ls(exact_filename_match_ls_setup[1].file_name, **kwargs))) == 0 - assert len( - list(bucket.ls(exact_filename_match_ls_setup[1].file_name, **kwargs, latest_only=False)) - ) == 2 + assert ( + len( + list(bucket.ls(exact_filename_match_ls_setup[1].file_name, **kwargs, latest_only=False)) + ) + == 2 + ) @pytest.mark.apiver(from_ver=2) @@ -288,9 +295,12 @@ def test_bucket_ls__matches_exact_filename__wildcard( # hidden file should not be returned unless latest_only is False assert len(list(bucket.ls(exact_filename_match_ls_setup[1].file_name, **kwargs))) == 0 - assert len( - list(bucket.ls(exact_filename_match_ls_setup[1].file_name, **kwargs, latest_only=False)) - ) == 2 + assert ( + len( + list(bucket.ls(exact_filename_match_ls_setup[1].file_name, **kwargs, latest_only=False)) + ) + == 2 + ) class TestCaseWithBucket(TestBase): @@ -432,8 +442,11 @@ def test_error_in_state(self): large_file_upload_state.set_error('test error') try: self.api.services.upload_manager.upload_part( - self.bucket_id, file1.file_id, UploadSourceBytes(content), 1, - large_file_upload_state + self.bucket_id, + file1.file_id, + UploadSourceBytes(content), + 1, + large_file_upload_state, ).result() self.fail('should have thrown') except AlreadyFailed: @@ -465,10 +478,12 @@ def test_prefix(self): file3 = self.bucket.start_large_file('fileABC', 'text/plain', {}) self.assertEqual( [file2, file3], - list(self.bucket.list_unfinished_large_files( - batch_size=1, - prefix='fileAB', - ),), + 
list( + self.bucket.list_unfinished_large_files( + batch_size=1, + prefix='fileAB', + ), + ), ) def _make_file(self, file_id, file_name): @@ -524,7 +539,7 @@ def test_version_by_name_file_lock(self): 'listBuckets', 'listFiles', 'readFiles', - ] + ], ) low_perm_api.authorize_account( @@ -549,8 +564,12 @@ def test_version_by_id(self): self.assertIsInstance(info, VFileVersionInfo) expected = (b_id, 'b', 11, 'upload', 'b2/x-auto', 'none') actual = ( - info.id_, info.file_name, info.size, info.action, info.content_type, - info.server_side_encryption.mode.value + info.id_, + info.file_name, + info.size, + info.action, + info.content_type, + info.server_side_encryption.mode.value, ) self.assertEqual(expected, actual) @@ -571,7 +590,9 @@ def test_three_files_at_root(self): self.bucket.upload_bytes(data, 'bb') self.bucket.upload_bytes(data, 'ccc') expected = [ - ('a', 11, 'upload', None), ('bb', 11, 'upload', None), ('ccc', 11, 'upload', None) + ('a', 11, 'upload', None), + ('bb', 11, 'upload', None), + ('ccc', 11, 'upload', None), ] self.assertBucketContents(expected, '') @@ -584,8 +605,9 @@ def test_three_files_in_dir(self): self.bucket.upload_bytes(data, 'bb/3') self.bucket.upload_bytes(data, 'ccc') expected = [ - ('bb/1', 11, 'upload', None), ('bb/2/sub1', 11, 'upload', 'bb/2/'), - ('bb/3', 11, 'upload', None) + ('bb/1', 11, 'upload', None), + ('bb/2/sub1', 11, 'upload', 'bb/2/'), + ('bb/3', 11, 'upload', None), ] self.assertBucketContents(expected, 'bb', fetch_count=1) @@ -650,8 +672,7 @@ def test_delete_file_version_bypass_governance(self): file_id = self.bucket.upload_bytes( data, 'hello.txt', - file_retention=FileRetentionSetting(RetentionMode.GOVERNANCE, - int(time.time()) + 100), + file_retention=FileRetentionSetting(RetentionMode.GOVERNANCE, int(time.time()) + 100), ).id_ with pytest.raises(AccessDenied): @@ -751,8 +772,7 @@ def test_wildcard_matching_directory(self): ] actual = [ (info.file_name, info.size, info.action, folder) - for (info, - folder) in self.bucket_ls('b/*/test.txt', recursive=True, with_wildcard=True) + for (info, folder) in self.bucket_ls('b/*/test.txt', recursive=True, with_wildcard=True) ] self.assertEqual(expected, actual) @@ -768,8 +788,7 @@ def test_single_character_matching(self): ] actual = [ (info.file_name, info.size, info.action, folder) - for (info, - folder) in self.bucket_ls('b/2/test.?sv', recursive=True, with_wildcard=True) + for (info, folder) in self.bucket_ls('b/2/test.?sv', recursive=True, with_wildcard=True) ] self.assertEqual(expected, actual) @@ -785,8 +804,9 @@ def test_sequence_matching(self): ] actual = [ (info.file_name, info.size, info.action, folder) - for (info, - folder) in self.bucket_ls('b/2/test.[tc]sv', recursive=True, with_wildcard=True) + for (info, folder) in self.bucket_ls( + 'b/2/test.[tc]sv', recursive=True, with_wildcard=True + ) ] self.assertEqual(expected, actual) @@ -801,8 +821,9 @@ def test_negative_sequence_matching(self): ] actual = [ (info.file_name, info.size, info.action, folder) - for (info, - folder) in self.bucket_ls('b/2/test.[!ck]sv', recursive=True, with_wildcard=True) + for (info, folder) in self.bucket_ls( + 'b/2/test.[!ck]sv', recursive=True, with_wildcard=True + ) ] self.assertEqual(expected, actual) @@ -872,10 +893,11 @@ def test_filters_wildcard_matching(self): ('b/3/test-5.txt', len(data), 'upload', None), ] actual = [ - (info.file_name, info.size, info.action, folder) for (info, folder) in self.bucket_ls( + (info.file_name, info.size, info.action, folder) + for (info, folder) in self.bucket_ls( 'b/', 
recursive=True, - filters=[Filter.include("*.txt")], + filters=[Filter.include('*.txt')], ) ] self.assertEqual(expected, actual) @@ -925,7 +947,8 @@ def test_filters_single_character_matching(self): ('b/2/test.tsv', len(data), 'upload', None), ] actual = [ - (info.file_name, info.size, info.action, folder) for (info, folder) in self.bucket_ls( + (info.file_name, info.size, info.action, folder) + for (info, folder) in self.bucket_ls( recursive=True, filters=[Filter.include('b/2/test.?sv')], ) @@ -937,7 +960,8 @@ def test_filters_single_character_matching(self): ('b/2/test.txt', len(data), 'upload', None), ] actual = [ - (info.file_name, info.size, info.action, folder) for (info, folder) in self.bucket_ls( + (info.file_name, info.size, info.action, folder) + for (info, folder) in self.bucket_ls( recursive=True, filters=[Filter.exclude('b/2/test.?sv')], ) @@ -956,7 +980,8 @@ def test_filters_sequence_matching(self): ('b/2/test.tsv', len(data), 'upload', None), ] actual = [ - (info.file_name, info.size, info.action, folder) for (info, folder) in self.bucket_ls( + (info.file_name, info.size, info.action, folder) + for (info, folder) in self.bucket_ls( recursive=True, filters=[Filter.include('b/2/test.[tc]sv')], ) @@ -968,7 +993,8 @@ def test_filters_sequence_matching(self): ('b/2/test.ksv', len(data), 'upload', None), ] actual = [ - (info.file_name, info.size, info.action, folder) for (info, folder) in self.bucket_ls( + (info.file_name, info.size, info.action, folder) + for (info, folder) in self.bucket_ls( recursive=True, filters=[Filter.exclude('b/2/test.[tc]sv')], ) @@ -986,7 +1012,8 @@ def test_filters_negative_sequence_matching(self): ('b/2/test.tsv', len(data), 'upload', None), ] actual = [ - (info.file_name, info.size, info.action, folder) for (info, folder) in self.bucket_ls( + (info.file_name, info.size, info.action, folder) + for (info, folder) in self.bucket_ls( recursive=True, filters=[Filter.include('b/2/test.[!ck]sv')], ) @@ -999,7 +1026,8 @@ def test_filters_negative_sequence_matching(self): ('b/2/test.ksv', len(data), 'upload', None), ] actual = [ - (info.file_name, info.size, info.action, folder) for (info, folder) in self.bucket_ls( + (info.file_name, info.size, info.action, folder) + for (info, folder) in self.bucket_ls( recursive=True, filters=[Filter.exclude('b/2/test.[!ck]sv')], ) @@ -1015,7 +1043,8 @@ def test_filters_matching_exact_filename(self): ('b/a.txt', len(data), 'upload', None), ] actual = [ - (info.file_name, info.size, info.action, folder) for (info, folder) in self.bucket_ls( + (info.file_name, info.size, info.action, folder) + for (info, folder) in self.bucket_ls( recursive=True, filters=[Filter.include('b/a.txt')], ) @@ -1026,7 +1055,8 @@ def test_filters_matching_exact_filename(self): ('b/b.txt', len(data), 'upload', None), ] actual = [ - (info.file_name, info.size, info.action, folder) for (info, folder) in self.bucket_ls( + (info.file_name, info.size, info.action, folder) + for (info, folder) in self.bucket_ls( recursive=True, filters=[Filter.exclude('b/a.txt')], ) @@ -1053,7 +1083,8 @@ def test_filters_mixed_with_wildcards(self): ('b/a.txt', len(data), 'upload', None), ] actual = [ - (info.file_name, info.size, info.action, folder) for (info, folder) in self.bucket_ls( + (info.file_name, info.size, info.action, folder) + for (info, folder) in self.bucket_ls( '*.txt', recursive=True, with_wildcard=True, @@ -1067,11 +1098,12 @@ def test_filters_mixed_with_wildcards(self): ('b/a-1.txt', len(data), 'upload', None), ] actual = [ - (info.file_name, info.size, 
info.action, folder) for (info, folder) in self.bucket_ls( + (info.file_name, info.size, info.action, folder) + for (info, folder) in self.bucket_ls( 'b/?-[1234567890].*', recursive=True, with_wildcard=True, - filters=[Filter.exclude('*-2.*')] + filters=[Filter.exclude('*-2.*')], ) ] self.assertEqual(expected, actual) @@ -1087,11 +1119,10 @@ def test_filters_combination(self): ('b/a-1.csv', len(data), 'upload', None), ] actual = [ - (info.file_name, info.size, info.action, folder) for (info, folder) in self.bucket_ls( + (info.file_name, info.size, info.action, folder) + for (info, folder) in self.bucket_ls( recursive=True, - filters=[Filter.include('b/*'), - Filter.exclude('*.txt'), - Filter.include('a.txt')], + filters=[Filter.include('b/*'), Filter.exclude('*.txt'), Filter.include('a.txt')], ) ] self.assertEqual(expected, actual) @@ -1151,7 +1182,9 @@ def test_multiple_version(self): a_id3 = self.bucket.upload_bytes(b'last version', 'a').id_ expected = [ - (a_id3, 'a', 12, 'upload'), (a_id2, 'a', 14, 'upload'), (a_id1, 'a', 13, 'upload') + (a_id3, 'a', 12, 'upload'), + (a_id2, 'a', 14, 'upload'), + (a_id1, 'a', 13, 'upload'), ] actual = [ (info.id_, info.file_name, info.size, info.action) @@ -1265,7 +1298,7 @@ def test_copy_without_optional_params(self): def test_copy_with_range(self): file_id = self._make_file() - #data = b'hello world' + # data = b'hello world' # 3456789 if apiver_deps.V <= 1: self.bucket.copy_file( @@ -1391,7 +1424,7 @@ def test_copy_retention(self): ) self.assertEqual( FileRetentionSetting(RetentionMode.COMPLIANCE, 100), - resulting_file_version.file_retention + resulting_file_version.file_retention, ) self.assertEqual(LegalHold.ON, resulting_file_version.legal_hold) @@ -1417,16 +1450,18 @@ def test_copy_encryption(self): file_id=a_id, destination_encryption=SSE_C_AES, file_info={'new': 'value'}, - content_type='text/plain' - ), SSE_C_AES_NO_SECRET + content_type='text/plain', + ), + SSE_C_AES_NO_SECRET, ), ( dict( file_id=a_id, destination_encryption=SSE_C_AES, source_file_info={'old': 'value'}, - source_content_type='text/plain' - ), SSE_C_AES_NO_SECRET + source_content_type='text/plain', + ), + SSE_C_AES_NO_SECRET, ), (dict(file_id=b_id), SSE_NONE), (dict(file_id=b_id, source_encryption=SSE_B2_AES), SSE_NONE), @@ -1434,8 +1469,9 @@ def test_copy_encryption(self): dict( file_id=b_id, source_encryption=SSE_B2_AES, - destination_encryption=SSE_B2_AES - ), SSE_B2_AES + destination_encryption=SSE_B2_AES, + ), + SSE_B2_AES, ), ( dict( @@ -1443,8 +1479,9 @@ def test_copy_encryption(self): source_encryption=SSE_B2_AES, destination_encryption=SSE_C_AES, file_info={'new': 'value'}, - content_type='text/plain' - ), SSE_C_AES_NO_SECRET + content_type='text/plain', + ), + SSE_C_AES_NO_SECRET, ), ( dict( @@ -1452,29 +1489,33 @@ def test_copy_encryption(self): source_encryption=SSE_B2_AES, destination_encryption=SSE_C_AES, source_file_info={'old': 'value'}, - source_content_type='text/plain' - ), SSE_C_AES_NO_SECRET + source_content_type='text/plain', + ), + SSE_C_AES_NO_SECRET, ), ( dict( file_id=c_id, source_encryption=SSE_C_AES, file_info={'new': 'value'}, - content_type='text/plain' - ), SSE_NONE + content_type='text/plain', + ), + SSE_NONE, ), ( dict( file_id=c_id, source_encryption=SSE_C_AES, source_file_info={'old': 'value'}, - source_content_type='text/plain' - ), SSE_NONE + source_content_type='text/plain', + ), + SSE_NONE, ), ( dict( file_id=c_id, source_encryption=SSE_C_AES, destination_encryption=SSE_C_AES - ), SSE_C_AES_NO_SECRET + ), + SSE_C_AES_NO_SECRET, ), ( 
dict( @@ -1482,8 +1523,9 @@ def test_copy_encryption(self): source_encryption=SSE_C_AES, destination_encryption=SSE_B2_AES, source_file_info={'old': 'value'}, - source_content_type='text/plain' - ), SSE_B2_AES + source_content_type='text/plain', + ), + SSE_B2_AES, ), ( dict( @@ -1491,8 +1533,9 @@ def test_copy_encryption(self): source_encryption=SSE_C_AES, destination_encryption=SSE_B2_AES, file_info={'new': 'value'}, - content_type='text/plain' - ), SSE_B2_AES + content_type='text/plain', + ), + SSE_B2_AES, ), ( dict( @@ -1500,8 +1543,9 @@ def test_copy_encryption(self): source_encryption=SSE_C_AES, destination_encryption=SSE_C_AES_2, source_file_info={'old': 'value'}, - source_content_type='text/plain' - ), SSE_C_AES_2_NO_SECRET + source_content_type='text/plain', + ), + SSE_C_AES_2_NO_SECRET, ), ( dict( @@ -1509,8 +1553,9 @@ def test_copy_encryption(self): source_encryption=SSE_C_AES, destination_encryption=SSE_C_AES_2, file_info={'new': 'value'}, - content_type='text/plain' - ), SSE_C_AES_2_NO_SECRET + content_type='text/plain', + ), + SSE_C_AES_2_NO_SECRET, ), ]: with self.subTest(kwargs=kwargs, length=length, data=data): @@ -1530,9 +1575,7 @@ def test_update(self): bucket_type='allPrivate', bucket_info={'info': 'o'}, cors_rules={'andrea': 'corr'}, - lifecycle_rules=[{ - 'fileNamePrefix': 'is_life' - }], + lifecycle_rules=[{'fileNamePrefix': 'is_life'}], default_server_side_encryption=SSE_B2_AES, default_retention=BucketRetentionSetting( RetentionMode.COMPLIANCE, RetentionPeriod(years=7) @@ -1547,48 +1590,33 @@ def test_update(self): { 'accountId': 'account-0', 'bucketId': 'bucket_0', - 'bucketInfo': { - 'info': 'o' - }, + 'bucketInfo': {'info': 'o'}, 'bucketName': 'my-bucket', 'bucketType': 'allPrivate', - 'corsRules': { - 'andrea': 'corr' + 'corsRules': {'andrea': 'corr'}, + 'defaultServerSideEncryption': { + 'isClientAuthorizedToRead': True, + 'value': {'algorithm': 'AES256', 'mode': 'SSE-B2'}, }, - 'defaultServerSideEncryption': - { - 'isClientAuthorizedToRead': True, - 'value': { - 'algorithm': 'AES256', - 'mode': 'SSE-B2' - } - }, - 'fileLockConfiguration': - { - 'isClientAuthorizedToRead': True, - 'value': - { - 'defaultRetention': - { - 'mode': 'compliance', - 'period': { - 'unit': 'years', - 'duration': 7 - } - }, - 'isFileLockEnabled': None - } + 'fileLockConfiguration': { + 'isClientAuthorizedToRead': True, + 'value': { + 'defaultRetention': { + 'mode': 'compliance', + 'period': {'unit': 'years', 'duration': 7}, + }, + 'isFileLockEnabled': None, }, - 'lifecycleRules': [{ - 'fileNamePrefix': 'is_life' - }], + }, + 'lifecycleRules': [{'fileNamePrefix': 'is_life'}], 'options': set(), - 'revision': 2 - }, result + 'revision': 2, + }, + result, ) else: self.assertIsInstance(result, Bucket) - assertions_mapping = { # yapf: disable + assertions_mapping = { 'id_': self.bucket.id_, 'name': self.bucket.name, 'type_': 'allPrivate', @@ -1597,7 +1625,9 @@ def test_update(self): 'lifecycle_rules': [{'fileNamePrefix': 'is_life'}], 'options_set': set(), 'default_server_side_encryption': SSE_B2_AES, - 'default_retention': BucketRetentionSetting(RetentionMode.COMPLIANCE, RetentionPeriod(years=7)), + 'default_retention': BucketRetentionSetting( + RetentionMode.COMPLIANCE, RetentionPeriod(years=7) + ), 'replication': REPLICATION, } for attr_name, attr_value in assertions_mapping.items(): @@ -1620,9 +1650,7 @@ def test_empty_replication(self): def test_update_if_revision_is(self): current_revision = self.bucket.revision self.bucket.update( - lifecycle_rules=[{ - 'fileNamePrefix': 'is_life' - 
}], + lifecycle_rules=[{'fileNamePrefix': 'is_life'}], if_revision_is=current_revision, ) updated_bucket = self.api.get_bucket_by_name(self.bucket.name) @@ -1630,9 +1658,7 @@ def test_update_if_revision_is(self): try: self.bucket.update( - lifecycle_rules=[{ - 'fileNamePrefix': 'is_life' - }], + lifecycle_rules=[{'fileNamePrefix': 'is_life'}], if_revision_is=current_revision, # this is now the old revision ) except Exception: @@ -1753,7 +1779,7 @@ def test_upload_local_file_retention(self): 'file1', encryption=SSE_C_AES, file_retention=retention, - legal_hold=LegalHold.ON + legal_hold=LegalHold.ON, ) self._check_file_contents('file1', data) self.assertEqual(retention, file_info.file_retention) @@ -1817,21 +1843,24 @@ def test_upload_local_file_incremental(self): for data in DATA: # figure out if this particular upload should be incremental should_be_incremental = ( - last_data and data.startswith(last_data) and - len(last_data) >= self.simulator.MIN_PART_SIZE + last_data + and data.startswith(last_data) + and len(last_data) >= self.simulator.MIN_PART_SIZE ) # if it's incremental, then there should be two sources concatenated, otherwise one expected_source_count = 2 if should_be_incremental else 1 # is the result file expected to be a large file - expected_large_file = \ - should_be_incremental or \ - len(data) > self.simulator.MIN_PART_SIZE + expected_large_file = ( + should_be_incremental or len(data) > self.simulator.MIN_PART_SIZE + ) - expected_parts_sizes = \ - [len(last_data), len(data) - len(last_data)] \ - if should_be_incremental else [len(data)] + expected_parts_sizes = ( + [len(last_data), len(data) - len(last_data)] + if should_be_incremental + else [len(data)] + ) write_file(path, data) with mock.patch.object( @@ -1986,8 +2015,8 @@ def test_upload_large_resume_all_parts_there(self): data = self._make_data(part_size * 3) large_file_id = self._start_large_file('file1') self._upload_part(large_file_id, 1, data[:part_size]) - self._upload_part(large_file_id, 2, data[part_size:2 * part_size]) - self._upload_part(large_file_id, 3, data[2 * part_size:]) + self._upload_part(large_file_id, 2, data[part_size : 2 * part_size]) + self._upload_part(large_file_id, 3, data[2 * part_size :]) progress_listener = StubProgressListener() file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener) self.assertEqual(large_file_id, file_info.id_) @@ -2009,7 +2038,7 @@ def test_upload_large_resume_wrong_part_size(self): part_size = self.simulator.MIN_PART_SIZE data = self._make_data(part_size * 3) large_file_id = self._start_large_file('file1') - self._upload_part(large_file_id, 1, data[:part_size + 1]) # one byte to much + self._upload_part(large_file_id, 1, data[: part_size + 1]) # one byte to much progress_listener = StubProgressListener() file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener) self.assertNotEqual(large_file_id, file_info.id_) @@ -2082,8 +2111,12 @@ def _upload_part(self, large_file_id, part_number, part_data): self.api_url, self.account_auth_token, large_file_id ) self.simulator.upload_part( - upload_info['uploadUrl'], upload_info['authorizationToken'], part_number, - len(part_data), hex_sha1_of_bytes(part_data), part_stream + upload_info['uploadUrl'], + upload_info['authorizationToken'], + part_number, + len(part_data), + hex_sha1_of_bytes(part_data), + part_stream, ) @@ -2102,14 +2135,18 @@ def upload_part( content_length, sha1_sum, input_stream, - server_side_encryption=None + server_side_encryption=None, ): if 
self._raise_count < self._raise_until: self._raise_count += 1 raise B2ConnectionError() return super().upload_part( - file_id, part_number, content_length, sha1_sum, input_stream, - server_side_encryption + file_id, + part_number, + content_length, + sha1_sum, + input_stream, + server_side_encryption, ) class B2ApiPatched(B2Api): @@ -2179,12 +2216,14 @@ def test_create_remote(self): UploadSourceLocalFile(path), CopySource(f2_id, length=len(data), offset=0), ], - file_name='created_file' + file_name='created_file', ) self.assertIsInstance(created_file, VFileVersionInfo) actual = ( - created_file.id_, created_file.file_name, created_file.size, - created_file.server_side_encryption + created_file.id_, + created_file.file_name, + created_file.size, + created_file.server_side_encryption, ) expected = ('9997', 'created_file', 33, SSE_NONE) self.assertEqual(expected, actual) @@ -2203,19 +2242,21 @@ def test_create_remote_encryption(self): CopySource(f2_id, length=len(data), offset=0, encryption=SSE_C_AES_2), ], file_name=f'created_file_{len(data)}', - encryption=SSE_C_AES + encryption=SSE_C_AES, ) self.assertIsInstance(created_file, VFileVersionInfo) actual = ( - created_file.id_, created_file.file_name, created_file.size, - created_file.server_side_encryption + created_file.id_, + created_file.file_name, + created_file.size, + created_file.server_side_encryption, ) expected = ( mock.ANY, f'created_file_{len(data)}', mock.ANY, # FIXME: this should be equal to len(data) * 3, # but there is a problem in the simulator/test code somewhere - SSE_C_AES_NO_SECRET + SSE_C_AES_NO_SECRET, ) self.assertEqual(expected, actual) @@ -2225,7 +2266,7 @@ def _create_remote(self, sources, file_name, encryption=None): return self.bucket.create_file( [wi for wi in WriteIntent.wrap_sources_iterator(sources)], file_name=file_name, - encryption=encryption + encryption=encryption, ) @@ -2239,7 +2280,7 @@ def _create_remote(self, sources, file_name, encryption=None): return self.bucket.create_file_stream( [wi for wi in WriteIntent.wrap_sources_iterator(sources)], file_name=file_name, - encryption=encryption + encryption=encryption, ) @@ -2362,27 +2403,26 @@ def test_v2_return_types(self): self.DATA.encode(), 'enc_file2', encryption=SSE_C_AES ) other_properties = { - 'download_version': - DownloadVersion( - api=self.api, - id_=file_version.id_, - file_name=file_version.file_name, - size=len(self.DATA), - content_type=file_version.content_type, - content_sha1=file_version.content_sha1, - file_info=file_version.file_info, - upload_timestamp=file_version.upload_timestamp, - server_side_encryption=file_version.server_side_encryption, - range_=Range(7, 18), - content_disposition=None, - content_length=12, - content_language=None, - expires=None, - cache_control=None, - content_encoding=None, - file_retention=file_version.file_retention, - legal_hold=file_version.legal_hold, - ), + 'download_version': DownloadVersion( + api=self.api, + id_=file_version.id_, + file_name=file_version.file_name, + size=len(self.DATA), + content_type=file_version.content_type, + content_sha1=file_version.content_sha1, + file_info=file_version.file_info, + upload_timestamp=file_version.upload_timestamp, + server_side_encryption=file_version.server_side_encryption, + range_=Range(7, 18), + content_disposition=None, + content_length=12, + content_language=None, + expires=None, + cache_control=None, + content_encoding=None, + file_retention=file_version.file_retention, + legal_hold=file_version.legal_hold, + ), } ret = 
self.bucket.download_file_by_id(file_version.id_, **download_kwargs) assert isinstance(ret, DownloadedFile), type(ret) @@ -2408,7 +2448,7 @@ def test_v1_return_types(self): 'contentType': 'b2/x-auto', 'fileId': '9999', 'fileInfo': {}, - 'fileName': 'file1' + 'fileName': 'file1', } ret = self.bucket.download_file_by_id(self.file_version.id_, self.download_dest) assert ret == expected @@ -2608,12 +2648,17 @@ def test_download_to_non_seekable_file(self): file_version = self.bucket.upload_bytes(self.DATA.encode(), 'file1') non_seekable_strategies = [ - strat for strat in self.bucket.api.services.download_manager.strategies + strat + for strat in self.bucket.api.services.download_manager.strategies if not isinstance(strat, ParallelDownloader) ] - context = contextlib.nullcontext() if non_seekable_strategies else pytest.raises( - ValueError, - match='no strategy suitable for download was found!', + context = ( + contextlib.nullcontext() + if non_seekable_strategies + else pytest.raises( + ValueError, + match='no strategy suitable for download was found!', + ) ) output_file = NonSeekableIO() with context: @@ -2628,12 +2673,17 @@ def test_download_to_seekable_but_no_read_file(self): file_version = self.bucket.upload_bytes(self.DATA.encode(), 'file1') non_seekable_strategies = [ - strat for strat in self.bucket.api.services.download_manager.strategies + strat + for strat in self.bucket.api.services.download_manager.strategies if not isinstance(strat, ParallelDownloader) ] - context = contextlib.nullcontext() if non_seekable_strategies else pytest.raises( - ValueError, - match='no strategy suitable for download was found!', + context = ( + contextlib.nullcontext() + if non_seekable_strategies + else pytest.raises( + ValueError, + match='no strategy suitable for download was found!', + ) ) output_file = io.BytesIO() seekable_but_not_readable = io.BufferedWriter(output_file) @@ -2656,7 +2706,7 @@ def test_download_to_seekable_but_no_read_file(self): class EmptyFileDownloadScenarioMixin: - """ use with DownloadTests, but not for TestDownloadParallel as it does not like empty files """ + """use with DownloadTests, but not for TestDownloadParallel as it does not like empty files""" def test_download_by_name_empty_file(self): self.file_version = self.bucket.upload_bytes(b'', 'empty') @@ -2665,7 +2715,7 @@ def test_download_by_name_empty_file(self): class UnverifiedChecksumDownloadScenarioMixin: - """ use with DownloadTests """ + """use with DownloadTests""" def test_download_by_name_unverified_checksum(self): with tempfile.TemporaryDirectory() as d: @@ -2673,10 +2723,11 @@ def test_download_by_name_unverified_checksum(self): data = b'hello world' write_file(path, data) file_info = self.bucket.upload_local_file(path, 'file1') - simulated_file = list(self.simulator.bucket_name_to_bucket.values() - )[0].file_id_to_file[file_info.id_] + simulated_file = list(self.simulator.bucket_name_to_bucket.values())[0].file_id_to_file[ + file_info.id_ + ] simulated_file.content_sha1 = 'unverified:' + simulated_file.content_sha1 - #, sha1_sum='unverified:2aae6c35c94fcfb415dbe95f408b9ce91ee846ed') + # , sha1_sum='unverified:2aae6c35c94fcfb415dbe95f408b9ce91ee846ed') self.download_file_by_name('file1', progress_listener=self.progress_listener) @@ -2975,21 +3026,21 @@ def download_file_by_name(self, file_name, download_dest=None, **kwargs): class DecodeTests(DecodeTestsBase, TestCaseWithBucket): def test_file_content_1(self): self.download_file_by_name('test.txt?foo=bar', progress_listener=self.progress_listener) - 
self._verify("Test File 1") + self._verify('Test File 1') def test_file_content_2(self): self.download_file_by_name('test.txt%3Ffoo=bar', progress_listener=self.progress_listener) - self._verify("Test File 2") + self._verify('Test File 2') def test_file_content_3(self): self.download_file_by_name('test.txt%3Ffoo%3Dbar', progress_listener=self.progress_listener) - self._verify("Test File 3") + self._verify('Test File 3') def test_file_content_4(self): self.download_file_by_name( 'test.txt%253Ffoo%253Dbar', progress_listener=self.progress_listener ) - self._verify("Test File 4") + self._verify('Test File 4') def test_file_info_1(self): download_version = self.bucket.get_file_info_by_name('test.txt?foo=bar') @@ -3315,37 +3366,30 @@ def test_bucket_notification_rules(bucket, b2api_simulator): assert bucket.get_notification_rules() == [] notification_rule = { - "eventTypes": ["b2:ObjectCreated:*"], - "isEnabled": True, - "name": "test-rule", - "objectNamePrefix": "", - "targetConfiguration": - { - "customHeaders": [], - "targetType": "webhook", - "url": "https://example.com/webhook", - } + 'eventTypes': ['b2:ObjectCreated:*'], + 'isEnabled': True, + 'name': 'test-rule', + 'objectNamePrefix': '', + 'targetConfiguration': { + 'customHeaders': [], + 'targetType': 'webhook', + 'url': 'https://example.com/webhook', + }, } set_notification_rules = bucket.set_notification_rules([notification_rule]) assert set_notification_rules == bucket.get_notification_rules() assert_dict_equal_ignore_extra( set_notification_rules, - [{ - **notification_rule, "isSuspended": False, - "suspensionReason": "" - }], + [{**notification_rule, 'isSuspended': False, 'suspensionReason': ''}], ) b2api_simulator.bucket_id_to_bucket[bucket.id_].simulate_notification_rule_suspension( - notification_rule["name"], "simulated suspension" + notification_rule['name'], 'simulated suspension' ) assert_dict_equal_ignore_extra( bucket.get_notification_rules(), - [{ - **notification_rule, "isSuspended": True, - "suspensionReason": "simulated suspension" - }], + [{**notification_rule, 'isSuspended': True, 'suspensionReason': 'simulated suspension'}], ) assert bucket.set_notification_rules([]) == [] diff --git a/test/unit/conftest.py b/test/unit/conftest.py index e8305def5..cbf31e357 100644 --- a/test/unit/conftest.py +++ b/test/unit/conftest.py @@ -53,7 +53,7 @@ def pytest_configure(config): sys.path.insert(0, str(Path(__file__).parent / config.getoption('--api') / 'apiver')) config.addinivalue_line( 'markers', - 'apiver(*args, *, from_ver=0, to_ver=sys.maxsize): mark test to run only for specific apivers' + 'apiver(*args, *, from_ver=0, to_ver=sys.maxsize): mark test to run only for specific apivers', ) @@ -221,7 +221,7 @@ class WindowsPermTool(PermTool): def __init__(self): self.user_sid = win32security.GetTokenInformation( win32security.OpenProcessToken(win32api.GetCurrentProcess(), win32security.TOKEN_QUERY), - win32security.TokenUser + win32security.TokenUser, )[0] def allow_access(self, path): diff --git a/test/unit/file_version/test_file_version.py b/test/unit/file_version/test_file_version.py index f7c4bfca7..7674720c5 100644 --- a/test/unit/file_version/test_file_version.py +++ b/test/unit/file_version/test_file_version.py @@ -105,7 +105,8 @@ def test_clone_file_version_and_download_version(self): cloned = initial_file_version._clone(legal_hold=LegalHold.OFF) assert isinstance(cloned, VFileVersion) assert cloned.as_dict() == { - **initial_file_version.as_dict(), 'legalHold': LegalHold.OFF.value + **initial_file_version.as_dict(), + 
'legalHold': LegalHold.OFF.value, } download_version = self.api.download_file_by_id( @@ -148,8 +149,7 @@ def test_delete_bypass_governance(self): locked_file_version = self.bucket.upload_bytes( b'nothing', 'test_file_with_governance', - file_retention=FileRetentionSetting(RetentionMode.GOVERNANCE, - int(time.time()) + 100), + file_retention=FileRetentionSetting(RetentionMode.GOVERNANCE, int(time.time()) + 100), ) with pytest.raises(AccessDenied): @@ -175,7 +175,9 @@ def test_file_version_upload_headers(self): ), ) - assert file_version._get_upload_headers() == """ + assert ( + file_version._get_upload_headers() + == """ Authorization: auth_token_0 Content-Length: 7 X-Bz-File-Name: test_file @@ -188,7 +190,12 @@ def test_file_version_upload_headers(self): X-Bz-File-Legal-Hold: off X-Bz-File-Retention-Mode: None X-Bz-File-Retention-Retain-Until-Timestamp: None - """.strip().replace(': ', '').replace(' ', '').replace('\n', '').encode('utf8') + """.strip() + .replace(': ', '') + .replace(' ', '') + .replace('\n', '') + .encode('utf8') + ) assert not file_version.has_large_header diff --git a/test/unit/filter/test_filter.py b/test/unit/filter/test_filter.py index 42b2c4d70..c67713cf0 100644 --- a/test/unit/filter/test_filter.py +++ b/test/unit/filter/test_filter.py @@ -16,25 +16,23 @@ @pytest.mark.parametrize( - ("filters", "expr", "expected"), + ('filters', 'expr', 'expected'), ( - ([], "a", True), - ([Filter.exclude("*")], "something", False), - ([Filter.include("a-*")], "a-", True), - ([Filter.include("a-*")], "b-", False), - ([Filter.exclude("*.txt")], "a.txt", False), - ([Filter.exclude("*.txt")], "a.csv", True), - ([Filter.exclude("*"), Filter.include("*.[ct]sv")], "a.csv", True), - ([Filter.exclude("*"), Filter.include("*.[ct]sv")], "a.tsv", True), - ([Filter.exclude("*"), Filter.include("*.[ct]sv")], "a.ksv", False), + ([], 'a', True), + ([Filter.exclude('*')], 'something', False), + ([Filter.include('a-*')], 'a-', True), + ([Filter.include('a-*')], 'b-', False), + ([Filter.exclude('*.txt')], 'a.txt', False), + ([Filter.exclude('*.txt')], 'a.csv', True), + ([Filter.exclude('*'), Filter.include('*.[ct]sv')], 'a.csv', True), + ([Filter.exclude('*'), Filter.include('*.[ct]sv')], 'a.tsv', True), + ([Filter.exclude('*'), Filter.include('*.[ct]sv')], 'a.ksv', False), ( - [Filter.exclude("*"), - Filter.include("*.[ct]sv"), - Filter.exclude("a.csv")], "a.csv", False + [Filter.exclude('*'), Filter.include('*.[ct]sv'), Filter.exclude('a.csv')], + 'a.csv', + False, ), - ([Filter.exclude("*"), - Filter.include("*.[ct]sv"), - Filter.exclude("a.csv")], "b.csv", True), + ([Filter.exclude('*'), Filter.include('*.[ct]sv'), Filter.exclude('a.csv')], 'b.csv', True), ), ) def test_filter_matcher(filters, expr, expected): diff --git a/test/unit/fixtures/folder.py b/test/unit/fixtures/folder.py index d6183c196..af27a7fd3 100644 --- a/test/unit/fixtures/folder.py +++ b/test/unit/fixtures/folder.py @@ -60,8 +60,9 @@ def _file_versions(self, name, mod_times, size=10): content_type='text/plain', content_sha1='content_sha1', **mandatory_kwargs, - ) for mod_time in mod_times - ] # yapf disable + ) + for mod_time in mod_times + ] class FakeLocalFolder(LocalFolder): diff --git a/test/unit/fixtures/raw_api.py b/test/unit/fixtures/raw_api.py index 19109d76c..1aa434aaf 100644 --- a/test/unit/fixtures/raw_api.py +++ b/test/unit/fixtures/raw_api.py @@ -33,7 +33,7 @@ def fake_b2_raw_api_responses(): 'recommendedPartSize': 100000000, 's3ApiUrl': 'https://s3.us-west-000.backblazeb2.xyz:8180', } - } # yapf: disable + } 
@pytest.fixture diff --git a/test/unit/internal/test_emerge_planner.py b/test/unit/internal/test_emerge_planner.py index 1510d5e4c..833bab4da 100644 --- a/test/unit/internal/test_emerge_planner.py +++ b/test/unit/internal/test_emerge_planner.py @@ -49,7 +49,7 @@ def __init__(self, length): def part(source_or_def_list, *offset_len): - """ Helper for building emerge parts from outbound sources defs. Makes planner tests easier to read. + """Helper for building emerge parts from outbound sources defs. Makes planner tests easier to read. Possible "def" structures: @@ -112,8 +112,6 @@ def _get_emerge_planner(self): max_part_size=self.max_size, ) - # yapf: disable - def test_part_sizes(self): self.assertGreater(self.min_size, 0) self.assertGreaterEqual(self.recommended_size, self.min_size) @@ -173,8 +171,7 @@ def test_single_multipart_copy(self): source = CopySource(5 * self.max_size) self.verify_emerge_plan_for_write_intents( - [WriteIntent(source)], - self.split_source_to_part_defs(source, [self.max_size] * 5) + [WriteIntent(source)], self.split_source_to_part_defs(source, [self.max_size] * 5) ) def test_single_multipart_copy_remainder(self): @@ -184,9 +181,8 @@ def test_single_multipart_copy_remainder(self): expected_part_count = 7 base_part_size = int(source.get_content_length() / expected_part_count) size_remainder = source.get_content_length() % expected_part_count - expected_part_sizes = ( - [base_part_size + 1] * size_remainder + - [base_part_size] * (expected_part_count - size_remainder) + expected_part_sizes = [base_part_size + 1] * size_remainder + [base_part_size] * ( + expected_part_count - size_remainder ) self.verify_emerge_plan_for_write_intents( @@ -229,11 +225,13 @@ def test_small_copy_then_copy(self): self.verify_emerge_plan_for_write_intents( write_intents, [ - part([ - source_small_copy, - (source_copy, 0, MEGABYTE), - ]), - part(source_copy, MEGABYTE, self.recommended_size - MEGABYTE) + part( + [ + source_small_copy, + (source_copy, 0, MEGABYTE), + ] + ), + part(source_copy, MEGABYTE, self.recommended_size - MEGABYTE), ], ) @@ -241,17 +239,21 @@ def test_upload_small_copy_then_copy(self): source_upload = UploadSource(self.recommended_size) source_small_copy = CopySource(self.min_size - 1) source_copy = CopySource(self.recommended_size) - write_intents = WriteIntent.wrap_sources_iterator([source_upload, source_small_copy, source_copy]) + write_intents = WriteIntent.wrap_sources_iterator( + [source_upload, source_small_copy, source_copy] + ) self.verify_emerge_plan_for_write_intents( write_intents, [ - part([ - source_upload, - source_small_copy, - ]), + part( + [ + source_upload, + source_small_copy, + ] + ), part(source_copy), - ] + ], ) def test_upload_small_copy_x2_then_copy(self): @@ -267,10 +269,12 @@ def test_upload_small_copy_x2_then_copy(self): write_intents, [ part(source_upload), - part([ - source_small_copy1, - source_small_copy2, - ]), + part( + [ + source_small_copy1, + source_small_copy2, + ] + ), part(source_copy), ], ) @@ -280,30 +284,33 @@ def test_upload_multiple_sources(self): unit_part_size = int(self.recommended_size / 8) uneven_part_size = 3 * unit_part_size - sources = [ - UploadSource(uneven_part_size) - for i in range(8) - ] + sources = [UploadSource(uneven_part_size) for i in range(8)] self.verify_emerge_plan_for_write_intents( WriteIntent.wrap_sources_iterator(sources), [ - part([ - sources[0], - sources[1], - (sources[2], 0, 2 * unit_part_size), - ]), - part([ - (sources[2], 2 * unit_part_size, unit_part_size), - sources[3], - sources[4], - 
(sources[5], 0, unit_part_size), - ]), - part([ - (sources[5], unit_part_size, 2 * unit_part_size), - sources[6], - sources[7], - ]), + part( + [ + sources[0], + sources[1], + (sources[2], 0, 2 * unit_part_size), + ] + ), + part( + [ + (sources[2], 2 * unit_part_size, unit_part_size), + sources[3], + sources[4], + (sources[5], 0, unit_part_size), + ] + ), + part( + [ + (sources[5], unit_part_size, 2 * unit_part_size), + sources[6], + sources[7], + ] + ), ], ) @@ -317,17 +324,21 @@ def test_small_upload_not_enough_copy_then_upload(self): write_intents = WriteIntent.wrap_sources_iterator( [source_small_upload, source_copy, source_upload] ) - small_parts_len = source_small_upload.get_content_length() + source_copy.get_content_length() + small_parts_len = ( + source_small_upload.get_content_length() + source_copy.get_content_length() + ) source_upload_split_offset = self.recommended_size - small_parts_len self.verify_emerge_plan_for_write_intents( write_intents, [ - part([ - source_small_upload, - source_copy, - (source_upload, 0, source_upload_split_offset), - ]), + part( + [ + source_small_upload, + source_copy, + (source_upload, 0, source_upload_split_offset), + ] + ), part(source_upload, source_upload_split_offset, small_parts_len), ], ) @@ -342,8 +353,8 @@ def test_basic_local_overlap(self): self.verify_emerge_plan_for_write_intents( write_intents, - [part(source1, 0, self.recommended_size)] + - self.split_source_to_part_defs(source2, [self.recommended_size] * 2), + [part(source1, 0, self.recommended_size)] + + self.split_source_to_part_defs(source2, [self.recommended_size] * 2), ) def test_local_stairs_overlap(self): @@ -358,8 +369,7 @@ def test_local_stairs_overlap(self): shift = int(self.recommended_size / 4) sources = [UploadSource(self.recommended_size) for i in range(4)] write_intents = [ - WriteIntent(source, destination_offset=i * shift) - for i, source in enumerate(sources) + WriteIntent(source, destination_offset=i * shift) for i, source in enumerate(sources) ] three_quarters = int(3 * self.recommended_size / 4) @@ -371,10 +381,7 @@ def test_local_stairs_overlap(self): self.verify_emerge_plan_for_write_intents( write_intents, [ - part([ - (sources[0], 0, three_quarters), - (sources[-1], 0, shift) - ]), + part([(sources[0], 0, three_quarters), (sources[-1], 0, shift)]), part(sources[-1], shift, three_quarters), ], ) @@ -467,9 +474,15 @@ def test_overlap_cause_small_copy_remainder_2_intent_case(self): write_intents, [ part(source_copy1, 0, copy_size), - part([ - (source_copy2, 2 * MEGABYTE, self.min_size - 2 * MEGABYTE), # this means: download and then upload - ]), + part( + [ + ( + source_copy2, + 2 * MEGABYTE, + self.min_size - 2 * MEGABYTE, + ), # this means: download and then upload + ] + ), ], ) @@ -497,9 +510,15 @@ def test_overlap_cause_small_copy_remainder_3_intent_case(self): [ part(source_copy1, 0, self.min_size), part(source_copy2, MEGABYTE, self.min_size), - part([ - (source_copy3, 2 * MEGABYTE, copy_overlap_offset), # this means: download and then upload - ]), + part( + [ + ( + source_copy3, + 2 * MEGABYTE, + copy_overlap_offset, + ), # this means: download and then upload + ] + ), ], ) @@ -575,11 +594,10 @@ def test_raise_on_hole(self): WriteIntent(source_copy2, destination_offset=self.recommended_size + 3 * MEGABYTE), ] - hole_msg = ('Cannot emerge file with holes. ' - 'Found hole range: ({}, {})'.format( - write_intents[2].destination_end_offset, - write_intents[1].destination_offset, - )) + hole_msg = ( + 'Cannot emerge file with holes. 
' + f'Found hole range: ({write_intents[2].destination_end_offset}, {write_intents[1].destination_offset})' + ) with self.assertRaises(ValueError, hole_msg): self.planner.get_emerge_plan(write_intents) @@ -590,8 +608,6 @@ def test_empty_upload(self): [part(source_upload)], ) - # yapf: enable - def verify_emerge_plan_for_write_intents(self, write_intents, expected_part_defs): emerge_plan = self.planner.get_emerge_plan(write_intents) @@ -651,20 +667,21 @@ def test_emerge_planner_from_account_info(account_info): @pytest.mark.parametrize( 'min_part_size, recommended_upload_part_size, max_part_size, expected', [ - (100 * MEGABYTE, None, None, { - 'min_part_size': 100 * MEGABYTE - }), + (100 * MEGABYTE, None, None, {'min_part_size': 100 * MEGABYTE}), ( - GIGABYTE, GIGABYTE, None, { - 'min_part_size': GIGABYTE, - 'recommended_upload_part_size': GIGABYTE - } + GIGABYTE, + GIGABYTE, + None, + {'min_part_size': GIGABYTE, 'recommended_upload_part_size': GIGABYTE}, ), ( - None, None, DEFAULT_RECOMMENDED_UPLOAD_PART_SIZE // 2, { + None, + None, + DEFAULT_RECOMMENDED_UPLOAD_PART_SIZE // 2, + { 'recommended_upload_part_size': DEFAULT_RECOMMENDED_UPLOAD_PART_SIZE // 2, - 'max_part_size': DEFAULT_RECOMMENDED_UPLOAD_PART_SIZE // 2 - } + 'max_part_size': DEFAULT_RECOMMENDED_UPLOAD_PART_SIZE // 2, + }, ), ], ) @@ -675,7 +692,7 @@ def test_emerge_planner_from_account_info__with_explicitly_set_params( account_info=account_info, min_part_size=min_part_size, recommended_upload_part_size=recommended_upload_part_size, - max_part_size=max_part_size + max_part_size=max_part_size, ) assert planner.min_part_size == expected.get('min_part_size', DEFAULT_MIN_PART_SIZE) assert planner.recommended_upload_part_size == expected.get( diff --git a/test/unit/internal/test_unbound_write_intent.py b/test/unit/internal/test_unbound_write_intent.py index 6ecc0035f..7be6ce977 100644 --- a/test/unit/internal/test_unbound_write_intent.py +++ b/test/unit/internal/test_unbound_write_intent.py @@ -78,7 +78,7 @@ def _get_iterator(self, buffer_and_read_size: int = 1, data: bytes | None = None io.BytesIO(data), buffer_size_bytes=buffer_and_read_size, read_size=buffer_and_read_size, - **self.kwargs + **self.kwargs, ) return generator.iterator() @@ -124,7 +124,7 @@ def test_larger_buffer_size(self): for write_intent in self._get_iterator(read_size, data): read_data = self._read_write_intent(write_intent, full_read_size=read_size) offset = write_intent.destination_offset - expected_data = data[offset:offset + read_size] + expected_data = data[offset : offset + read_size] self.assertEqual(expected_data, read_data) def test_single_buffer_delivered(self): diff --git a/test/unit/internal/transfer/downloader/test_parallel.py b/test/unit/internal/transfer/downloader/test_parallel.py index 2be32f449..cac4ebf28 100644 --- a/test/unit/internal/transfer/downloader/test_parallel.py +++ b/test/unit/internal/transfer/downloader/test_parallel.py @@ -19,9 +19,9 @@ def mock_download_response_factory(apiver_module, bucket, file_size: int = 0): hasher = hashlib.sha1() - dummy_data = b"dummy" + dummy_data = b'dummy' file_content = (dummy_data * (file_size // len(dummy_data) + 1))[:file_size] - file_version = bucket.upload_bytes(file_content, f"dummy_file_{file_size}.txt") + file_version = bucket.upload_bytes(file_content, f'dummy_file_{file_size}.txt') hasher.update(file_content) url = bucket.api.session.get_download_url_by_name(bucket.name, file_version.file_name) @@ -64,8 +64,8 @@ def test_download_empty_file(apiver_module, b2api, bucket, downloader, output_fi 
) assert bytes_written == file_size - assert hash_hex == "da39a3ee5e6b4b0d3255bfef95601890afd80709" - assert output_file.getvalue() == b"" + assert hash_hex == 'da39a3ee5e6b4b0d3255bfef95601890afd80709' + assert output_file.getvalue() == b'' mock_response.close.assert_called_once() @@ -81,8 +81,8 @@ def test_download_file(apiver_module, b2api, bucket, downloader, output_file): ) assert bytes_written == file_size - assert hash_hex == "7804df8c623573ccfc1993e04981006e5bc30383" - assert output_file.getvalue() == b"dummy" * 20 + assert hash_hex == '7804df8c623573ccfc1993e04981006e5bc30383' + assert output_file.getvalue() == b'dummy' * 20 mock_response.close.assert_called_once() @@ -98,8 +98,8 @@ def test_download_file__data_stream_error__in_first_response( ) def iter_content(chunk_size=1, decode_unicode=False): - yield b"DUMMY" - raise RequestException("stream error") + yield b'DUMMY' + raise RequestException('stream error') yield # noqa mock_response.iter_content = iter_content @@ -109,7 +109,7 @@ def iter_content(chunk_size=1, decode_unicode=False): ) assert bytes_written == file_size - assert output_file.getvalue() == b"DUMMY" + b"dummy" * 19 + assert output_file.getvalue() == b'DUMMY' + b'dummy' * 19 def test_download_file__data_stream_error__persistent_errors( @@ -122,8 +122,8 @@ def test_download_file__data_stream_error__persistent_errors( # Ensure that follow-up requests also return errors def iter_content(chunk_size=1, decode_unicode=False): - yield b"d" - raise RequestException("stream error") + yield b'd' + raise RequestException('stream error') mock_response.iter_content = iter_content @@ -145,7 +145,7 @@ def test_download_file__data_stream_error__multiple_errors_recovery( def first_iter_content(chunk_size=1, decode_unicode=False): yield mock_response.raw.read(1) - raise RequestException("stream error") + raise RequestException('stream error') mock_response.iter_content = first_iter_content @@ -156,7 +156,7 @@ def download_func_mock(*args, **kwargs): def iter_content(chunk_size=1, decode_unicode=False): yield response.raw.read(1).upper() - raise RequestException("stream error") + raise RequestException('stream error') response.iter_content = iter_content return response @@ -168,4 +168,4 @@ def iter_content(chunk_size=1, decode_unicode=False): ) assert bytes_written == file_size - assert output_file.getvalue() == b"dUMMY" + b"DUMMY" * 19 + assert output_file.getvalue() == b'dUMMY' + b'DUMMY' * 19 diff --git a/test/unit/replication/test_monitoring.py b/test/unit/replication/test_monitoring.py index a6c280572..a035e68aa 100644 --- a/test/unit/replication/test_monitoring.py +++ b/test/unit/replication/test_monitoring.py @@ -42,7 +42,6 @@ def test_iter_pairs(source_bucket, destination_bucket, test_file, monitor): - source_file = source_bucket.upload_local_file(test_file, 'folder/test.txt') source_subfolder_file = source_bucket.upload_local_file(test_file, 'folder/subfolder/test.txt') @@ -57,7 +56,8 @@ def test_iter_pairs(source_bucket, destination_bucket, test_file, monitor): ( source_path and 'folder/' + source_path.relative_path, destination_path and 'folder/' + destination_path.relative_path, - ) for source_path, destination_path in monitor.iter_pairs() + ) + for source_path, destination_path in monitor.iter_pairs() ] assert set(pairs) == { @@ -73,15 +73,16 @@ def test_scan_source(source_bucket, test_file, monitor): source_bucket.upload_local_file(test_file, 'folder/test-1-1.txt'), source_bucket.upload_local_file(test_file, 'folder/test-1-2.txt'), 
source_bucket.upload_local_file(test_file, 'folder/test-2.txt', encryption=SSE_B2_AES), - source_bucket.upload_local_file(test_file, - 'not-in-folder.txt'), # monitor should ignore this + source_bucket.upload_local_file( + test_file, 'not-in-folder.txt' + ), # monitor should ignore this source_bucket.upload_local_file(test_file, 'folder/test-3.txt', encryption=SSE_C_AES), source_bucket.upload_local_file(test_file, 'folder/test-4.txt', encryption=SSE_C_AES), source_bucket.upload_local_file( test_file, 'folder/subfolder/test-5.txt', encryption=SSE_C_AES, - file_retention=RETENTION_GOVERNANCE + file_retention=RETENTION_GOVERNANCE, ), source_bucket.upload_local_file( test_file, @@ -103,50 +104,87 @@ def test_scan_source(source_bucket, test_file, monitor): assert report.counter_by_status[ReplicationScanResult(**DEFAULT_REPLICATION_RESULT)] == 2 - assert report.counter_by_status[ReplicationScanResult( - **{ - **DEFAULT_REPLICATION_RESULT, - 'source_encryption_mode': EncryptionMode.SSE_B2, - } - )] == 1 - - assert report.counter_by_status[ReplicationScanResult( - **{ - **DEFAULT_REPLICATION_RESULT, - 'source_encryption_mode': EncryptionMode.SSE_C, - } - )] == 2 - - assert report.counter_by_status[ReplicationScanResult( - **{ - **DEFAULT_REPLICATION_RESULT, - 'source_encryption_mode': EncryptionMode.SSE_C, - 'source_has_file_retention': True, - } - )] == 1 - - assert report.counter_by_status[ReplicationScanResult( - **{ - **DEFAULT_REPLICATION_RESULT, - 'source_has_large_metadata': True, - } - )] == 1 - - assert report.counter_by_status[ReplicationScanResult( - **{ - **DEFAULT_REPLICATION_RESULT, - 'source_encryption_mode': EncryptionMode.SSE_C, - 'source_has_large_metadata': True, - } - )] == 1 + assert ( + report.counter_by_status[ + ReplicationScanResult( + **{ + **DEFAULT_REPLICATION_RESULT, + 'source_encryption_mode': EncryptionMode.SSE_B2, + } + ) + ] + == 1 + ) + + assert ( + report.counter_by_status[ + ReplicationScanResult( + **{ + **DEFAULT_REPLICATION_RESULT, + 'source_encryption_mode': EncryptionMode.SSE_C, + } + ) + ] + == 2 + ) + + assert ( + report.counter_by_status[ + ReplicationScanResult( + **{ + **DEFAULT_REPLICATION_RESULT, + 'source_encryption_mode': EncryptionMode.SSE_C, + 'source_has_file_retention': True, + } + ) + ] + == 1 + ) + + assert ( + report.counter_by_status[ + ReplicationScanResult( + **{ + **DEFAULT_REPLICATION_RESULT, + 'source_has_large_metadata': True, + } + ) + ] + == 1 + ) + + assert ( + report.counter_by_status[ + ReplicationScanResult( + **{ + **DEFAULT_REPLICATION_RESULT, + 'source_encryption_mode': EncryptionMode.SSE_C, + 'source_has_large_metadata': True, + } + ) + ] + == 1 + ) # ---- first and last ---- - assert report.samples_by_status_first[ReplicationScanResult(**DEFAULT_REPLICATION_RESULT,) - ][0] == files[0] + assert ( + report.samples_by_status_first[ + ReplicationScanResult( + **DEFAULT_REPLICATION_RESULT, + ) + ][0] + == files[0] + ) - assert report.samples_by_status_last[ReplicationScanResult(**DEFAULT_REPLICATION_RESULT,) - ][0] == files[1] + assert ( + report.samples_by_status_last[ + ReplicationScanResult( + **DEFAULT_REPLICATION_RESULT, + ) + ][0] + == files[1] + ) def test_scan_source_and_destination( @@ -156,25 +194,25 @@ def test_scan_source_and_destination( # match source_bucket.upload_local_file(test_file, 'folder/test-1.txt'), destination_bucket.upload_local_file(test_file, 'folder/test-1.txt'), - # missing on destination source_bucket.upload_local_file(test_file, 'folder/test-2.txt'), - # missing on source 
destination_bucket.upload_local_file(test_file, 'folder/test-3.txt'), - # metadata differs source_bucket.upload_local_file( - test_file, 'folder/test-4.txt', file_info={ + test_file, + 'folder/test-4.txt', + file_info={ 'haha': 'hoho', - } + }, ), destination_bucket.upload_local_file( - test_file, 'folder/test-4.txt', file_info={ + test_file, + 'folder/test-4.txt', + file_info={ 'hehe': 'hihi', - } + }, ), - # hash differs source_bucket.upload_local_file(test_file, 'folder/test-5.txt'), destination_bucket.upload_local_file(test_file_reversed, 'folder/test-5.txt'), @@ -183,49 +221,74 @@ def test_scan_source_and_destination( report = monitor.scan(scan_destination=True) # match - assert report.counter_by_status[ReplicationScanResult( - **{ - **DEFAULT_REPLICATION_RESULT, - 'metadata_differs': False, - 'hash_differs': False, - } - )] == 1 + assert ( + report.counter_by_status[ + ReplicationScanResult( + **{ + **DEFAULT_REPLICATION_RESULT, + 'metadata_differs': False, + 'hash_differs': False, + } + ) + ] + == 1 + ) # missing on destination - assert report.counter_by_status[ReplicationScanResult( - **{ - **DEFAULT_REPLICATION_RESULT, - 'destination_replication_status': None, - } - )] == 1 + assert ( + report.counter_by_status[ + ReplicationScanResult( + **{ + **DEFAULT_REPLICATION_RESULT, + 'destination_replication_status': None, + } + ) + ] + == 1 + ) # missing on source - assert report.counter_by_status[ReplicationScanResult( - **{ - **DEFAULT_REPLICATION_RESULT, - 'source_replication_status': None, - 'source_has_hide_marker': None, - 'source_encryption_mode': None, - 'source_has_large_metadata': None, - 'source_has_file_retention': None, - 'source_has_legal_hold': None, - } - )] == 1 + assert ( + report.counter_by_status[ + ReplicationScanResult( + **{ + **DEFAULT_REPLICATION_RESULT, + 'source_replication_status': None, + 'source_has_hide_marker': None, + 'source_encryption_mode': None, + 'source_has_large_metadata': None, + 'source_has_file_retention': None, + 'source_has_legal_hold': None, + } + ) + ] + == 1 + ) # metadata differs - assert report.counter_by_status[ReplicationScanResult( - **{ - **DEFAULT_REPLICATION_RESULT, - 'metadata_differs': True, - 'hash_differs': False, - } - )] == 1 + assert ( + report.counter_by_status[ + ReplicationScanResult( + **{ + **DEFAULT_REPLICATION_RESULT, + 'metadata_differs': True, + 'hash_differs': False, + } + ) + ] + == 1 + ) # hash differs - assert report.counter_by_status[ReplicationScanResult( - **{ - **DEFAULT_REPLICATION_RESULT, - 'metadata_differs': False, - 'hash_differs': True, - } - )] == 1 + assert ( + report.counter_by_status[ + ReplicationScanResult( + **{ + **DEFAULT_REPLICATION_RESULT, + 'metadata_differs': False, + 'hash_differs': True, + } + ) + ] + == 1 + ) diff --git a/test/unit/scan/test_folder_traversal.py b/test/unit/scan/test_folder_traversal.py index 2fd9be5db..4971b08f7 100644 --- a/test/unit/scan/test_folder_traversal.py +++ b/test/unit/scan/test_folder_traversal.py @@ -28,7 +28,6 @@ class TestFolderTraversal: def test_flat_folder(self, tmp_path): - # Create a directory structure below with initial scanning point at tmp_path/dir: # tmp_path # └── dir @@ -36,20 +35,20 @@ def test_flat_folder(self, tmp_path): # ├── file2.txt # └── file3.txt - (tmp_path / "dir").mkdir(parents=True) + (tmp_path / 'dir').mkdir(parents=True) - (tmp_path / "dir" / "file1.txt").write_text("content1") - (tmp_path / "dir" / "file2.txt").write_text("content2") - (tmp_path / "dir" / "file3.txt").write_text("content3") + (tmp_path / 'dir' / 
'file1.txt').write_text('content1') + (tmp_path / 'dir' / 'file2.txt').write_text('content2') + (tmp_path / 'dir' / 'file3.txt').write_text('content3') - folder = LocalFolder(str(tmp_path / "dir")) + folder = LocalFolder(str(tmp_path / 'dir')) local_paths = folder.all_files(reporter=MagicMock()) absolute_paths = [path.absolute_path for path in list(local_paths)] assert absolute_paths == [ - fix_windows_path_limit(str(tmp_path / "dir" / "file1.txt")), - fix_windows_path_limit(str(tmp_path / "dir" / "file2.txt")), - fix_windows_path_limit(str(tmp_path / "dir" / "file3.txt")), + fix_windows_path_limit(str(tmp_path / 'dir' / 'file1.txt')), + fix_windows_path_limit(str(tmp_path / 'dir' / 'file2.txt')), + fix_windows_path_limit(str(tmp_path / 'dir' / 'file3.txt')), ] @pytest.mark.skipif( @@ -57,7 +56,6 @@ def test_flat_folder(self, tmp_path): reason="Windows doesn't allow / or \\ in filenames", ) def test_invalid_name(self, tmp_path): - # Create a directory structure below with initial scanning point at tmp_path/dir: # tmp_path # └── dir @@ -67,15 +65,15 @@ def test_invalid_name(self, tmp_path): # ├── file\bad.txt # └── file[DEL]bad.txt - (tmp_path / "dir" / "subdir").mkdir(parents=True) + (tmp_path / 'dir' / 'subdir').mkdir(parents=True) - (tmp_path / "dir" / "file1.txt").write_text("content1") - (tmp_path / "dir" / "subdir" / "file2.txt").write_text("content2") - (tmp_path / "dir" / "file\\bad.txt").write_text("bad1") - (tmp_path / "dir" / "file\x7fbad.txt").write_text("bad2") + (tmp_path / 'dir' / 'file1.txt').write_text('content1') + (tmp_path / 'dir' / 'subdir' / 'file2.txt').write_text('content2') + (tmp_path / 'dir' / 'file\\bad.txt').write_text('bad1') + (tmp_path / 'dir' / 'file\x7fbad.txt').write_text('bad2') reporter = ProgressReport(sys.stdout, False) - folder = LocalFolder(str(tmp_path / "dir")) + folder = LocalFolder(str(tmp_path / 'dir')) local_paths = folder.all_files(reporter=reporter) absolute_paths = [path.absolute_path for path in list(local_paths)] @@ -88,17 +86,17 @@ def test_invalid_name(self, tmp_path): reporter.close() assert absolute_paths == [ - fix_windows_path_limit(str(tmp_path / "dir" / "file1.txt")), - fix_windows_path_limit(str(tmp_path / "dir" / "subdir" / "file2.txt")), + fix_windows_path_limit(str(tmp_path / 'dir' / 'file1.txt')), + fix_windows_path_limit(str(tmp_path / 'dir' / 'subdir' / 'file2.txt')), ] @pytest.mark.skipif( - platform.system() == 'Windows' and - (platform.python_implementation() == 'PyPy' or sys.version_info >= (3, 13)), + platform.system() == 'Windows' + and (platform.python_implementation() == 'PyPy' or sys.version_info >= (3, 13)), reason=( - "PyPy on Windows force-decodes non-UTF-8 filenames, which makes it impossible to test this case. " - "Python 3.13 does so similarly on Windows." - ) + 'PyPy on Windows force-decodes non-UTF-8 filenames, which makes it impossible to test this case. ' + 'Python 3.13 does so similarly on Windows.' 
+ ), ) def test_invalid_unicode_filename(self, tmp_path): # Create a directory structure below with initial scanning point at tmp_path/dir: @@ -107,34 +105,34 @@ def test_invalid_unicode_filename(self, tmp_path): # ├── file1.txt # └── XXX (invalid utf-8 filename) - (tmp_path / "dir").mkdir(parents=True) - (tmp_path / "dir" / "file1.txt").write_text("content1") + (tmp_path / 'dir').mkdir(parents=True) + (tmp_path / 'dir' / 'file1.txt').write_text('content1') - foreign_encoding = "euc_jp" + foreign_encoding = 'euc_jp' # test sanity check - assert codecs.lookup(foreign_encoding).name != codecs.lookup( - sys.getfilesystemencoding() - ).name + assert ( + codecs.lookup(foreign_encoding).name != codecs.lookup(sys.getfilesystemencoding()).name + ) - invalid_utf8_path = os.path.join(bytes(tmp_path), b"dir", 'てすと'.encode(foreign_encoding)) + invalid_utf8_path = os.path.join(bytes(tmp_path), b'dir', 'てすと'.encode(foreign_encoding)) try: - with open(invalid_utf8_path, "wb") as f: - f.write(b"content2") + with open(invalid_utf8_path, 'wb') as f: + f.write(b'content2') except (OSError, UnicodeDecodeError): - pytest.skip("Cannot create invalid UTF-8 filename on this platform") + pytest.skip('Cannot create invalid UTF-8 filename on this platform') reporter = ProgressReport(sys.stdout, False) - folder = LocalFolder(str(tmp_path / "dir")) + folder = LocalFolder(str(tmp_path / 'dir')) local_paths = folder.all_files(reporter=reporter) absolute_paths = [path.absolute_path for path in list(local_paths)] assert absolute_paths == [ - fix_windows_path_limit(str(tmp_path / "dir" / "file1.txt")), + fix_windows_path_limit(str(tmp_path / 'dir' / 'file1.txt')), ] assert reporter.has_errors_or_warnings() assert re.match( r"WARNING: '.+/dir/.+' path contains invalid name " - r"\(file name must be valid Unicode, check locale\)\. Skipping\.", + r'\(file name must be valid Unicode, check locale\)\. 
Skipping\.', reporter.warnings[0], ) assert len(reporter.warnings) == 1 @@ -146,7 +144,6 @@ def test_invalid_unicode_filename(self, tmp_path): reason="Windows doesn't allow / or \\ in filenames", ) def test_invalid_directory_name(self, tmp_path): - # Create a directory structure below with initial scanning point at tmp_path/dir: # tmp_path # └── dir @@ -154,13 +151,13 @@ def test_invalid_directory_name(self, tmp_path): # └── dir\bad # └── file2.txt - (tmp_path / "dir").mkdir(parents=True) - (tmp_path / "dir" / "file1.txt").write_text("content1") - (tmp_path / "dir" / "dir\\bad").mkdir(parents=True) - (tmp_path / "dir" / "dir\\bad" / "file2.txt").write_text("content2") + (tmp_path / 'dir').mkdir(parents=True) + (tmp_path / 'dir' / 'file1.txt').write_text('content1') + (tmp_path / 'dir' / 'dir\\bad').mkdir(parents=True) + (tmp_path / 'dir' / 'dir\\bad' / 'file2.txt').write_text('content2') reporter = ProgressReport(sys.stdout, False) - folder = LocalFolder(str(tmp_path / "dir")) + folder = LocalFolder(str(tmp_path / 'dir')) local_paths = folder.all_files(reporter=reporter) absolute_paths = [path.absolute_path for path in list(local_paths)] @@ -171,11 +168,10 @@ def test_invalid_directory_name(self, tmp_path): reporter.close() assert absolute_paths == [ - fix_windows_path_limit(str(tmp_path / "dir" / "file1.txt")), + fix_windows_path_limit(str(tmp_path / 'dir' / 'file1.txt')), ] def test_folder_with_subfolders(self, tmp_path): - # Create a directory structure below with initial scanning point at tmp_path: # tmp_path # ├── dir1 @@ -185,45 +181,44 @@ def test_folder_with_subfolders(self, tmp_path): # ├── file3.txt # └── file4.txt - d1 = tmp_path / "dir1" + d1 = tmp_path / 'dir1' d1.mkdir() - (d1 / "file1.txt").write_text("content1") - (d1 / "file2.txt").write_text("content2") + (d1 / 'file1.txt').write_text('content1') + (d1 / 'file2.txt').write_text('content2') - d2 = tmp_path / "dir2" + d2 = tmp_path / 'dir2' d2.mkdir() - (d2 / "file3.txt").write_text("content3") - (d2 / "file4.txt").write_text("content4") + (d2 / 'file3.txt').write_text('content3') + (d2 / 'file4.txt').write_text('content4') folder = LocalFolder(str(tmp_path)) local_paths = folder.all_files(reporter=MagicMock()) absolute_paths = [path.absolute_path for path in list(local_paths)] assert absolute_paths == [ - fix_windows_path_limit(str(d1 / "file1.txt")), - fix_windows_path_limit(str(d1 / "file2.txt")), - fix_windows_path_limit(str(d2 / "file3.txt")), - fix_windows_path_limit(str(d2 / "file4.txt")), + fix_windows_path_limit(str(d1 / 'file1.txt')), + fix_windows_path_limit(str(d1 / 'file2.txt')), + fix_windows_path_limit(str(d2 / 'file3.txt')), + fix_windows_path_limit(str(d2 / 'file4.txt')), ] @pytest.mark.skipif( platform.system() == 'Windows' and platform.python_implementation() == 'PyPy', - reason="Symlinks not supported on PyPy/Windows", + reason='Symlinks not supported on PyPy/Windows', ) def test_folder_with_symlink_to_file(self, tmp_path): - # Create a directory structure below with initial scanning point at tmp_path: # tmp_path # ├── dir # │ └── file.txt # └── symlink_file.txt -> dir/file.txt - (tmp_path / "dir").mkdir() + (tmp_path / 'dir').mkdir() - file = tmp_path / "dir" / "file.txt" - file.write_text("content") + file = tmp_path / 'dir' / 'file.txt' + file.write_text('content') - symlink_file = tmp_path / "symlink_file.txt" + symlink_file = tmp_path / 'symlink_file.txt' symlink_file.symlink_to(file) folder = LocalFolder(str(tmp_path)) @@ -233,26 +228,25 @@ def test_folder_with_symlink_to_file(self, tmp_path): 
assert absolute_paths == [ fix_windows_path_limit(str(file)), - fix_windows_path_limit(str(symlink_file)) + fix_windows_path_limit(str(symlink_file)), ] @pytest.mark.skipif( platform.system() == 'Windows' and platform.python_implementation() == 'PyPy', - reason="Symlinks not supported on PyPy/Windows", + reason='Symlinks not supported on PyPy/Windows', ) @pytest.mark.timeout(5) def test_folder_with_circular_symlink(self, tmp_path): - # Create a directory structure below with initial scanning point at tmp_path: # tmp_path # ├── dir # │ └── file.txt # └── symlink_dir -> dir - (tmp_path / "dir").mkdir() - (tmp_path / "dir" / "file1.txt").write_text("content1") - symlink_dir = tmp_path / "dir" / "symlink_dir" - symlink_dir.symlink_to(tmp_path / "dir", target_is_directory=True) + (tmp_path / 'dir').mkdir() + (tmp_path / 'dir' / 'file1.txt').write_text('content1') + symlink_dir = tmp_path / 'dir' / 'symlink_dir' + symlink_dir.symlink_to(tmp_path / 'dir', target_is_directory=True) folder = LocalFolder(str(tmp_path)) @@ -260,17 +254,16 @@ def test_folder_with_circular_symlink(self, tmp_path): absolute_paths = [path.absolute_path for path in list(local_paths)] assert absolute_paths == [ - fix_windows_path_limit(str(tmp_path / "dir" / "file1.txt")), - fix_windows_path_limit(str(tmp_path / "dir" / "symlink_dir" / "file1.txt")), + fix_windows_path_limit(str(tmp_path / 'dir' / 'file1.txt')), + fix_windows_path_limit(str(tmp_path / 'dir' / 'symlink_dir' / 'file1.txt')), ] @pytest.mark.skipif( platform.system() == 'Windows' and platform.python_implementation() == 'PyPy', - reason="Symlinks not supported on PyPy/Windows", + reason='Symlinks not supported on PyPy/Windows', ) @pytest.mark.timeout(5) def test_folder_with_symlink_to_parent(self, tmp_path): - # Create a directory structure below with the scanning point at tmp_path/parent/child/: # tmp_path # ├── parent @@ -283,40 +276,62 @@ def test_folder_with_symlink_to_parent(self, tmp_path): # ├── file1.txt # └── file2.txt - (tmp_path / "parent" / "child" / "grandchild").mkdir(parents=True) - (tmp_path / "file1.txt").write_text("content1") - (tmp_path / "file2.txt").write_text("content2") - (tmp_path / "parent" / "file3.txt").write_text("content3") - (tmp_path / "parent" / "child" / "file4.txt").write_text("content4") - (tmp_path / "parent" / "child" / "grandchild" / "file5.txt").write_text("content5") - symlink_dir = tmp_path / "parent" / "child" / "grandchild" / "symlink_dir" - symlink_dir.symlink_to(tmp_path / "parent", target_is_directory=True) + (tmp_path / 'parent' / 'child' / 'grandchild').mkdir(parents=True) + (tmp_path / 'file1.txt').write_text('content1') + (tmp_path / 'file2.txt').write_text('content2') + (tmp_path / 'parent' / 'file3.txt').write_text('content3') + (tmp_path / 'parent' / 'child' / 'file4.txt').write_text('content4') + (tmp_path / 'parent' / 'child' / 'grandchild' / 'file5.txt').write_text('content5') + symlink_dir = tmp_path / 'parent' / 'child' / 'grandchild' / 'symlink_dir' + symlink_dir.symlink_to(tmp_path / 'parent', target_is_directory=True) - folder = LocalFolder(str(tmp_path / "parent" / "child")) + folder = LocalFolder(str(tmp_path / 'parent' / 'child')) local_paths = folder.all_files(reporter=MagicMock()) absolute_paths = [path.absolute_path for path in list(local_paths)] assert absolute_paths == [ - fix_windows_path_limit(str(tmp_path / "parent" / "child" / "file4.txt")), - fix_windows_path_limit(str(tmp_path / "parent" / "child" / "grandchild" / "file5.txt")), - fix_windows_path_limit(str(tmp_path / "parent" / "child" 
/ "grandchild" / "symlink_dir" / "child" / "file4.txt")), - fix_windows_path_limit(str(tmp_path / "parent" / "child" / "grandchild" / "symlink_dir" / "child" / "grandchild" / "file5.txt")), - fix_windows_path_limit(str(tmp_path / "parent" / "child" / "grandchild" / "symlink_dir" / "file3.txt")), - ] # yapf: disable + fix_windows_path_limit(str(tmp_path / 'parent' / 'child' / 'file4.txt')), + fix_windows_path_limit(str(tmp_path / 'parent' / 'child' / 'grandchild' / 'file5.txt')), + fix_windows_path_limit( + str( + tmp_path + / 'parent' + / 'child' + / 'grandchild' + / 'symlink_dir' + / 'child' + / 'file4.txt' + ) + ), + fix_windows_path_limit( + str( + tmp_path + / 'parent' + / 'child' + / 'grandchild' + / 'symlink_dir' + / 'child' + / 'grandchild' + / 'file5.txt' + ) + ), + fix_windows_path_limit( + str(tmp_path / 'parent' / 'child' / 'grandchild' / 'symlink_dir' / 'file3.txt') + ), + ] @pytest.mark.skipif( platform.system() == 'Windows' and platform.python_implementation() == 'PyPy', - reason="Symlinks not supported on PyPy/Windows", + reason='Symlinks not supported on PyPy/Windows', ) @pytest.mark.timeout(5) def test_root_short_loop(self, tmp_path): - # Create a symlink to the tmp_path directory itself # tmp_path # └── tmp_path_symlink -> tmp_path - tmp_path_symlink = tmp_path / "tmp_path_symlink" + tmp_path_symlink = tmp_path / 'tmp_path_symlink' tmp_path_symlink.symlink_to(tmp_path, target_is_directory=True) folder = LocalFolder(str(tmp_path_symlink)) @@ -328,37 +343,35 @@ def test_root_short_loop(self, tmp_path): @pytest.mark.skipif( platform.system() == 'Windows' and platform.python_implementation() == 'PyPy', - reason="Symlinks not supported on PyPy/Windows", + reason='Symlinks not supported on PyPy/Windows', ) @pytest.mark.timeout(5) def test_root_parent_loop(self, tmp_path): - # Create a symlink that points to the parent of the initial scanning point # tmp_path # └── start # ├── file.txt # └── symlink -> tmp_path - (tmp_path / "start").mkdir() - (tmp_path / "start" / "file.txt").write_text("content") - (tmp_path / "start" / "symlink").symlink_to(tmp_path, target_is_directory=True) + (tmp_path / 'start').mkdir() + (tmp_path / 'start' / 'file.txt').write_text('content') + (tmp_path / 'start' / 'symlink').symlink_to(tmp_path, target_is_directory=True) - folder = LocalFolder(str(tmp_path / "start")) + folder = LocalFolder(str(tmp_path / 'start')) local_paths = folder.all_files(reporter=MagicMock()) absolute_paths = [path.absolute_path for path in list(local_paths)] assert absolute_paths == [ - fix_windows_path_limit(str(tmp_path / "start" / "file.txt")), - fix_windows_path_limit(str(tmp_path / "start" / "symlink" / "start" / "file.txt")), + fix_windows_path_limit(str(tmp_path / 'start' / 'file.txt')), + fix_windows_path_limit(str(tmp_path / 'start' / 'symlink' / 'start' / 'file.txt')), ] @pytest.mark.skipif( platform.system() == 'Windows' and platform.python_implementation() == 'PyPy', - reason="Symlinks not supported on PyPy/Windows", + reason='Symlinks not supported on PyPy/Windows', ) def test_symlink_that_points_deeper(self, tmp_path): - # Create a directory structure with a symlink that points to a deeper directory # tmp_path # ├── a @@ -374,16 +387,16 @@ def test_symlink_that_points_deeper(self, tmp_path): # │ └── f.txt # └── symlink -> b/d/e - (tmp_path / "a").mkdir() - (tmp_path / "a" / "a.txt").write_text("a") - (tmp_path / "b" / "c").mkdir(parents=True) - (tmp_path / "b" / "c" / "c.txt").write_text("c") - (tmp_path / "b" / "d" / "e").mkdir(parents=True) - (tmp_path / "b" / 
"d" / "d.txt").write_text("d") - (tmp_path / "b" / "d" / "e" / "e.txt").write_text("e") - (tmp_path / "f").mkdir() - (tmp_path / "f" / "f.txt").write_text("f") - (tmp_path / "symlink").symlink_to(tmp_path / "b" / "d" / "e", target_is_directory=True) + (tmp_path / 'a').mkdir() + (tmp_path / 'a' / 'a.txt').write_text('a') + (tmp_path / 'b' / 'c').mkdir(parents=True) + (tmp_path / 'b' / 'c' / 'c.txt').write_text('c') + (tmp_path / 'b' / 'd' / 'e').mkdir(parents=True) + (tmp_path / 'b' / 'd' / 'd.txt').write_text('d') + (tmp_path / 'b' / 'd' / 'e' / 'e.txt').write_text('e') + (tmp_path / 'f').mkdir() + (tmp_path / 'f' / 'f.txt').write_text('f') + (tmp_path / 'symlink').symlink_to(tmp_path / 'b' / 'd' / 'e', target_is_directory=True) folder = LocalFolder(str(tmp_path)) @@ -391,20 +404,19 @@ def test_symlink_that_points_deeper(self, tmp_path): absolute_paths = [path.absolute_path for path in list(local_paths)] assert absolute_paths == [ - fix_windows_path_limit(str(tmp_path / "a" / "a.txt")), - fix_windows_path_limit(str(tmp_path / "b" / "c" / "c.txt")), - fix_windows_path_limit(str(tmp_path / "b" / "d" / "d.txt")), - fix_windows_path_limit(str(tmp_path / "b" / "d" / "e" / "e.txt")), - fix_windows_path_limit(str(tmp_path / "f" / "f.txt")), - fix_windows_path_limit(str(tmp_path / "symlink" / "e.txt")), + fix_windows_path_limit(str(tmp_path / 'a' / 'a.txt')), + fix_windows_path_limit(str(tmp_path / 'b' / 'c' / 'c.txt')), + fix_windows_path_limit(str(tmp_path / 'b' / 'd' / 'd.txt')), + fix_windows_path_limit(str(tmp_path / 'b' / 'd' / 'e' / 'e.txt')), + fix_windows_path_limit(str(tmp_path / 'f' / 'f.txt')), + fix_windows_path_limit(str(tmp_path / 'symlink' / 'e.txt')), ] @pytest.mark.skipif( platform.system() == 'Windows' and platform.python_implementation() == 'PyPy', - reason="Symlinks not supported on PyPy/Windows" + reason='Symlinks not supported on PyPy/Windows', ) def test_symlink_that_points_up(self, tmp_path): - # Create a directory structure with a symlink that points to a upper directory # tmp_path # ├── a @@ -418,14 +430,16 @@ def test_symlink_that_points_up(self, tmp_path): # ├── symlink -> ../../a # └── e.txt - (tmp_path / "a").mkdir() - (tmp_path / "a" / "a.txt").write_text("a") - (tmp_path / "b" / "c").mkdir(parents=True) - (tmp_path / "b" / "c" / "c.txt").write_text("c") - (tmp_path / "b" / "d" / "e").mkdir(parents=True) - (tmp_path / "b" / "d" / "d.txt").write_text("d") - (tmp_path / "b" / "d" / "e" / "e.txt").write_text("e") - (tmp_path / "b" / "d" / "e" / "symlink").symlink_to(tmp_path / "a", target_is_directory=True) # yapf: disable + (tmp_path / 'a').mkdir() + (tmp_path / 'a' / 'a.txt').write_text('a') + (tmp_path / 'b' / 'c').mkdir(parents=True) + (tmp_path / 'b' / 'c' / 'c.txt').write_text('c') + (tmp_path / 'b' / 'd' / 'e').mkdir(parents=True) + (tmp_path / 'b' / 'd' / 'd.txt').write_text('d') + (tmp_path / 'b' / 'd' / 'e' / 'e.txt').write_text('e') + (tmp_path / 'b' / 'd' / 'e' / 'symlink').symlink_to( + tmp_path / 'a', target_is_directory=True + ) folder = LocalFolder(str(tmp_path)) @@ -433,20 +447,19 @@ def test_symlink_that_points_up(self, tmp_path): absolute_paths = [path.absolute_path for path in list(local_paths)] assert absolute_paths == [ - fix_windows_path_limit(str(tmp_path / "a" / "a.txt")), - fix_windows_path_limit(str(tmp_path / "b" / "c" / "c.txt")), - fix_windows_path_limit(str(tmp_path / "b" / "d" / "d.txt")), - fix_windows_path_limit(str(tmp_path / "b" / "d" / "e" / "e.txt")), - fix_windows_path_limit(str(tmp_path / "b" / "d" / "e" / "symlink" / "a.txt")), 
+ fix_windows_path_limit(str(tmp_path / 'a' / 'a.txt')), + fix_windows_path_limit(str(tmp_path / 'b' / 'c' / 'c.txt')), + fix_windows_path_limit(str(tmp_path / 'b' / 'd' / 'd.txt')), + fix_windows_path_limit(str(tmp_path / 'b' / 'd' / 'e' / 'e.txt')), + fix_windows_path_limit(str(tmp_path / 'b' / 'd' / 'e' / 'symlink' / 'a.txt')), ] @pytest.mark.skipif( platform.system() == 'Windows' and platform.python_implementation() == 'PyPy', - reason="Symlinks not supported on PyPy/Windows" + reason='Symlinks not supported on PyPy/Windows', ) @pytest.mark.timeout(5) def test_elaborate_infinite_loop(self, tmp_path): - # Create a directory structure with an elaborate infinite loop of symlinks # tmp_path # ├── a @@ -458,14 +471,14 @@ def test_elaborate_infinite_loop(self, tmp_path): # └── f # └── f.txt - (tmp_path / "a").mkdir() - (tmp_path / "a" / "a.txt").write_text("a") - (tmp_path / "b").symlink_to("c") - (tmp_path / "c").symlink_to("d") - (tmp_path / "d").symlink_to("e") - (tmp_path / "e").symlink_to("b") - (tmp_path / "f").mkdir() - (tmp_path / "f" / "f.txt").write_text("f") + (tmp_path / 'a').mkdir() + (tmp_path / 'a' / 'a.txt').write_text('a') + (tmp_path / 'b').symlink_to('c') + (tmp_path / 'c').symlink_to('d') + (tmp_path / 'd').symlink_to('e') + (tmp_path / 'e').symlink_to('b') + (tmp_path / 'f').mkdir() + (tmp_path / 'f' / 'f.txt').write_text('f') folder = LocalFolder(str(tmp_path)) @@ -473,16 +486,15 @@ def test_elaborate_infinite_loop(self, tmp_path): absolute_paths = [path.absolute_path for path in list(local_paths)] assert absolute_paths == [ - fix_windows_path_limit(str(tmp_path / "a" / "a.txt")), - fix_windows_path_limit(str(tmp_path / "f" / "f.txt")), + fix_windows_path_limit(str(tmp_path / 'a' / 'a.txt')), + fix_windows_path_limit(str(tmp_path / 'f' / 'f.txt')), ] @pytest.mark.skipif( platform.system() == 'Windows' and platform.python_implementation() == 'PyPy', - reason="Symlinks not supported on PyPy/Windows", + reason='Symlinks not supported on PyPy/Windows', ) def test_valid_symlink_pattern_where_the_link_goes_down_and_up(self, tmp_path): - # tmp_path # ├── a # │ └── a.txt @@ -496,17 +508,17 @@ def test_valid_symlink_pattern_where_the_link_goes_down_and_up(self, tmp_path): # └── f # └── f.txt - (tmp_path / "a").mkdir() - (tmp_path / "a" / "a.txt").write_text("a") - (tmp_path / "b").symlink_to(tmp_path / "c" / "d", target_is_directory=True) # yapf: disable - (tmp_path / "c").mkdir() - (tmp_path / "c" / "d").mkdir() - (tmp_path / "c" / "d" / "b.txt").write_text("b") - (tmp_path / "d").symlink_to(tmp_path / "e", target_is_directory=True) - (tmp_path / "e").mkdir() - (tmp_path / "e" / "e.txt").write_text("e") - (tmp_path / "f").mkdir() - (tmp_path / "f" / "f.txt").write_text("f") + (tmp_path / 'a').mkdir() + (tmp_path / 'a' / 'a.txt').write_text('a') + (tmp_path / 'b').symlink_to(tmp_path / 'c' / 'd', target_is_directory=True) + (tmp_path / 'c').mkdir() + (tmp_path / 'c' / 'd').mkdir() + (tmp_path / 'c' / 'd' / 'b.txt').write_text('b') + (tmp_path / 'd').symlink_to(tmp_path / 'e', target_is_directory=True) + (tmp_path / 'e').mkdir() + (tmp_path / 'e' / 'e.txt').write_text('e') + (tmp_path / 'f').mkdir() + (tmp_path / 'f' / 'f.txt').write_text('f') folder = LocalFolder(str(tmp_path)) @@ -514,20 +526,19 @@ def test_valid_symlink_pattern_where_the_link_goes_down_and_up(self, tmp_path): absolute_paths = [path.absolute_path for path in list(local_paths)] assert absolute_paths == [ - fix_windows_path_limit(str(tmp_path / "a" / "a.txt")), - fix_windows_path_limit(str(tmp_path / "b" / 
"b.txt")), - fix_windows_path_limit(str(tmp_path / "c" / "d" / "b.txt")), - fix_windows_path_limit(str(tmp_path / "d" / "e.txt")), - fix_windows_path_limit(str(tmp_path / "e" / "e.txt")), - fix_windows_path_limit(str(tmp_path / "f" / "f.txt")), + fix_windows_path_limit(str(tmp_path / 'a' / 'a.txt')), + fix_windows_path_limit(str(tmp_path / 'b' / 'b.txt')), + fix_windows_path_limit(str(tmp_path / 'c' / 'd' / 'b.txt')), + fix_windows_path_limit(str(tmp_path / 'd' / 'e.txt')), + fix_windows_path_limit(str(tmp_path / 'e' / 'e.txt')), + fix_windows_path_limit(str(tmp_path / 'f' / 'f.txt')), ] @pytest.mark.skipif( platform.system() == 'Windows' and platform.python_implementation() == 'PyPy', - reason="Symlinks not supported on PyPy/Windows", + reason='Symlinks not supported on PyPy/Windows', ) def test_valid_symlink_pattern_where_the_link_goes_up_and_down(self, tmp_path): - # Create a directory structure with a valid symlink pattern where the link goes up and down # tmp_path # ├── a @@ -540,15 +551,15 @@ def test_valid_symlink_pattern_where_the_link_goes_up_and_down(self, tmp_path): # │ └── f.txt # └── t.txt - (tmp_path / "a").mkdir() - (tmp_path / "a" / "a.txt").write_text("a") - (tmp_path / "b").mkdir() - (tmp_path / "b" / "c").symlink_to(tmp_path / "d", target_is_directory=True) - (tmp_path / "d").mkdir() - (tmp_path / "d" / "e").mkdir() - (tmp_path / "d" / "e" / "f").mkdir() - (tmp_path / "d" / "e" / "f" / "f.txt").write_text("f") - (tmp_path / "t.txt").write_text("t") + (tmp_path / 'a').mkdir() + (tmp_path / 'a' / 'a.txt').write_text('a') + (tmp_path / 'b').mkdir() + (tmp_path / 'b' / 'c').symlink_to(tmp_path / 'd', target_is_directory=True) + (tmp_path / 'd').mkdir() + (tmp_path / 'd' / 'e').mkdir() + (tmp_path / 'd' / 'e' / 'f').mkdir() + (tmp_path / 'd' / 'e' / 'f' / 'f.txt').write_text('f') + (tmp_path / 't.txt').write_text('t') folder = LocalFolder(str(tmp_path)) @@ -556,19 +567,18 @@ def test_valid_symlink_pattern_where_the_link_goes_up_and_down(self, tmp_path): absolute_paths = [path.absolute_path for path in list(local_paths)] assert absolute_paths == [ - fix_windows_path_limit(str(tmp_path / "a" / "a.txt")), - fix_windows_path_limit(str(tmp_path / "b" / "c" / "e" / "f" / "f.txt")), - fix_windows_path_limit(str(tmp_path / "d" / "e" / "f" / "f.txt")), - fix_windows_path_limit(str(tmp_path / "t.txt")), + fix_windows_path_limit(str(tmp_path / 'a' / 'a.txt')), + fix_windows_path_limit(str(tmp_path / 'b' / 'c' / 'e' / 'f' / 'f.txt')), + fix_windows_path_limit(str(tmp_path / 'd' / 'e' / 'f' / 'f.txt')), + fix_windows_path_limit(str(tmp_path / 't.txt')), ] @pytest.mark.skipif( platform.system() == 'Windows' and platform.python_implementation() == 'PyPy', - reason="Symlinks not supported on PyPy/Windows", + reason='Symlinks not supported on PyPy/Windows', ) @pytest.mark.timeout(5) def test_loop_that_goes_down_and_up(self, tmp_path): - # Create a directory structure with a loop that goes down and up # tmp_path # ├── a @@ -580,14 +590,14 @@ def test_loop_that_goes_down_and_up(self, tmp_path): # └── f # └── f.txt - (tmp_path / "a").mkdir() - (tmp_path / "a" / "a.txt").write_text("a") - (tmp_path / "b").symlink_to(tmp_path / "c" / "d", target_is_directory=True) - (tmp_path / "c").mkdir() - (tmp_path / "c" / "d").symlink_to(tmp_path / "e", target_is_directory=True) - (tmp_path / "e").symlink_to("b") - (tmp_path / "f").mkdir() - (tmp_path / "f" / "f.txt").write_text("f") + (tmp_path / 'a').mkdir() + (tmp_path / 'a' / 'a.txt').write_text('a') + (tmp_path / 'b').symlink_to(tmp_path / 'c' / 'd', 
target_is_directory=True) + (tmp_path / 'c').mkdir() + (tmp_path / 'c' / 'd').symlink_to(tmp_path / 'e', target_is_directory=True) + (tmp_path / 'e').symlink_to('b') + (tmp_path / 'f').mkdir() + (tmp_path / 'f' / 'f.txt').write_text('f') folder = LocalFolder(str(tmp_path)) @@ -595,17 +605,16 @@ def test_loop_that_goes_down_and_up(self, tmp_path): absolute_paths = [path.absolute_path for path in list(local_paths)] assert absolute_paths == [ - fix_windows_path_limit(str(tmp_path / "a" / "a.txt")), - fix_windows_path_limit(str(tmp_path / "f" / "f.txt")), + fix_windows_path_limit(str(tmp_path / 'a' / 'a.txt')), + fix_windows_path_limit(str(tmp_path / 'f' / 'f.txt')), ] @pytest.mark.skipif( platform.system() == 'Windows' and platform.python_implementation() == 'PyPy', - reason="Symlinks not supported on PyPy/Windows" + reason='Symlinks not supported on PyPy/Windows', ) @pytest.mark.timeout(5) def test_loop_that_goes_up_and_down(self, tmp_path): - # Create a directory structure with a loop that goes up and down # tmp_path # ├── a @@ -618,15 +627,15 @@ def test_loop_that_goes_up_and_down(self, tmp_path): # └── g # └── g.txt - (tmp_path / "a").mkdir() - (tmp_path / "a" / "a.txt").write_text("a") - (tmp_path / "b").mkdir() - (tmp_path / "b" / "c").symlink_to(tmp_path / "d", target_is_directory=True) - (tmp_path / "d").mkdir() - (tmp_path / "d" / "e").mkdir() - (tmp_path / "d" / "e" / "f").symlink_to(tmp_path / "b" / "c", target_is_directory=True) - (tmp_path / "g").mkdir() - (tmp_path / "g" / "g.txt").write_text("g") + (tmp_path / 'a').mkdir() + (tmp_path / 'a' / 'a.txt').write_text('a') + (tmp_path / 'b').mkdir() + (tmp_path / 'b' / 'c').symlink_to(tmp_path / 'd', target_is_directory=True) + (tmp_path / 'd').mkdir() + (tmp_path / 'd' / 'e').mkdir() + (tmp_path / 'd' / 'e' / 'f').symlink_to(tmp_path / 'b' / 'c', target_is_directory=True) + (tmp_path / 'g').mkdir() + (tmp_path / 'g' / 'g.txt').write_text('g') folder = LocalFolder(str(tmp_path)) @@ -634,46 +643,46 @@ def test_loop_that_goes_up_and_down(self, tmp_path): absolute_paths = [path.absolute_path for path in list(local_paths)] assert absolute_paths == [ - fix_windows_path_limit(str(tmp_path / "a" / "a.txt")), - fix_windows_path_limit(str(tmp_path / "g" / "g.txt")), + fix_windows_path_limit(str(tmp_path / 'a' / 'a.txt')), + fix_windows_path_limit(str(tmp_path / 'g' / 'g.txt')), ] def test_folder_all_files__dir_excluded_by_regex(self, tmp_path): """ bar$ regex should exclude bar directory and all files inside it """ - d1_dir = tmp_path / "d1" + d1_dir = tmp_path / 'd1' d1_dir.mkdir() - (d1_dir / "file1.txt").touch() + (d1_dir / 'file1.txt').touch() - bar_dir = tmp_path / "bar" + bar_dir = tmp_path / 'bar' bar_dir.mkdir() - (bar_dir / "file2.txt").touch() + (bar_dir / 'file2.txt').touch() - scan_policy = ScanPoliciesManager(exclude_dir_regexes=["bar$"]) + scan_policy = ScanPoliciesManager(exclude_dir_regexes=['bar$']) folder = LocalFolder(tmp_path) local_paths = folder.all_files(reporter=None, policies_manager=scan_policy) absolute_paths = [path.absolute_path for path in local_paths] assert absolute_paths == [ - fix_windows_path_limit(str(d1_dir / "file1.txt")), + fix_windows_path_limit(str(d1_dir / 'file1.txt')), ] def test_excluded_no_access_check(self, tmp_path): """Test that a directory/file is not checked for access if it is excluded.""" # Create directories and files - excluded_dir = tmp_path / "excluded_dir" + excluded_dir = tmp_path / 'excluded_dir' excluded_dir.mkdir() - excluded_file = excluded_dir / "excluded_file.txt" + 
excluded_file = excluded_dir / 'excluded_file.txt' excluded_file.touch() - included_dir = tmp_path / "included_dir" + included_dir = tmp_path / 'included_dir' included_dir.mkdir() - (included_dir / "excluded_file.txt").touch() + (included_dir / 'excluded_file.txt').touch() # Setup exclusion regex that matches the excluded directory/file name scan_policy = ScanPoliciesManager( - exclude_dir_regexes=[r"excluded_dir$"], exclude_file_regexes=[r'.*excluded_file.txt'] + exclude_dir_regexes=[r'excluded_dir$'], exclude_file_regexes=[r'.*excluded_file.txt'] ) reporter = ProgressReport(sys.stdout, False) @@ -689,14 +698,14 @@ def test_excluded_no_access_check(self, tmp_path): @pytest.mark.skipif( platform.system() == 'Windows', - reason="Unix-only filesystem permissions are tested", + reason='Unix-only filesystem permissions are tested', ) def test_dir_without_exec_permission(self, tmp_path, fs_perm_tool): """Test that a excluded directory/file without permissions emits warnings.""" - no_perm_dir = tmp_path / "no_perm_dir" + no_perm_dir = tmp_path / 'no_perm_dir' no_perm_dir.mkdir() - (no_perm_dir / "file.txt").touch() - (no_perm_dir / "file2.txt").touch() + (no_perm_dir / 'file.txt').touch() + (no_perm_dir / 'file2.txt').touch() # chmod -x no_perm_dir no_perm_dir.chmod(0o600) @@ -718,17 +727,17 @@ def test_dir_without_exec_permission(self, tmp_path, fs_perm_tool): def test_without_permissions(self, tmp_path, fs_perm_tool): """Test that a excluded directory/file without permissions emits warnings.""" - no_perm_dir = tmp_path / "no_perm_dir" + no_perm_dir = tmp_path / 'no_perm_dir' no_perm_dir.mkdir() - (no_perm_dir / "file.txt").touch() + (no_perm_dir / 'file.txt').touch() - included_dir = tmp_path / "included_dir" + included_dir = tmp_path / 'included_dir' included_dir.mkdir() - (included_dir / "no_perm_file.txt").touch() - (included_dir / "included_file.txt").touch() + (included_dir / 'no_perm_file.txt').touch() + (included_dir / 'included_file.txt').touch() # Modify directory permissions to simulate lack of access - fs_perm_tool.deny_access(included_dir / "no_perm_file.txt") + fs_perm_tool.deny_access(included_dir / 'no_perm_file.txt') fs_perm_tool.deny_access(no_perm_dir) scan_policy = ScanPoliciesManager() @@ -739,7 +748,7 @@ def test_without_permissions(self, tmp_path, fs_perm_tool): absolute_paths = [pathlib.Path(path.absolute_path) for path in local_paths] # Check that only included_dir/included_file.txt was return - assert {path.name for path in absolute_paths} == {"included_file.txt"} + assert {path.name for path in absolute_paths} == {'included_file.txt'} def s(p): # shlex.quote works differently depending if its on windows or unix @@ -748,28 +757,28 @@ def s(p): # Check that no access warnings are issued for the excluded directory/file assert set(reporter.warnings) == { f'WARNING: {s(tmp_path / "no_perm_dir")} could not be accessed (no permissions to read?)', - f'WARNING: {s(tmp_path / "included_dir/no_perm_file.txt")} could not be accessed (no permissions to read?)' + f'WARNING: {s(tmp_path / "included_dir/no_perm_file.txt")} could not be accessed (no permissions to read?)', } reporter.close() def test_excluded_without_permissions(self, tmp_path, fs_perm_tool): """Test that a excluded directory/file without permissions is not processed and no warning is issued.""" - no_perm_dir = tmp_path / "no_perm_dir" + no_perm_dir = tmp_path / 'no_perm_dir' no_perm_dir.mkdir() - (no_perm_dir / "file.txt").touch() + (no_perm_dir / 'file.txt').touch() - included_dir = tmp_path / "included_dir" + 
included_dir = tmp_path / 'included_dir' included_dir.mkdir() - (included_dir / "no_perm_file.txt").touch() - (included_dir / "included_file.txt").touch() + (included_dir / 'no_perm_file.txt').touch() + (included_dir / 'included_file.txt').touch() # Modify directory permissions to simulate lack of access - fs_perm_tool.deny_access(included_dir / "no_perm_file.txt") + fs_perm_tool.deny_access(included_dir / 'no_perm_file.txt') fs_perm_tool.deny_access(no_perm_dir) scan_policy = ScanPoliciesManager( - exclude_dir_regexes=[r"no_perm_dir$"], exclude_file_regexes=[r'.*no_perm_file.txt'] + exclude_dir_regexes=[r'no_perm_dir$'], exclude_file_regexes=[r'.*no_perm_file.txt'] ) reporter = ProgressReport(sys.stdout, False) diff --git a/test/unit/scan/test_scan_policies.py b/test/unit/scan/test_scan_policies.py index 2c6cd2fc0..3e229f415 100644 --- a/test/unit/scan/test_scan_policies.py +++ b/test/unit/scan/test_scan_policies.py @@ -74,11 +74,11 @@ def test_illegal_timestamp(self, param, exception): @pytest.mark.apiver(from_ver=2) def test_re_pattern_argument_support(self): kwargs = { - param: (re.compile(r".*"),) + param: (re.compile(r'.*'),) for param in ( - "exclude_dir_regexes", - "exclude_file_regexes", - "include_file_regexes", + 'exclude_dir_regexes', + 'exclude_file_regexes', + 'include_file_regexes', ) } ScanPoliciesManager(**kwargs) diff --git a/test/unit/stream/test_progress.py b/test/unit/stream/test_progress.py index b4b6c88b1..c6d47f122 100644 --- a/test/unit/stream/test_progress.py +++ b/test/unit/stream/test_progress.py @@ -14,12 +14,12 @@ def test_reading_stream_with_progress(tmp_path): - stream = io.BytesIO(b"1234567890") + stream = io.BytesIO(b'1234567890') progress_listener = Mock() with ReadingStreamWithProgress(stream, progress_listener=progress_listener) as wrapped_stream: - assert wrapped_stream.read(1) == b"1" - assert wrapped_stream.read(2) == b"23" - assert wrapped_stream.read(3) == b"456" + assert wrapped_stream.read(1) == b'1' + assert wrapped_stream.read(2) == b'23' + assert wrapped_stream.read(3) == b'456' assert progress_listener.bytes_completed.call_count == 3 assert wrapped_stream.bytes_completed == 6 @@ -28,7 +28,7 @@ def test_reading_stream_with_progress(tmp_path): def test_reading_stream_with_progress__not_closing_wrapped_stream(tmp_path): - stream = io.BytesIO(b"1234567890") + stream = io.BytesIO(b'1234567890') progress_listener = Mock() with ReadingStreamWithProgress(stream, progress_listener=progress_listener) as wrapped_stream: assert wrapped_stream.read() @@ -44,7 +44,7 @@ def test_reading_stream_with_progress__closed_proxy(tmp_path): 'Exception ignored in: ' messages. 
""" - stream = io.BytesIO(b"1234567890") + stream = io.BytesIO(b'1234567890') progress_listener = Mock() wrapped_stream = ReadingStreamWithProgress(stream, progress_listener=progress_listener) diff --git a/test/unit/sync/fixtures.py b/test/unit/sync/fixtures.py index 7d6c01383..4fb56a05a 100644 --- a/test/unit/sync/fixtures.py +++ b/test/unit/sync/fixtures.py @@ -39,8 +39,10 @@ def get_synchronizer( ): kwargs = {} if apiver_deps.V < 2: - assert upload_mode == UploadMode.FULL, "upload_mode not supported in apiver < 2" - assert absolute_minimum_part_size is None, "absolute_minimum_part_size not supported in apiver < 2" + assert upload_mode == UploadMode.FULL, 'upload_mode not supported in apiver < 2' + assert ( + absolute_minimum_part_size is None + ), 'absolute_minimum_part_size not supported in apiver < 2' else: kwargs = dict( upload_mode=upload_mode, @@ -58,7 +60,7 @@ def get_synchronizer( compare_version_mode=compare_version_mode, compare_threshold=compare_threshold, sync_policy_manager=sync_policy_manager, - **kwargs + **kwargs, ) return get_synchronizer diff --git a/test/unit/sync/test_exception.py b/test/unit/sync/test_exception.py index 9a8335ed5..b7d01be0b 100644 --- a/test/unit/sync/test_exception.py +++ b/test/unit/sync/test_exception.py @@ -24,10 +24,13 @@ def test_environment_encoding_error(self): try: raise EnvironmentEncodingError('fred', 'george') except EnvironmentEncodingError as e: - assert str(e) == """file name fred cannot be decoded with system encoding (george). + assert ( + str(e) + == """file name fred cannot be decoded with system encoding (george). We think this is an environment error which you should workaround by setting your system encoding properly, for example like this: -export LANG=en_US.UTF-8""", str(e) +export LANG=en_US.UTF-8""" + ), str(e) def test_invalid_argument(self): try: diff --git a/test/unit/sync/test_sync.py b/test/unit/sync/test_sync.py index 1b1d65279..a9b51d7ec 100644 --- a/test/unit/sync/test_sync.py +++ b/test/unit/sync/test_sync.py @@ -78,38 +78,33 @@ def assert_folder_sync_actions(self, synchronizer, src_folder, dst_folder, expec @pytest.mark.apiver(to_ver=0) @pytest.mark.parametrize( - 'args', [ - { - 'newer_file_mode': IllegalEnum.ILLEGAL - }, - { - 'keep_days_or_delete': IllegalEnum.ILLEGAL - }, + 'args', + [ + {'newer_file_mode': IllegalEnum.ILLEGAL}, + {'keep_days_or_delete': IllegalEnum.ILLEGAL}, ], ids=[ 'newer_file_mode', 'keep_days_or_delete', - ] + ], ) def test_illegal_args_up_to_v0(self, synchronizer_factory, apiver, args): from apiver_deps_exception import CommandError + with pytest.raises(CommandError): synchronizer_factory(**args) @pytest.mark.apiver(from_ver=1) @pytest.mark.parametrize( - 'args', [ - { - 'newer_file_mode': IllegalEnum.ILLEGAL - }, - { - 'keep_days_or_delete': IllegalEnum.ILLEGAL - }, + 'args', + [ + {'newer_file_mode': IllegalEnum.ILLEGAL}, + {'keep_days_or_delete': IllegalEnum.ILLEGAL}, ], ids=[ 'newer_file_mode', 'keep_days_or_delete', - ] + ], ) def test_illegal_args_up_v1_and_up(self, synchronizer_factory, apiver, args): with pytest.raises(InvalidArgument): @@ -239,7 +234,7 @@ def test_delete_multiple_versions(self, synchronizer_factory, src_type): dst = self.b2_folder_factory(('a.txt', [100, 200])) expected = [ 'b2_delete(folder/a.txt, id_a_100, )', - 'b2_delete(folder/a.txt, id_a_200, (old version))' + 'b2_delete(folder/a.txt, id_a_200, (old version))', ] self.assert_folder_sync_actions(synchronizer, src, dst, expected) @@ -257,7 +252,8 @@ def test_delete_hide_b2_multiple_versions(self, 
synchronizer_factory, src_type): src = self.folder_factory(src_type) dst = self.b2_folder_factory(('a.txt', [TODAY, TODAY - 2 * DAY, TODAY - 4 * DAY])) expected = [ - 'b2_hide(folder/a.txt)', 'b2_delete(folder/a.txt, id_a_8294400000, (old version))' + 'b2_hide(folder/a.txt)', + 'b2_delete(folder/a.txt, id_a_8294400000, (old version))', ] self.assert_folder_sync_actions(synchronizer, src, dst, expected) @@ -275,7 +271,8 @@ def test_delete_hide_b2_multiple_versions_old(self, synchronizer_factory, src_ty src = self.folder_factory(src_type) dst = self.b2_folder_factory(('a.txt', [TODAY - 1 * DAY, TODAY - 3 * DAY, TODAY - 5 * DAY])) expected = [ - 'b2_hide(folder/a.txt)', 'b2_delete(folder/a.txt, id_a_8208000000, (old version))' + 'b2_hide(folder/a.txt)', + 'b2_delete(folder/a.txt, id_a_8208000000, (old version))', ] self.assert_folder_sync_actions(synchronizer, src, dst, expected) @@ -366,7 +363,7 @@ def test_already_hidden_multiple_versions_keep_days_delete_hide_marker( expected = [ 'b2_delete(folder/a.txt, id_a_8467200000, (hide marker))', 'b2_delete(folder/a.txt, id_a_8294400000, (old version))', - 'b2_delete(folder/a.txt, id_a_8121600000, (old version))' + 'b2_delete(folder/a.txt, id_a_8121600000, (old version))', ] self.assert_folder_sync_actions(synchronizer, src, dst, expected) @@ -387,7 +384,7 @@ def test_already_hidden_multiple_versions_keep_days_old_delete( dst = self.b2_folder_factory(('a.txt', [-TODAY + 2 * DAY, TODAY - 4 * DAY])) expected = [ 'b2_delete(folder/a.txt, id_a_8467200000, (hide marker))', - 'b2_delete(folder/a.txt, id_a_8294400000, (old version))' + 'b2_delete(folder/a.txt, id_a_8294400000, (old version))', ] self.assert_folder_sync_actions(synchronizer, src, dst, expected) @@ -405,7 +402,7 @@ def test_already_hidden_multiple_versions_delete(self, synchronizer_factory, src expected = [ 'b2_delete(folder/a.txt, id_a_8640000000, (hide marker))', 'b2_delete(folder/a.txt, id_a_8467200000, (old version))', - 'b2_delete(folder/a.txt, id_a_8294400000, (old version))' + 'b2_delete(folder/a.txt, id_a_8294400000, (old version))', ] self.assert_folder_sync_actions(synchronizer, src, dst, expected) @@ -500,16 +497,18 @@ def test_newer(self, synchronizer, src_type, dst_type, expected): 'src_type,expected', [ ( - 'local', [ + 'local', + [ 'b2_upload(/dir/a.txt, folder/a.txt, 8640000000)', 'b2_delete(folder/a.txt, id_a_8208000000, (old version))', - ] + ], ), ( - 'b2', [ + 'b2', + [ 'b2_copy(folder/a.txt, id_a_8640000000, folder/a.txt, 8640000000)', 'b2_delete(folder/a.txt, id_a_8208000000, (old version))', - ] + ], ), ], ) @@ -525,18 +524,20 @@ def test_newer_clean_old_versions(self, synchronizer_factory, src_type, expected 'src_type,expected', [ ( - 'local', [ + 'local', + [ 'b2_upload(/dir/a.txt, folder/a.txt, 8640000000)', 'b2_delete(folder/a.txt, id_a_8553600000, (old version))', 'b2_delete(folder/a.txt, id_a_8380800000, (old version))', - ] + ], ), ( - 'b2', [ + 'b2', + [ 'b2_copy(folder/a.txt, id_a_8640000000, folder/a.txt, 8640000000)', 'b2_delete(folder/a.txt, id_a_8553600000, (old version))', 'b2_delete(folder/a.txt, id_a_8380800000, (old version))', - ] + ], ), ], ) @@ -563,12 +564,12 @@ def test_older(self, synchronizer, apiver, src_type, dst_type, expected): self.assert_folder_sync_actions(synchronizer, src, dst, expected) messages = defaultdict( lambda: 'source file is older than destination: %s://a.txt with a time of 100 ' - 'cannot be synced to %s://a.txt with a time of 200, ' - 'unless a valid newer_file_mode is provided', + 'cannot be synced to %s://a.txt with a 
time of 200, ' + 'unless a valid newer_file_mode is provided', v0='source file is older than destination: %s://a.txt with a time of 100 ' - 'cannot be synced to %s://a.txt with a time of 200, ' - 'unless --skipNewer or --replaceNewer is provided' - ) # yapf: disable + 'cannot be synced to %s://a.txt with a time of 200, ' + 'unless --skipNewer or --replaceNewer is provided', + ) assert str(excinfo.value) == messages[apiver] % (src_type, dst_type) @@ -604,16 +605,18 @@ def test_older_replace(self, synchronizer_factory, src_type, dst_type, expected) 'src_type,expected', [ ( - 'local', [ + 'local', + [ 'b2_upload(/dir/a.txt, folder/a.txt, 100)', 'b2_delete(folder/a.txt, id_a_200, (old version))', - ] + ], ), ( - 'b2', [ + 'b2', + [ 'b2_copy(folder/a.txt, id_a_100, folder/a.txt, 100)', 'b2_delete(folder/a.txt, id_a_200, (old version))', - ] + ], ), ], ) @@ -687,17 +690,21 @@ def test_compare_size_not_equal(self, synchronizer_factory, src_type, dst_type, 'src_type,dst_type,expected', [ ( - 'local', 'b2', [ + 'local', + 'b2', + [ 'b2_upload(/dir/a.txt, folder/a.txt, 200)', - 'b2_delete(folder/a.txt, id_a_100, (old version))' - ] + 'b2_delete(folder/a.txt, id_a_100, (old version))', + ], ), ('b2', 'local', ['b2_download(folder/a.txt, id_a_200, /dir/a.txt, 200)']), ( - 'b2', 'b2', [ + 'b2', + 'b2', + [ 'b2_copy(folder/a.txt, id_a_200, folder/a.txt, 200)', - 'b2_delete(folder/a.txt, id_a_100, (old version))' - ] + 'b2_delete(folder/a.txt, id_a_100, (old version))', + ], ), ], ) @@ -706,7 +713,7 @@ def test_compare_size_not_equal_delete( ): synchronizer = synchronizer_factory( compare_version_mode=CompareVersionMode.SIZE, - keep_days_or_delete=KeepOrDeleteMode.DELETE + keep_days_or_delete=KeepOrDeleteMode.DELETE, ) src = self.folder_factory(src_type, ('a.txt', [200], 11)) dst = self.folder_factory(dst_type, ('a.txt', [100], 10)) @@ -731,7 +738,7 @@ def test_encryption_b2_to_local(self, synchronizer_factory, apiver): local, TODAY, self.reporter, - encryption_settings_provider=provider + encryption_settings_provider=provider, ) ) ) @@ -751,10 +758,12 @@ def test_encryption_b2_to_local(self, synchronizer_factory, apiver): file_version_kwarg = 'file_version' provider.get_setting_for_download.assert_has_calls( - [mock.call( - bucket=bucket, - **{file_version_kwarg: mock.ANY}, - )] + [ + mock.call( + bucket=bucket, + **{file_version_kwarg: mock.ANY}, + ) + ] ) # FIXME: rewrite this test to not use mock.call checks when all of Synchronizers tests are rewritten to test_bucket @@ -776,7 +785,7 @@ def test_encryption_local_to_b2(self, synchronizer_factory): remote, TODAY, self.reporter, - encryption_settings_provider=provider + encryption_settings_provider=provider, ) ) ) @@ -802,7 +811,7 @@ def test_encryption_local_to_b2(self, synchronizer_factory): bucket=bucket, b2_file_name='folder/directory/a.txt', file_info={'src_last_modified_millis': '100'}, - length=10 + length=10, ) ] @@ -826,7 +835,7 @@ def test_encryption_b2_to_b2(self, synchronizer_factory, apiver): dst, TODAY, self.reporter, - encryption_settings_provider=provider + encryption_settings_provider=provider, ) ) ) @@ -841,7 +850,7 @@ def test_encryption_b2_to_b2(self, synchronizer_factory, apiver): source_file_info={'in_b2': 'yes'}, progress_listener=mock.ANY, source_encryption=source_encryption, - destination_encryption=destination_encryption + destination_encryption=destination_encryption, ) ] @@ -891,20 +900,25 @@ def get_policy_class(self, sync_type, delete, keep_days): # style - i.e. with simulated api and fake files returned from methods. 
@pytest.mark.apiver(from_ver=2) @pytest.mark.parametrize( - "local_size,remote_size,local_sha1,local_partial_sha1,remote_sha1,should_be_incremental", + 'local_size,remote_size,local_sha1,local_partial_sha1,remote_sha1,should_be_incremental', [ - (2000, 1000, "ff" * 20, "aa" * 20, "aa" * 20, True), # incremental upload possible - (2000, 999, "ff" * 20, "aa" * 20, "aa" * 20, False), # uploaded part too small - (2000, 1000, "ff" * 20, "aa" * 20, None, False), # remote sha unknown - (2000, 1000, "ff" * 20, "aa" * 20, "bb" * 20, False), # remote sha mismatch - (2000, 3000, "ff" * 20, "aa" * 20, "bb" * 20, False), # remote file bigger - ] + (2000, 1000, 'ff' * 20, 'aa' * 20, 'aa' * 20, True), # incremental upload possible + (2000, 999, 'ff' * 20, 'aa' * 20, 'aa' * 20, False), # uploaded part too small + (2000, 1000, 'ff' * 20, 'aa' * 20, None, False), # remote sha unknown + (2000, 1000, 'ff' * 20, 'aa' * 20, 'bb' * 20, False), # remote sha mismatch + (2000, 3000, 'ff' * 20, 'aa' * 20, 'bb' * 20, False), # remote file bigger + ], ) def test_incremental_upload( - self, synchronizer_factory, local_size, remote_size, local_sha1, local_partial_sha1, - remote_sha1, should_be_incremental + self, + synchronizer_factory, + local_size, + remote_size, + local_sha1, + local_partial_sha1, + remote_sha1, + should_be_incremental, ): - synchronizer = synchronizer_factory( upload_mode=UploadMode.INCREMENTAL, absolute_minimum_part_size=1000 ) @@ -982,10 +996,13 @@ def test_sync_lexicographical_order(self, synchronizer_factory): dst = self.folder_factory('b2') self.assert_folder_sync_actions( - synchronizer, src, dst, [ + synchronizer, + src, + dst, + [ 'b2_upload(/dir/a b/bar, folder/a b/bar, 100)', - 'b2_upload(/dir/a/foo, folder/a/foo, 100)' - ] + 'b2_upload(/dir/a/foo, folder/a/foo, 100)', + ], ) dst_with_files = self.folder_factory('b2', *files) diff --git a/test/unit/sync/test_sync_report.py b/test/unit/sync/test_sync_report.py index 8c065da95..496224f79 100644 --- a/test/unit/sync/test_sync_report.py +++ b/test/unit/sync/test_sync_report.py @@ -21,7 +21,8 @@ def test_bad_terminal(self): stdout.write = MagicMock( side_effect=[ UnicodeEncodeError('codec', 'foo', 100, 105, 'artificial UnicodeEncodeError') - ] + list(range(25)) + ] + + list(range(25)) ) sync_report = SyncReport(stdout, False) sync_report.print_completion('transferred: 123.txt') diff --git a/test/unit/test_cache.py b/test/unit/test_cache.py index 389cd352f..2c9b38f58 100644 --- a/test/unit/test_cache.py +++ b/test/unit/test_cache.py @@ -31,7 +31,7 @@ def auth_info_cache(): return AuthInfoCache(InMemoryAccountInfo()) -@pytest.fixture(scope="function", params=[lf('in_memory_cache'), lf('auth_info_cache')]) +@pytest.fixture(scope='function', params=[lf('in_memory_cache'), lf('auth_info_cache')]) def cache(request): return request.param diff --git a/test/unit/test_exception.py b/test/unit/test_exception.py index d8be22f9d..258e301a4 100644 --- a/test/unit/test_exception.py +++ b/test/unit/test_exception.py @@ -64,6 +64,7 @@ def test_already_failed_exception(self): @pytest.mark.apiver(to_ver=1) def test_command_error(self): from apiver_deps_exception import CommandError + try: raise CommandError('foo') except CommandError as e: @@ -73,8 +74,9 @@ def test_command_error(self): class TestInterpretError: def test_file_already_hidden(self): self._check_one(FileAlreadyHidden, 400, 'already_hidden', '', {}) - assert 'File already hidden: file.txt' == \ - str(interpret_b2_error(400, 'already_hidden', '', {}, {'fileName': 'file.txt'})) + assert 'File already 
hidden: file.txt' == str( + interpret_b2_error(400, 'already_hidden', '', {}, {'fileName': 'file.txt'}) + ) def test_bad_json(self): self._check_one(BadJson, 400, 'bad_json', '', {}) @@ -83,34 +85,34 @@ def test_file_not_present(self): self._check_one(FileNotPresent, 400, 'no_such_file', '', {}) self._check_one(FileNotPresent, 400, 'file_not_present', '', {}) self._check_one(FileNotPresent, 404, 'not_found', '', {}) - assert 'File not present: file.txt' == \ - str(interpret_b2_error(404, 'not_found', '', {}, {'fileName': 'file.txt'})) - assert 'File not present: 01010101' == \ - str(interpret_b2_error(404, 'not_found', '', {}, {'fileId': '01010101'})) + assert 'File not present: file.txt' == str( + interpret_b2_error(404, 'not_found', '', {}, {'fileName': 'file.txt'}) + ) + assert 'File not present: 01010101' == str( + interpret_b2_error(404, 'not_found', '', {}, {'fileId': '01010101'}) + ) def test_file_or_bucket_not_present(self): self._check_one(ResourceNotFound, 404, None, None, {}) - assert 'No such file, bucket, or endpoint: ' == \ - str(interpret_b2_error(404, None, None, {})) + assert 'No such file, bucket, or endpoint: ' == str(interpret_b2_error(404, None, None, {})) def test_duplicate_bucket_name(self): self._check_one(DuplicateBucketName, 400, 'duplicate_bucket_name', '', {}) - assert 'Bucket name is already in use: my-bucket' == \ - str( - interpret_b2_error( - 400, 'duplicate_bucket_name', '', {}, {'bucketName': 'my-bucket'} - ) - ) + assert 'Bucket name is already in use: my-bucket' == str( + interpret_b2_error(400, 'duplicate_bucket_name', '', {}, {'bucketName': 'my-bucket'}) + ) def test_missing_part(self): self._check_one(MissingPart, 400, 'missing_part', '', {}) - assert 'Part number has not been uploaded: my-file-id' == \ - str(interpret_b2_error(400, 'missing_part', '', {}, {'fileId': 'my-file-id'})) + assert 'Part number has not been uploaded: my-file-id' == str( + interpret_b2_error(400, 'missing_part', '', {}, {'fileId': 'my-file-id'}) + ) def test_part_sha1_mismatch(self): self._check_one(PartSha1Mismatch, 400, 'part_sha1_mismatch', '', {}) - assert 'Part number my-file-id has wrong SHA1' == \ - str(interpret_b2_error(400, 'part_sha1_mismatch', '', {}, {'fileId': 'my-file-id'})) + assert 'Part number my-file-id has wrong SHA1' == str( + interpret_b2_error(400, 'part_sha1_mismatch', '', {}, {'fileId': 'my-file-id'}) + ) def test_unauthorized(self): self._check_one(Unauthorized, 401, '', '', {}) @@ -179,12 +181,13 @@ def _check_one( return actual_exception @pytest.mark.parametrize( - "status, code, expected_exception_cls", [ - (401, "email_not_verified", EmailNotVerified), - (401, "no_payment_history", NoPaymentHistory), - ] + 'status, code, expected_exception_cls', + [ + (401, 'email_not_verified', EmailNotVerified), + (401, 'no_payment_history', NoPaymentHistory), + ], ) def test_simple_error_handlers(self, status, code, expected_exception_cls): - error = interpret_b2_error(status, code, "", {}) + error = interpret_b2_error(status, code, '', {}) assert isinstance(error, expected_exception_cls) assert error.code == code diff --git a/test/unit/test_progress.py b/test/unit/test_progress.py index 5cf293265..3b382c5b9 100644 --- a/test/unit/test_progress.py +++ b/test/unit/test_progress.py @@ -12,22 +12,22 @@ @pytest.mark.parametrize( - "tqdm_available, quiet, expected_listener", + 'tqdm_available, quiet, expected_listener', [ - (True, False, "TqdmProgressListener"), - (False, False, "SimpleProgressListener"), - (False, True, "DoNothingProgressListener"), + (True, 
False, 'TqdmProgressListener'), + (False, False, 'SimpleProgressListener'), + (False, True, 'DoNothingProgressListener'), ], ) def test_make_progress_listener(tqdm_available, quiet, expected_listener, monkeypatch): if not tqdm_available: - monkeypatch.setattr("b2sdk._internal.progress.tqdm", None) + monkeypatch.setattr('b2sdk._internal.progress.tqdm', None) - assert make_progress_listener("description", quiet).__class__.__name__ == expected_listener + assert make_progress_listener('description', quiet).__class__.__name__ == expected_listener def test_tqdm_progress_listener__without_tqdm_module(monkeypatch): - monkeypatch.setattr("b2sdk._internal.progress.tqdm", None) + monkeypatch.setattr('b2sdk._internal.progress.tqdm', None) with pytest.raises(ModuleNotFoundError): - TqdmProgressListener("description") + TqdmProgressListener('description') diff --git a/test/unit/test_raw_simulator.py b/test/unit/test_raw_simulator.py index 9cfbf681d..a3c789540 100644 --- a/test/unit/test_raw_simulator.py +++ b/test/unit/test_raw_simulator.py @@ -10,12 +10,12 @@ from __future__ import annotations import hashlib -from test.helpers import patch_bind_params from unittest.mock import Mock import pytest from b2sdk import _v3 as v3 +from test.helpers import patch_bind_params @pytest.fixture @@ -34,12 +34,12 @@ def dummy_raw_simulator(apiver_module): def file_sim(apiver_module, dummy_bucket_simulator, file_info): data = b'dummy' return apiver_module.FileSimulator( - account_id="account_id", + account_id='account_id', bucket=dummy_bucket_simulator, - file_id="dummy-id", - action="upload", - name="dummy.txt", - content_type="text/plain", + file_id='dummy-id', + action='upload', + name='dummy.txt', + content_type='text/plain', content_sha1=hashlib.sha1(data).hexdigest(), file_info=file_info, data_bytes=data, @@ -47,22 +47,22 @@ def file_sim(apiver_module, dummy_bucket_simulator, file_info): server_side_encryption=apiver_module.EncryptionSetting( mode=apiver_module.EncryptionMode.SSE_C, algorithm=apiver_module.EncryptionAlgorithm.AES256, - key=apiver_module.EncryptionKey(key_id=None, secret=b"test"), - ) + key=apiver_module.EncryptionKey(key_id=None, secret=b'test'), + ), ) def test_file_sim__as_download_headers(file_sim): assert file_sim.as_download_headers() == { - 'content-length': "5", + 'content-length': '5', 'content-type': 'text/plain', 'x-bz-content-sha1': '829c3804401b0727f70f73d4415e162400cbe57b', - 'x-bz-upload-timestamp': "0", + 'x-bz-upload-timestamp': '0', 'x-bz-file-id': 'dummy-id', 'x-bz-file-name': 'dummy.txt', 'X-Bz-Info-key': 'value', 'X-Bz-Server-Side-Encryption-Customer-Algorithm': 'AES256', - 'X-Bz-Server-Side-Encryption-Customer-Key-Md5': 'CY9rzUYh03PK3k6DJie09g==' + 'X-Bz-Server-Side-Encryption-Customer-Key-Md5': 'CY9rzUYh03PK3k6DJie09g==', } @@ -71,8 +71,9 @@ def test_bucket_simulator__upload_file__supports_file_infos( apiver_module, dummy_bucket_simulator, file_info ): """Test v2.BucketSimulator.upload_file support of deprecated file_infos param""" - with patch_bind_params(v3.BucketSimulator, 'upload_file') as mock_method,\ - pytest.warns(DeprecationWarning, match=r'deprecated argument'): + with patch_bind_params(v3.BucketSimulator, 'upload_file') as mock_method, pytest.warns( + DeprecationWarning, match=r'deprecated argument' + ): assert dummy_bucket_simulator.upload_file( 'upload_id', 'upload_auth_token', @@ -83,15 +84,16 @@ def test_bucket_simulator__upload_file__supports_file_infos( file_infos=file_info, data_stream='data_stream', ) - assert 
mock_method.get_bound_call_args()["file_info"] == file_info + assert mock_method.get_bound_call_args()['file_info'] == file_info assert 'file_infos' not in mock_method.call_args[1] @pytest.mark.apiver(to_ver=2) def test_raw_simulator__get_upload_file_headers__supports_file_infos(apiver_module, file_info): """Test v2.RawSimulator.get_upload_file_headers support of deprecated file_infos param""" - with patch_bind_params(v3.RawSimulator, 'get_upload_file_headers') as mock_method,\ - pytest.warns(DeprecationWarning, match=r'deprecated argument'): + with patch_bind_params(v3.RawSimulator, 'get_upload_file_headers') as mock_method, pytest.warns( + DeprecationWarning, match=r'deprecated argument' + ): apiver_module.RawSimulator.get_upload_file_headers( upload_auth_token='upload_auth_token', file_name='file_name', @@ -103,15 +105,16 @@ def test_raw_simulator__get_upload_file_headers__supports_file_infos(apiver_modu legal_hold=None, file_infos=file_info, ) - assert mock_method.get_bound_call_args()["file_info"] == file_info + assert mock_method.get_bound_call_args()['file_info'] == file_info assert 'file_infos' not in mock_method.call_args[1] @pytest.mark.apiver(to_ver=2) def test_raw_simulator__upload_file__supports_file_infos(dummy_raw_simulator, file_info): """Test v2.RawSimulator.upload_file support of deprecated file_infos param""" - with patch_bind_params(v3.RawSimulator, 'upload_file') as mock_method,\ - pytest.warns(DeprecationWarning, match=r'deprecated argument'): + with patch_bind_params(v3.RawSimulator, 'upload_file') as mock_method, pytest.warns( + DeprecationWarning, match=r'deprecated argument' + ): dummy_raw_simulator.upload_file( 'upload_url', 'upload_auth_token', @@ -122,5 +125,5 @@ def test_raw_simulator__upload_file__supports_file_infos(dummy_raw_simulator, fi file_infos=file_info, data_stream='data_stream', ) - assert mock_method.get_bound_call_args()["file_info"] == file_info + assert mock_method.get_bound_call_args()['file_info'] == file_info assert 'file_infos' not in mock_method.call_args[1] diff --git a/test/unit/test_session.py b/test/unit/test_session.py index 1a1adabdc..bee5e1475 100644 --- a/test/unit/test_session.py +++ b/test/unit/test_session.py @@ -39,7 +39,7 @@ def setup(self, b2_session): allowed=mock.ANY, application_key_id='123', ), - marks=pytest.mark.apiver(from_ver=2) + marks=pytest.mark.apiver(from_ver=2), ), pytest.param( dict( @@ -54,7 +54,7 @@ def setup(self, b2_session): allowed=mock.ANY, application_key_id='123', ), - marks=pytest.mark.apiver(to_ver=1) + marks=pytest.mark.apiver(to_ver=1), ), ], ) @@ -77,7 +77,9 @@ def test_clear_cache(self): def test_session__with_in_memory_account_info(apiver_int): memory_info = InMemoryAccountInfo() - b2_session = B2Session(account_info=memory_info,) + b2_session = B2Session( + account_info=memory_info, + ) assert b2_session.account_info is memory_info diff --git a/test/unit/utils/test_docs.py b/test/unit/utils/test_docs.py index ce26ee468..6df5d310c 100644 --- a/test/unit/utils/test_docs.py +++ b/test/unit/utils/test_docs.py @@ -34,15 +34,16 @@ class MyCustomClass: @pytest.mark.parametrize( - 'type_,expected', [ + 'type_,expected', + [ (AbstractRawApi, {}), ( - LifecycleRule, { - 'B2 Cloud Storage Lifecycle Rules': - 'https://www.backblaze.com/docs/cloud-storage-lifecycle-rules', - } + LifecycleRule, + { + 'B2 Cloud Storage Lifecycle Rules': 'https://www.backblaze.com/docs/cloud-storage-lifecycle-rules', + }, ), - ] + ], ) def test_get_b2sdk_doc_urls(type_, expected): assert get_b2sdk_doc_urls(type_) == expected 
diff --git a/test/unit/utils/test_escape.py b/test/unit/utils/test_escape.py index 3d92c1bb4..10b881e68 100644 --- a/test/unit/utils/test_escape.py +++ b/test/unit/utils/test_escape.py @@ -18,9 +18,12 @@ @pytest.mark.parametrize( ( - "input_", "expected_unprintable_to_hex", "expected_escape_control_chars", - "expected_substitute_control_chars" - ), [ + 'input_', + 'expected_unprintable_to_hex', + 'expected_escape_control_chars', + 'expected_substitute_control_chars', + ), + [ ('', '', '', ('', False)), (' abc-z', ' abc-z', "' abc-z'", (' abc-z', False)), ('a\x7fb', 'a\\x7fb', "'a\\x7fb'", ('a�b', True)), @@ -28,14 +31,18 @@ ('a\x7fb\nc', 'a\\x7fb\nc', "'a\\x7fb\nc'", ('a�b\nc', True)), ('\x9bT\x9bEtest', '\\x9bT\\x9bEtest', "'\\x9bT\\x9bEtest'", ('�T�Etest', True)), ( - '\x1b[32mC\x1b[33mC\x1b[34mI', '\\x1b[32mC\\x1b[33mC\\x1b[34mI', - "'\\x1b[32mC\\x1b[33mC\\x1b[34mI'", ('�[32mC�[33mC�[34mI', True) + '\x1b[32mC\x1b[33mC\x1b[34mI', + '\\x1b[32mC\\x1b[33mC\\x1b[34mI', + "'\\x1b[32mC\\x1b[33mC\\x1b[34mI'", + ('�[32mC�[33mC�[34mI', True), ), - ] + ], ) def test_unprintable_to_hex( - input_, expected_unprintable_to_hex, expected_escape_control_chars, - expected_substitute_control_chars + input_, + expected_unprintable_to_hex, + expected_escape_control_chars, + expected_substitute_control_chars, ): assert unprintable_to_hex(input_) == expected_unprintable_to_hex assert escape_control_chars(input_) == expected_escape_control_chars diff --git a/test/unit/utils/test_filesystem.py b/test/unit/utils/test_filesystem.py index 278d25a73..b692adf6a 100644 --- a/test/unit/utils/test_filesystem.py +++ b/test/unit/utils/test_filesystem.py @@ -18,23 +18,23 @@ points_to_stdout, ) -EXPECTED_STDOUT_PATH = pathlib.Path("CON" if platform.system() == "Windows" else "/dev/stdout") +EXPECTED_STDOUT_PATH = pathlib.Path('CON' if platform.system() == 'Windows' else '/dev/stdout') class TestPointsToFifo: - @pytest.mark.skipif(platform.system() == "Windows", reason="no os.mkfifo() on Windows") + @pytest.mark.skipif(platform.system() == 'Windows', reason='no os.mkfifo() on Windows') def test_fifo_path(self, tmp_path): - fifo_path = tmp_path / "fifo" + fifo_path = tmp_path / 'fifo' os.mkfifo(fifo_path) assert points_to_fifo(fifo_path) is True def test_non_fifo_path(self, tmp_path): - path = tmp_path / "subdir" + path = tmp_path / 'subdir' path.mkdir(parents=True) assert points_to_fifo(path) is False def test_non_existent_path(self, tmp_path): - path = tmp_path / "file.txt" + path = tmp_path / 'file.txt' assert points_to_fifo(path) is False @@ -44,10 +44,10 @@ def test_stdout_path(self): assert points_to_stdout(STDOUT_FILEPATH) is True def test_non_stdout_path(self, tmp_path): - path = tmp_path / "file.txt" + path = tmp_path / 'file.txt' path.touch() assert points_to_stdout(path) is False def test_non_existent_stdout_path(self, tmp_path): - path = tmp_path / "file.txt" + path = tmp_path / 'file.txt' assert points_to_stdout(path) is False diff --git a/test/unit/utils/test_incremental_hex_digester.py b/test/unit/utils/test_incremental_hex_digester.py index 7635733e6..70c994f5c 100644 --- a/test/unit/utils/test_incremental_hex_digester.py +++ b/test/unit/utils/test_incremental_hex_digester.py @@ -11,12 +11,12 @@ import hashlib import io -from test.unit.test_base import TestBase from b2sdk._internal.utils import ( IncrementalHexDigester, Sha1HexDigest, ) +from test.unit.test_base import TestBase class TestIncrementalHexDigester(TestBase): @@ -69,7 +69,7 @@ def test_limited_and_unlimited_read(self): digester = 
self._get_digester(stream) for idx in range(blocks_count - 1): - expected_sha1_part = self._get_sha1(input_data[:limit * (idx + 1)]) + expected_sha1_part = self._get_sha1(input_data[: limit * (idx + 1)]) result_sha1_part = digester.update_from_stream(limit) self.assertEqual(expected_sha1_part, result_sha1_part) diff --git a/test/unit/utils/test_range_.py b/test/unit/utils/test_range_.py index a2ca1d229..5ff3c2d57 100644 --- a/test/unit/utils/test_range_.py +++ b/test/unit/utils/test_range_.py @@ -29,17 +29,18 @@ def test_range_initialization_invalid(apiver_module): def test_range_from_header(apiver_module): - r = apiver_module.Range.from_header("bytes=0-11") + r = apiver_module.Range.from_header('bytes=0-11') assert r.start == 0 assert r.end == 11 @pytest.mark.parametrize( - "raw_range_header, start, end, total_length", [ - ("bytes 0-11", 0, 11, None), - ("bytes 1-11/*", 1, 11, None), - ("bytes 10-110/200", 10, 110, 200), - ] + 'raw_range_header, start, end, total_length', + [ + ('bytes 0-11', 0, 11, None), + ('bytes 1-11/*', 1, 11, None), + ('bytes 10-110/200', 10, 110, 200), + ], ) def test_range_from_header_with_size(apiver_module, raw_range_header, start, end, total_length): r, length = apiver_module.Range.from_header_with_size(raw_range_header) @@ -72,7 +73,7 @@ def test_range_as_tuple(apiver_module): def test_range_repr(apiver_module): r = apiver_module.Range(0, 10) - assert repr(r) == "Range(0, 10)" + assert repr(r) == 'Range(0, 10)' def test_empty_range(apiver_module): diff --git a/test/unit/utils/test_thread_pool.py b/test/unit/utils/test_thread_pool.py index 82a3c4af0..e90e8b0ad 100644 --- a/test/unit/utils/test_thread_pool.py +++ b/test/unit/utils/test_thread_pool.py @@ -20,7 +20,6 @@ def thread_pool(self): return LazyThreadPool() def test_submit(self, thread_pool): - future = thread_pool.submit(sum, (1, 2)) assert isinstance(future, Future) assert future.result() == 3 diff --git a/test/unit/v0/test_bucket.py b/test/unit/v0/test_bucket.py index fd8754e2c..de44fc4b4 100644 --- a/test/unit/v0/test_bucket.py +++ b/test/unit/v0/test_bucket.py @@ -260,8 +260,11 @@ def test_error_in_state(self): large_file_upload_state.set_error('test error') try: self.bucket.api.services.upload_manager.upload_part( - self.bucket.id_, file1.file_id, UploadSourceBytes(content), 1, - large_file_upload_state + self.bucket.id_, + file1.file_id, + UploadSourceBytes(content), + 1, + large_file_upload_state, ).result() self.fail('should have thrown') except AlreadyFailed: @@ -297,8 +300,15 @@ def test_version_by_name(self): self.assertIsInstance(info, FileVersionInfo) expected = ( - a_id, 'a', 11, 'upload', 'b2/x-auto', 'none', NO_RETENTION_FILE_SETTING, - LegalHold.UNSET, None + a_id, + 'a', + 11, + 'upload', + 'b2/x-auto', + 'none', + NO_RETENTION_FILE_SETTING, + LegalHold.UNSET, + None, ) actual = ( info.id_, @@ -331,12 +341,13 @@ def test_version_by_name_file_lock(self): low_perm_account_info = StubAccountInfo() low_perm_api = B2Api(low_perm_account_info, raw_api=self.simulator) low_perm_key_resp = self.api.create_key( - key_name='lowperm', capabilities=[ + key_name='lowperm', + capabilities=[ 'listKeys', 'listBuckets', 'listFiles', 'readFiles', - ] + ], ) low_perm_api.authorize_account( @@ -378,7 +389,9 @@ def test_three_files_at_root(self): self.bucket.upload_bytes(data, 'bb') self.bucket.upload_bytes(data, 'ccc') expected = [ - ('a', 11, 'upload', None), ('bb', 11, 'upload', None), ('ccc', 11, 'upload', None) + ('a', 11, 'upload', None), + ('bb', 11, 'upload', None), + ('ccc', 11, 'upload', None), ] 
self.assertBucketContents(expected, '') @@ -391,8 +404,9 @@ def test_three_files_in_dir(self): self.bucket.upload_bytes(data, 'bb/3') self.bucket.upload_bytes(data, 'ccc') expected = [ - ('bb/1', 11, 'upload', None), ('bb/2/sub1', 11, 'upload', 'bb/2/'), - ('bb/3', 11, 'upload', None) + ('bb/1', 11, 'upload', None), + ('bb/2/sub1', 11, 'upload', 'bb/2/'), + ('bb/3', 11, 'upload', None), ] self.assertBucketContents(expected, 'bb', fetch_count=1) @@ -595,7 +609,7 @@ def test_copy_retention(self): ) self.assertEqual( FileRetentionSetting(RetentionMode.COMPLIANCE, 100), - resulting_file_version.file_retention + resulting_file_version.file_retention, ) self.assertEqual(LegalHold.ON, resulting_file_version.legal_hold) @@ -621,16 +635,18 @@ def test_copy_encryption(self): file_id=a_id, destination_encryption=SSE_C_AES, file_info={'new': 'value'}, - content_type='text/plain' - ), SSE_C_AES_NO_SECRET + content_type='text/plain', + ), + SSE_C_AES_NO_SECRET, ), ( dict( file_id=a_id, destination_encryption=SSE_C_AES, source_file_info={'old': 'value'}, - source_content_type='text/plain' - ), SSE_C_AES_NO_SECRET + source_content_type='text/plain', + ), + SSE_C_AES_NO_SECRET, ), (dict(file_id=b_id), SSE_NONE), (dict(file_id=b_id, source_encryption=SSE_B2_AES), SSE_NONE), @@ -638,8 +654,9 @@ def test_copy_encryption(self): dict( file_id=b_id, source_encryption=SSE_B2_AES, - destination_encryption=SSE_B2_AES - ), SSE_B2_AES + destination_encryption=SSE_B2_AES, + ), + SSE_B2_AES, ), ( dict( @@ -647,8 +664,9 @@ def test_copy_encryption(self): source_encryption=SSE_B2_AES, destination_encryption=SSE_C_AES, file_info={'new': 'value'}, - content_type='text/plain' - ), SSE_C_AES_NO_SECRET + content_type='text/plain', + ), + SSE_C_AES_NO_SECRET, ), ( dict( @@ -656,29 +674,33 @@ def test_copy_encryption(self): source_encryption=SSE_B2_AES, destination_encryption=SSE_C_AES, source_file_info={'old': 'value'}, - source_content_type='text/plain' - ), SSE_C_AES_NO_SECRET + source_content_type='text/plain', + ), + SSE_C_AES_NO_SECRET, ), ( dict( file_id=c_id, source_encryption=SSE_C_AES, file_info={'new': 'value'}, - content_type='text/plain' - ), SSE_NONE + content_type='text/plain', + ), + SSE_NONE, ), ( dict( file_id=c_id, source_encryption=SSE_C_AES, source_file_info={'old': 'value'}, - source_content_type='text/plain' - ), SSE_NONE + source_content_type='text/plain', + ), + SSE_NONE, ), ( dict( file_id=c_id, source_encryption=SSE_C_AES, destination_encryption=SSE_C_AES - ), SSE_C_AES_NO_SECRET + ), + SSE_C_AES_NO_SECRET, ), ( dict( @@ -686,8 +708,9 @@ def test_copy_encryption(self): source_encryption=SSE_C_AES, destination_encryption=SSE_B2_AES, source_file_info={'old': 'value'}, - source_content_type='text/plain' - ), SSE_B2_AES + source_content_type='text/plain', + ), + SSE_B2_AES, ), ( dict( @@ -695,8 +718,9 @@ def test_copy_encryption(self): source_encryption=SSE_C_AES, destination_encryption=SSE_B2_AES, file_info={'new': 'value'}, - content_type='text/plain' - ), SSE_B2_AES + content_type='text/plain', + ), + SSE_B2_AES, ), ( dict( @@ -704,8 +728,9 @@ def test_copy_encryption(self): source_encryption=SSE_C_AES, destination_encryption=SSE_C_AES_2, source_file_info={'old': 'value'}, - source_content_type='text/plain' - ), SSE_C_AES_2_NO_SECRET + source_content_type='text/plain', + ), + SSE_C_AES_2_NO_SECRET, ), ( dict( @@ -713,8 +738,9 @@ def test_copy_encryption(self): source_encryption=SSE_C_AES, destination_encryption=SSE_C_AES_2, file_info={'new': 'value'}, - content_type='text/plain' - ), 
SSE_C_AES_2_NO_SECRET + content_type='text/plain', + ), + SSE_C_AES_2_NO_SECRET, ), ]: with self.subTest(kwargs=kwargs, length=length, data=data): @@ -788,7 +814,7 @@ def test_upload_local_file_retention(self): 'file1', encryption=SSE_C_AES, file_retention=retention, - legal_hold=LegalHold.ON + legal_hold=LegalHold.ON, ) self._check_file_contents('file1', data) self.assertEqual(retention, file_info.file_retention) @@ -889,8 +915,8 @@ def test_upload_large_resume_all_parts_there(self): data = self._make_data(part_size * 3) large_file_id = self._start_large_file('file1') self._upload_part(large_file_id, 1, data[:part_size]) - self._upload_part(large_file_id, 2, data[part_size:2 * part_size]) - self._upload_part(large_file_id, 3, data[2 * part_size:]) + self._upload_part(large_file_id, 2, data[part_size : 2 * part_size]) + self._upload_part(large_file_id, 3, data[2 * part_size :]) progress_listener = StubProgressListener() file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener) self.assertEqual(large_file_id, file_info.id_) @@ -912,7 +938,7 @@ def test_upload_large_resume_wrong_part_size(self): part_size = self.simulator.MIN_PART_SIZE data = self._make_data(part_size * 3) large_file_id = self._start_large_file('file1') - self._upload_part(large_file_id, 1, data[:part_size + 1]) # one byte to much + self._upload_part(large_file_id, 1, data[: part_size + 1]) # one byte to much progress_listener = StubProgressListener() file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener) self.assertNotEqual(large_file_id, file_info.id_) @@ -959,8 +985,12 @@ def _upload_part(self, large_file_id, part_number, part_data): self.api_url, self.account_auth_token, large_file_id ) self.simulator.upload_part( - upload_info['uploadUrl'], upload_info['authorizationToken'], part_number, - len(part_data), hex_sha1_of_bytes(part_data), part_stream + upload_info['uploadUrl'], + upload_info['authorizationToken'], + part_number, + len(part_data), + hex_sha1_of_bytes(part_data), + part_stream, ) def _check_file_contents(self, file_name, expected_contents): @@ -988,12 +1018,14 @@ def test_create_remote(self): UploadSourceLocalFile(path), CopySource(f2_id, length=len(data), offset=0), ], - file_name='created_file' + file_name='created_file', ) self.assertIsInstance(created_file, FileVersionInfo) actual = ( - created_file.id_, created_file.file_name, created_file.size, - created_file.server_side_encryption + created_file.id_, + created_file.file_name, + created_file.size, + created_file.server_side_encryption, ) expected = ('9997', 'created_file', 33, SSE_NONE) self.assertEqual(expected, actual) @@ -1012,19 +1044,21 @@ def test_create_remote_encryption(self): CopySource(f2_id, length=len(data), offset=0, encryption=SSE_C_AES_2), ], file_name=f'created_file_{len(data)}', - encryption=SSE_C_AES + encryption=SSE_C_AES, ) self.assertIsInstance(created_file, FileVersionInfo) actual = ( - created_file.id_, created_file.file_name, created_file.size, - created_file.server_side_encryption + created_file.id_, + created_file.file_name, + created_file.size, + created_file.server_side_encryption, ) expected = ( mock.ANY, f'created_file_{len(data)}', mock.ANY, # FIXME: this should be equal to len(data) * 3, # but there is a problem in the simulator/test code somewhere - SSE_C_AES_NO_SECRET + SSE_C_AES_NO_SECRET, ) self.assertEqual(expected, actual) @@ -1034,7 +1068,7 @@ def _create_remote(self, sources, file_name, encryption=None): return self.bucket.create_file( [wi for wi in 
WriteIntent.wrap_sources_iterator(sources)], file_name=file_name, - encryption=encryption + encryption=encryption, ) @@ -1048,7 +1082,7 @@ def _create_remote(self, sources, file_name, encryption=None): return self.bucket.create_file_stream( [wi for wi in WriteIntent.wrap_sources_iterator(sources)], file_name=file_name, - encryption=encryption + encryption=encryption, ) @@ -1197,7 +1231,7 @@ def _check_local_file_contents(self, path, expected_contents): class EmptyFileDownloadScenarioMixin: - """ use with DownloadTests, but not for TestDownloadParallel as it does not like empty files """ + """use with DownloadTests, but not for TestDownloadParallel as it does not like empty files""" def test_download_by_name_empty_file(self): self.file_info = self.bucket.upload_bytes(b'', 'empty') diff --git a/test/unit/v0/test_copy_manager.py b/test/unit/v0/test_copy_manager.py index 082c6e98f..3e8a0d529 100644 --- a/test/unit/v0/test_copy_manager.py +++ b/test/unit/v0/test_copy_manager.py @@ -39,7 +39,11 @@ class TestCopyManager(TestBase): def test_establish_sse_c_replace(self): file_info = {'some_key': 'some_value'} content_type = 'text/plain' - metadata_directive, new_file_info, new_content_type = CopyManager.establish_sse_c_file_metadata( + ( + metadata_directive, + new_file_info, + new_content_type, + ) = CopyManager.establish_sse_c_file_metadata( MetadataDirectiveMode.REPLACE, destination_file_info=file_info, destination_content_type=content_type, @@ -50,17 +54,21 @@ def test_establish_sse_c_replace(self): ) self.assertEqual( ( - MetadataDirectiveMode.REPLACE, { - 'some_key': 'some_value', - SSE_C_KEY_ID_FILE_INFO_KEY_NAME: 'some-id' - }, content_type - ), (metadata_directive, new_file_info, new_content_type) + MetadataDirectiveMode.REPLACE, + {'some_key': 'some_value', SSE_C_KEY_ID_FILE_INFO_KEY_NAME: 'some-id'}, + content_type, + ), + (metadata_directive, new_file_info, new_content_type), ) def test_establish_sse_c_copy_no_enc(self): file_info = {} content_type = 'text/plain' - metadata_directive, new_file_info, new_content_type = CopyManager.establish_sse_c_file_metadata( + ( + metadata_directive, + new_file_info, + new_content_type, + ) = CopyManager.establish_sse_c_file_metadata( MetadataDirectiveMode.COPY, destination_file_info=file_info, destination_content_type=content_type, @@ -71,13 +79,17 @@ def test_establish_sse_c_copy_no_enc(self): ) self.assertEqual( (MetadataDirectiveMode.COPY, {}, content_type), - (metadata_directive, new_file_info, new_content_type) + (metadata_directive, new_file_info, new_content_type), ) def test_establish_sse_c_copy_b2(self): file_info = {} content_type = 'text/plain' - metadata_directive, new_file_info, new_content_type = CopyManager.establish_sse_c_file_metadata( + ( + metadata_directive, + new_file_info, + new_content_type, + ) = CopyManager.establish_sse_c_file_metadata( MetadataDirectiveMode.COPY, destination_file_info=file_info, destination_content_type=content_type, @@ -88,13 +100,17 @@ def test_establish_sse_c_copy_b2(self): ) self.assertEqual( (MetadataDirectiveMode.COPY, {}, content_type), - (metadata_directive, new_file_info, new_content_type) + (metadata_directive, new_file_info, new_content_type), ) def test_establish_sse_c_copy_same_key_id(self): file_info = None content_type = 'text/plain' - metadata_directive, new_file_info, new_content_type = CopyManager.establish_sse_c_file_metadata( + ( + metadata_directive, + new_file_info, + new_content_type, + ) = CopyManager.establish_sse_c_file_metadata( MetadataDirectiveMode.COPY, 
destination_file_info=file_info, destination_content_type=content_type, @@ -105,11 +121,15 @@ def test_establish_sse_c_copy_same_key_id(self): ) self.assertEqual( (MetadataDirectiveMode.COPY, None, content_type), - (metadata_directive, new_file_info, new_content_type) + (metadata_directive, new_file_info, new_content_type), ) def test_establish_sse_c_copy_sources_given(self): - metadata_directive, new_file_info, new_content_type = CopyManager.establish_sse_c_file_metadata( + ( + metadata_directive, + new_file_info, + new_content_type, + ) = CopyManager.establish_sse_c_file_metadata( MetadataDirectiveMode.COPY, destination_file_info=None, destination_content_type=None, @@ -117,25 +137,23 @@ def test_establish_sse_c_copy_sources_given(self): source_server_side_encryption=SSE_C_AES_2, source_file_info={ 'some_key': 'some_value', - SSE_C_KEY_ID_FILE_INFO_KEY_NAME: 'some-id-2' + SSE_C_KEY_ID_FILE_INFO_KEY_NAME: 'some-id-2', }, source_content_type='text/plain', ) self.assertEqual( ( - MetadataDirectiveMode.REPLACE, { - 'some_key': 'some_value', - SSE_C_KEY_ID_FILE_INFO_KEY_NAME: 'some-id' - }, 'text/plain' - ), (metadata_directive, new_file_info, new_content_type) + MetadataDirectiveMode.REPLACE, + {'some_key': 'some_value', SSE_C_KEY_ID_FILE_INFO_KEY_NAME: 'some-id'}, + 'text/plain', + ), + (metadata_directive, new_file_info, new_content_type), ) def test_establish_sse_c_copy_sources_unknown(self): for source_file_info, source_content_type in [ (None, None), - ({ - 'a': 'b' - }, None), + ({'a': 'b'}, None), (None, 'text/plain'), ]: with self.subTest( @@ -144,7 +162,7 @@ def test_establish_sse_c_copy_sources_unknown(self): with self.assertRaises( SSECKeyIdMismatchInCopy, 'attempting to copy file using MetadataDirectiveMode.COPY without providing source_file_info ' - 'and source_content_type for differing sse_c_key_ids: source="some-id-2", destination="some-id"' + 'and source_content_type for differing sse_c_key_ids: source="some-id-2", destination="some-id"', ): CopyManager.establish_sse_c_file_metadata( MetadataDirectiveMode.COPY, diff --git a/test/unit/v0/test_download_dest.py b/test/unit/v0/test_download_dest.py index f1d4013c2..da43a0784 100644 --- a/test/unit/v0/test_download_dest.py +++ b/test/unit/v0/test_download_dest.py @@ -25,7 +25,7 @@ class TestDownloadDestLocalFile(TestBase): expected_result = 'hello world' def _make_dest(self, temp_dir): - file_path = os.path.join(temp_dir, "test.txt") + file_path = os.path.join(temp_dir, 'test.txt') return DownloadDestLocalFile(file_path), file_path def test_write_and_set_mod_time(self): @@ -36,7 +36,7 @@ def test_write_and_set_mod_time(self): with tempfile.TemporaryDirectory() as temp_dir: download_dest, file_path = self._make_dest(temp_dir) with download_dest.make_file_context( - "file_id", "file_name", 100, "content_type", "sha1", {}, mod_time + 'file_id', 'file_name', 100, 'content_type', 'sha1', {}, mod_time ) as f: f.write(b'hello world') with open(file_path, 'rb') as f: @@ -51,7 +51,7 @@ def test_failed_write_deletes_partial_file(self): download_dest, file_path = self._make_dest(temp_dir) try: with download_dest.make_file_context( - "file_id", "file_name", 100, "content_type", "sha1", {}, 1500222333000 + 'file_id', 'file_name', 100, 'content_type', 'sha1', {}, 1500222333000 ) as f: f.write(b'hello world') raise Exception('test error') @@ -64,7 +64,7 @@ class TestPreSeekedDownloadDest(TestDownloadDestLocalFile): expected_result = '123hello world567890' def _make_dest(self, temp_dir): - file_path = os.path.join(temp_dir, "test.txt") + 
file_path = os.path.join(temp_dir, 'test.txt') with open(file_path, 'wb') as f: f.write(b'12345678901234567890') return PreSeekedDownloadDest(local_file_path=file_path, seek_target=3), file_path @@ -77,12 +77,12 @@ def test_write_and_set_mod_time_and_progress(self): """ mod_time = 1500222333000 with tempfile.TemporaryDirectory() as temp_dir: - file_path = os.path.join(temp_dir, "test.txt") + file_path = os.path.join(temp_dir, 'test.txt') download_local_file = DownloadDestLocalFile(file_path) progress_listener = ProgressListenerForTest() download_dest = DownloadDestProgressWrapper(download_local_file, progress_listener) with download_dest.make_file_context( - "file_id", "file_name", 100, "content_type", "sha1", {}, mod_time + 'file_id', 'file_name', 100, 'content_type', 'sha1', {}, mod_time ) as f: f.write(b'hello world\n') with open(file_path, 'rb') as f: diff --git a/test/unit/v0/test_file_metadata.py b/test/unit/v0/test_file_metadata.py index 2514aa3e2..0d4d79c9a 100644 --- a/test/unit/v0/test_file_metadata.py +++ b/test/unit/v0/test_file_metadata.py @@ -26,7 +26,7 @@ class TestFileMetadata(TestBase): 'content_length': '1', 'content_sha1': '4518012e1b365e504001dbc94120624f15b8bbd5', 'file_info': {}, - } # yapf: disable + } INFO_DICT = {snake_to_camel(k): v for k, v in KWARGS.items()} def test_verified_sha1(self): diff --git a/test/unit/v0/test_policy.py b/test/unit/v0/test_policy.py index eca47dd2a..7efa9710e 100644 --- a/test/unit/v0/test_policy.py +++ b/test/unit/v0/test_policy.py @@ -34,23 +34,26 @@ def test_no_source_one_old_version_hides(self): def test_old_hide_causes_delete(self): # A hide marker that is old gets deleted, as do the things after it. self.check_one_answer( - True, [(1, -5, 'upload'), (2, -10, 'hide'), (3, -20, 'upload')], - ['b2_delete(folder/a, 2, (hide marker))', 'b2_delete(folder/a, 3, (old version))'] + True, + [(1, -5, 'upload'), (2, -10, 'hide'), (3, -20, 'upload')], + ['b2_delete(folder/a, 2, (hide marker))', 'b2_delete(folder/a, 3, (old version))'], ) def test_old_upload_causes_delete(self): # An upload that is old stays if there is a source file, but things # behind it go away. self.check_one_answer( - True, [(1, -5, 'upload'), (2, -10, 'upload'), (3, -20, 'upload')], - ['b2_delete(folder/a, 3, (old version))'] + True, + [(1, -5, 'upload'), (2, -10, 'upload'), (3, -20, 'upload')], + ['b2_delete(folder/a, 3, (old version))'], ) def test_out_of_order_dates(self): # The one at date -3 will get deleted because the one before it is old. 
self.check_one_answer( - True, [(1, -5, 'upload'), (2, -10, 'upload'), (3, -3, 'upload')], - ['b2_delete(folder/a, 3, (old version))'] + True, + [(1, -5, 'upload'), (2, -10, 'upload'), (3, -3, 'upload')], + ['b2_delete(folder/a, 3, (old version))'], ) def check_one_answer(self, has_source, id_relative_date_action_list, expected_actions): @@ -65,11 +68,14 @@ def check_one_answer(self, has_source, id_relative_date_action_list, expected_ac file_info={}, content_type='text/plain', content_sha1='content_sha1', - ) for (id_, relative_date, action) in id_relative_date_action_list + ) + for (id_, relative_date, action) in id_relative_date_action_list ] - dest_file = B2SyncPath( - 'a', selected_version=dest_file_versions[0], all_versions=dest_file_versions - ) if dest_file_versions else None + dest_file = ( + B2SyncPath('a', selected_version=dest_file_versions[0], all_versions=dest_file_versions) + if dest_file_versions + else None + ) bucket = MagicMock() api = MagicMock() api.get_bucket_by_name.return_value = bucket diff --git a/test/unit/v0/test_raw_api.py b/test/unit/v0/test_raw_api.py index 2b46bee0b..e41f8d95d 100644 --- a/test/unit/v0/test_raw_api.py +++ b/test/unit/v0/test_raw_api.py @@ -42,7 +42,7 @@ def _should_be_ok(self, filename): :param filename: unicode (or str) that follows the rules """ - print(f"Filename \"{filename}\" should be OK") + print(f'Filename "{filename}" should be OK') self.assertTrue(self.raw_api.check_b2_filename(filename) is None) def _should_raise(self, filename, exception_message): @@ -51,9 +51,7 @@ def _should_raise(self, filename, exception_message): :param filename: unicode (or str) that doesn't follow the rules :param exception_message: regexp that matches the exception's detailed message """ - print( - f"Filename \"{filename}\" should raise UnusableFileName(\".*{exception_message}.*\")." - ) + print(f'Filename "{filename}" should raise UnusableFileName(".*{exception_message}.*").') with self.assertRaisesRegex(UnusableFileName, exception_message): self.raw_api.check_b2_filename(filename) @@ -68,7 +66,7 @@ def test_b2_filename_checker(self): - File names cannot start with "/", end with "/", or contain "//". - Maximum of 250 bytes of UTF-8 in each segment (part between slashes) of a file name. """ - print("test b2 filename rules") + print('test b2 filename rules') # Examples from doc: self._should_be_ok('Kitten Videos') @@ -79,28 +77,28 @@ def test_b2_filename_checker(self): s_1024 = 4 * (250 * 'x' + '/') + 20 * 'y' self._should_be_ok(s_1024) # 1025 is too long. - self._should_raise(s_1024 + 'x', "too long") + self._should_raise(s_1024 + 'x', 'too long') # 1024 bytes with two byte characters should also work. s_1024_two_byte = 4 * (125 * TWO_BYTE_UNICHR + '/') + 20 * 'y' self._should_be_ok(s_1024_two_byte) # But 1025 bytes is too long. - self._should_raise(s_1024_two_byte + 'x', "too long") + self._should_raise(s_1024_two_byte + 'x', 'too long') # Names with unicode values < 32, and DEL aren't allowed. - self._should_raise('hey' + CHAR_UNDER_32, "contains code.*less than 32") + self._should_raise('hey' + CHAR_UNDER_32, 'contains code.*less than 32') # Unicode in the filename shouldn't break the exception message. 
- self._should_raise(TWO_BYTE_UNICHR + CHAR_UNDER_32, "contains code.*less than 32") - self._should_raise(DEL_CHAR, "DEL.*not allowed") + self._should_raise(TWO_BYTE_UNICHR + CHAR_UNDER_32, 'contains code.*less than 32') + self._should_raise(DEL_CHAR, 'DEL.*not allowed') # Names can't start or end with '/' or contain '//' - self._should_raise('/hey', "not start.*/") - self._should_raise('hey/', "not .*end.*/") - self._should_raise('not//allowed', "contain.*//") + self._should_raise('/hey', 'not start.*/') + self._should_raise('hey/', 'not .*end.*/') + self._should_raise('not//allowed', 'contain.*//') # Reject segments longer than 250 bytes - self._should_raise('foo/' + 251 * 'x', "segment too long") + self._should_raise('foo/' + 251 * 'x', 'segment too long') # So a segment of 125 two-byte chars plus one should also fail. - self._should_raise('foo/' + 125 * TWO_BYTE_UNICHR + 'x', "segment too long") + self._should_raise('foo/' + 125 * TWO_BYTE_UNICHR + 'x', 'segment too long') class BucketTestBase: @@ -123,13 +121,15 @@ def test_assertion_raises(self): self.raw_api.update_bucket('test', 'account_auth_token', 'account_id', 'bucket_id') @pytest.mark.parametrize( - 'bucket_type,bucket_info,default_retention', ( + 'bucket_type,bucket_info,default_retention', + ( (None, {}, None), ( - 'allPublic', None, - BucketRetentionSetting(RetentionMode.COMPLIANCE, RetentionPeriod(years=1)) + 'allPublic', + None, + BucketRetentionSetting(RetentionMode.COMPLIANCE, RetentionPeriod(years=1)), ), - ) + ), ) def test_assertion_not_raises(self, bucket_type, bucket_info, default_retention): self.raw_api.update_bucket( @@ -143,14 +143,17 @@ def test_assertion_not_raises(self, bucket_type, bucket_info, default_retention) ) @pytest.mark.parametrize( - 'encryption_setting,', ( + 'encryption_setting,', + ( EncryptionSetting( mode=EncryptionMode.SSE_C, algorithm=EncryptionAlgorithm.AES256, - key=EncryptionKey(b'key', 'key-id') + key=EncryptionKey(b'key', 'key-id'), ), - EncryptionSetting(mode=EncryptionMode.UNKNOWN,), - ) + EncryptionSetting( + mode=EncryptionMode.UNKNOWN, + ), + ), ) def test_update_bucket_wrong_encryption(self, encryption_setting): with pytest.raises(WrongEncryptionModeForBucketDefault): @@ -168,17 +171,19 @@ class TestCreateBucket(BucketTestBase): """Test creating bucket.""" @pytest.mark.parametrize( - 'encryption_setting,', ( + 'encryption_setting,', + ( EncryptionSetting( mode=EncryptionMode.SSE_C, algorithm=EncryptionAlgorithm.AES256, - key=EncryptionKey(b'key', 'key-id') + key=EncryptionKey(b'key', 'key-id'), ), - EncryptionSetting(mode=EncryptionMode.UNKNOWN,), - ) + EncryptionSetting( + mode=EncryptionMode.UNKNOWN, + ), + ), ) def test_create_bucket_wrong_encryption(self, encryption_setting): - with pytest.raises(WrongEncryptionModeForBucketDefault): self.raw_api.create_bucket( 'test', diff --git a/test/unit/v0/test_session.py b/test/unit/v0/test_session.py index 7ea786d6c..505982ede 100644 --- a/test/unit/v0/test_session.py +++ b/test/unit/v0/test_session.py @@ -84,6 +84,6 @@ def test_app_key_info_all_info(self): self.raw_api.get_file_info_by_id.side_effect = Unauthorized('no_go', 'code') with self.assertRaisesRegex( Unauthorized, - r"no_go for application key with capabilities 'readFiles', restricted to bucket 'my-bucket', restricted to files that start with 'prefix/' \(code\)" + r"no_go for application key with capabilities 'readFiles', restricted to bucket 'my-bucket', restricted to files that start with 'prefix/' \(code\)", ): self.session.get_file_info_by_id(None) diff --git 
a/test/unit/v0/test_sync.py b/test/unit/v0/test_sync.py index 273fccdf2..4ec679284 100644 --- a/test/unit/v0/test_sync.py +++ b/test/unit/v0/test_sync.py @@ -92,7 +92,7 @@ def prepare_folder( prepare_files=True, broken_symlink=False, invalid_permissions=False, - use_file_versions_info=False + use_file_versions_info=False, ): raise NotImplementedError @@ -286,7 +286,7 @@ def prepare_folder( prepare_files=True, broken_symlink=False, invalid_permissions=False, - use_file_versions_info=False + use_file_versions_info=False, ): assert not (broken_symlink and invalid_permissions) @@ -300,7 +300,7 @@ def prepare_folder( if broken_symlink: os.symlink( os.path.join(self.root_dir, 'non_existant_file'), - os.path.join(self.root_dir, 'bad_symlink') + os.path.join(self.root_dir, 'bad_symlink'), ) elif invalid_permissions: os.chmod(os.path.join(self.root_dir, self.NAMES[0]), 0) @@ -350,8 +350,9 @@ def test_invalid_permissions(self): def test_syncable_paths(self): syncable_paths = ( - ('test.txt', 'test.txt'), ('./a/test.txt', 'a/test.txt'), - ('./a/../test.txt', 'test.txt') + ('test.txt', 'test.txt'), + ('./a/test.txt', 'a/test.txt'), + ('./a/../test.txt', 'test.txt'), ) folder = self.prepare_folder(prepare_files=False) @@ -372,38 +373,39 @@ class TestB2Folder(TestFolder): __test__ = True FILE_VERSION_INFOS = { - os.path.join('inner', 'a.txt'): - [ - ( - FileVersionInfo( - 'a2', 'inner/a.txt', 200, 'text/plain', 'sha1', {}, 2000, 'upload' - ), '' - ), - ( - FileVersionInfo( - 'a1', 'inner/a.txt', 100, 'text/plain', 'sha1', {}, 1000, 'upload' - ), '' - ) - ], - os.path.join('inner', 'b.txt'): - [ - ( - FileVersionInfo( - 'b2', 'inner/b.txt', 200, 'text/plain', 'sha1', {}, 1999, 'upload' - ), '' - ), - ( - FileVersionInfo( - 'bs', 'inner/b.txt', 150, 'text/plain', 'sha1', {}, 1500, 'start' - ), '' + os.path.join('inner', 'a.txt'): [ + ( + FileVersionInfo('a2', 'inner/a.txt', 200, 'text/plain', 'sha1', {}, 2000, 'upload'), + '', + ), + ( + FileVersionInfo('a1', 'inner/a.txt', 100, 'text/plain', 'sha1', {}, 1000, 'upload'), + '', + ), + ], + os.path.join('inner', 'b.txt'): [ + ( + FileVersionInfo('b2', 'inner/b.txt', 200, 'text/plain', 'sha1', {}, 1999, 'upload'), + '', + ), + ( + FileVersionInfo('bs', 'inner/b.txt', 150, 'text/plain', 'sha1', {}, 1500, 'start'), + '', + ), + ( + FileVersionInfo( + 'b1', + 'inner/b.txt', + 100, + 'text/plain', + 'sha1', + {'src_last_modified_millis': 1001}, + 6666, + 'upload', ), - ( - FileVersionInfo( - 'b1', 'inner/b.txt', 100, 'text/plain', 'sha1', - {'src_last_modified_millis': 1001}, 6666, 'upload' - ), '' - ) - ] + '', + ), + ], } def setUp(self): @@ -418,7 +420,7 @@ def prepare_folder( prepare_files=True, broken_symlink=False, invalid_permissions=False, - use_file_versions_info=False + use_file_versions_info=False, ): if prepare_files: for relative_path in self.NAMES: @@ -437,9 +439,16 @@ def prepare_file(self, relative_path, use_file_versions_info=False): self.bucket.ls.return_value.append( ( FileVersionInfo( - relative_path, relative_path, 100, 'text/plain', 'sha1', {}, - self.MOD_TIMES[relative_path], 'upload' - ), self.root_dir + relative_path, + relative_path, + 100, + 'text/plain', + 'sha1', + {}, + self.MOD_TIMES[relative_path], + 'upload', + ), + self.root_dir, ) ) else: @@ -447,7 +456,8 @@ def prepare_file(self, relative_path, use_file_versions_info=False): ( FileVersionInfo( relative_path, relative_path, 100, 'text/plain', 'sha1', {}, TODAY, 'upload' - ), self.root_dir + ), + self.root_dir, ) ) @@ -462,11 +472,13 @@ def test_multiple_versions(self): 
self.assertEqual( [ "B2Path(inner/a.txt, [('a2', 2000, 'upload'), ('a1', 1000, 'upload')])", - "B2Path(inner/b.txt, [('b2', 1999, 'upload'), ('b1', 1001, 'upload')])" - ], [ - str(f) for f in folder.all_files(self.reporter) + "B2Path(inner/b.txt, [('b2', 1999, 'upload'), ('b1', 1001, 'upload')])", + ], + [ + str(f) + for f in folder.all_files(self.reporter) if f.relative_path in ('inner/a.txt', 'inner/b.txt') - ] + ], ) def test_exclude_modified_multiple_versions(self): @@ -475,10 +487,12 @@ def test_exclude_modified_multiple_versions(self): ) folder = self.prepare_folder(use_file_versions_info=True) self.assertEqual( - ["B2Path(inner/b.txt, [('b2', 1999, 'upload'), ('b1', 1001, 'upload')])"], [ - str(f) for f in folder.all_files(self.reporter, policies_manager=polices_manager) + ["B2Path(inner/b.txt, [('b2', 1999, 'upload'), ('b1', 1001, 'upload')])"], + [ + str(f) + for f in folder.all_files(self.reporter, policies_manager=polices_manager) if f.relative_path in ('inner/a.txt', 'inner/b.txt') - ] + ], ) def test_exclude_modified_all_versions(self): @@ -546,7 +560,7 @@ def test_unsyncable_filenames(self): r'a\\.\\b', ] - if platform.system() == "Windows": + if platform.system() == 'Windows': filenames_to_test.extend(self.NOT_SYNCD_ON_WINDOWS) for filename in filenames_to_test: @@ -587,7 +601,7 @@ def test_syncable_filenames(self): ] # filenames not permitted on Windows *should* be allowed on Linux - if platform.system() != "Windows": + if platform.system() != 'Windows': filenames_to_test.extend(self.NOT_SYNCD_ON_WINDOWS) for filename in filenames_to_test: @@ -649,8 +663,9 @@ def file_versions_from_file_tuples(cls, name, mod_times, size=10): file_info={'in_b2': 'yes'}, content_type='text/plain', content_sha1='content_sha1', - ) for mod_time in mod_times - ] # yapf disable + ) + for mod_time in mod_times + ] @classmethod def sync_path_from_file_tuple(cls, name, mod_times, size=10): @@ -695,7 +710,7 @@ def _check_one(self, expected, to_parse): class TestFolderExceptions: """There is an exact copy of this class in unit/v1/test_sync.py - TODO: leave only one when migrating tests to - sync-like structure. + sync-like structure. """ @pytest.mark.parametrize( @@ -739,7 +754,7 @@ def test_ensure_present_unable_to_create(self, exception, msg): pytest.param( CommandError, 'Directory .* is empty. 
Use --allowEmptySource to sync anyway.', - marks=pytest.mark.apiver(to_ver=1) + marks=pytest.mark.apiver(to_ver=1), ), ], ) @@ -759,7 +774,6 @@ def test_ensure_non_empty(self, exception, msg): ], ) def test_double_slash_not_allowed(self, exception, msg): - with pytest.raises(exception, match=msg): parse_sync_folder('b2://a//b', MagicMock()) @@ -771,18 +785,18 @@ def test_empty(self): self.assertEqual([], list(zip_folders(folder_a, folder_b, self.reporter))) def test_one_empty(self): - file_a1 = LocalSyncPath("a.txt", "a.txt", 100, 10) + file_a1 = LocalSyncPath('a.txt', 'a.txt', 100, 10) folder_a = FakeLocalFolder([file_a1]) folder_b = FakeB2Folder([]) self.assertEqual([(file_a1, None)], list(zip_folders(folder_a, folder_b, self.reporter))) def test_two(self): - file_a1 = ("a.txt", 100, 10) - file_a2 = ("b.txt", 100, 10) - file_a3 = ("d.txt", 100, 10) - file_a4 = ("f.txt", 100, 10) - file_b1 = ("b.txt", 200, 10) - file_b2 = ("e.txt", 200, 10) + file_a1 = ('a.txt', 100, 10) + file_a2 = ('b.txt', 100, 10) + file_a3 = ('d.txt', 100, 10) + file_a4 = ('f.txt', 100, 10) + file_b1 = ('b.txt', 200, 10) + file_b2 = ('e.txt', 200, 10) folder_a = FakeB2Folder([file_a1, file_a2, file_a3, file_a4]) folder_b = FakeB2Folder([file_b1, file_b2]) self.assertEqual( @@ -790,11 +804,13 @@ def test_two(self): (FakeB2Folder.sync_path_from_file_tuple(*file_a1), None), ( FakeB2Folder.sync_path_from_file_tuple(*file_a2), - FakeB2Folder.sync_path_from_file_tuple(*file_b1) - ), (FakeB2Folder.sync_path_from_file_tuple(*file_a3), None), + FakeB2Folder.sync_path_from_file_tuple(*file_b1), + ), + (FakeB2Folder.sync_path_from_file_tuple(*file_a3), None), (None, FakeB2Folder.sync_path_from_file_tuple(*file_b2)), - (FakeB2Folder.sync_path_from_file_tuple(*file_a4), None) - ], list(zip_folders(folder_a, folder_b, self.reporter)) + (FakeB2Folder.sync_path_from_file_tuple(*file_a4), None), + ], + list(zip_folders(folder_a, folder_b, self.reporter)), ) def test_pass_reporter_to_folder(self): @@ -885,7 +901,7 @@ def _check_folder_sync(self, expected_actions, fakeargs): exclude_dir_regexes=fakeargs.excludeDirRegex, exclude_file_regexes=fakeargs.excludeRegex, include_file_regexes=fakeargs.includeRegex, - exclude_all_symlinks=fakeargs.excludeAllSymlinks + exclude_all_symlinks=fakeargs.excludeAllSymlinks, ) actions = list( make_folder_sync_actions( @@ -902,7 +918,7 @@ def test_file_exclusions_with_delete(self): 'b2_upload(/dir/d/d.txt, folder/d/d.txt, 100)', 'b2_upload(/dir/e/e.incl, folder/e/e.incl, 100)', ] - self._check_folder_sync(expected_actions, FakeArgs(delete=True, excludeRegex=["b\\.txt"])) + self._check_folder_sync(expected_actions, FakeArgs(delete=True, excludeRegex=['b\\.txt'])) def test_file_exclusions_inclusions_with_delete(self): expected_actions = [ @@ -913,7 +929,7 @@ def test_file_exclusions_inclusions_with_delete(self): 'b2_upload(/dir/e/e.incl, folder/e/e.incl, 100)', 'b2_upload(/dir/b.txt.incl, folder/b.txt.incl, 100)', ] - fakeargs = FakeArgs(delete=True, excludeRegex=["b\\.txt"], includeRegex=[".*\\.incl"]) + fakeargs = FakeArgs(delete=True, excludeRegex=['b\\.txt'], includeRegex=['.*\\.incl']) self._check_folder_sync(expected_actions, fakeargs) diff --git a/test/unit/v0/test_utils.py b/test/unit/v0/test_utils.py index 6c98332a1..a410c4d5b 100644 --- a/test/unit/v0/test_utils.py +++ b/test/unit/v0/test_utils.py @@ -20,216 +20,52 @@ # These are from the B2 Docs (https://www.backblaze.com/b2/docs/string_encoding.html) ENCODING_TEST_CASES = [ - { - 'fullyEncoded': '%20', - 'minimallyEncoded': '+', - 'string': ' ' 
- }, - { - 'fullyEncoded': '%21', - 'minimallyEncoded': '!', - 'string': '!' - }, - { - 'fullyEncoded': '%22', - 'minimallyEncoded': '%22', - 'string': '"' - }, - { - 'fullyEncoded': '%23', - 'minimallyEncoded': '%23', - 'string': '#' - }, - { - 'fullyEncoded': '%24', - 'minimallyEncoded': '$', - 'string': '$' - }, - { - 'fullyEncoded': '%25', - 'minimallyEncoded': '%25', - 'string': '%' - }, - { - 'fullyEncoded': '%26', - 'minimallyEncoded': '%26', - 'string': '&' - }, - { - 'fullyEncoded': '%27', - 'minimallyEncoded': "'", - 'string': "'" - }, - { - 'fullyEncoded': '%28', - 'minimallyEncoded': '(', - 'string': '(' - }, - { - 'fullyEncoded': '%29', - 'minimallyEncoded': ')', - 'string': ')' - }, - { - 'fullyEncoded': '%2A', - 'minimallyEncoded': '*', - 'string': '*' - }, - { - 'fullyEncoded': '%2B', - 'minimallyEncoded': '%2B', - 'string': '+' - }, - { - 'fullyEncoded': '%2C', - 'minimallyEncoded': '%2C', - 'string': ',' - }, - { - 'fullyEncoded': '%2D', - 'minimallyEncoded': '-', - 'string': '-' - }, - { - 'fullyEncoded': '%2E', - 'minimallyEncoded': '.', - 'string': '.' - }, - { - 'fullyEncoded': '/', - 'minimallyEncoded': '/', - 'string': '/' - }, - { - 'fullyEncoded': '%30', - 'minimallyEncoded': '0', - 'string': '0' - }, - { - 'fullyEncoded': '%39', - 'minimallyEncoded': '9', - 'string': '9' - }, - { - 'fullyEncoded': '%3A', - 'minimallyEncoded': ':', - 'string': ':' - }, - { - 'fullyEncoded': '%3B', - 'minimallyEncoded': ';', - 'string': ';' - }, - { - 'fullyEncoded': '%3C', - 'minimallyEncoded': '%3C', - 'string': '<' - }, - { - 'fullyEncoded': '%3D', - 'minimallyEncoded': '=', - 'string': '=' - }, - { - 'fullyEncoded': '%3E', - 'minimallyEncoded': '%3E', - 'string': '>' - }, - { - 'fullyEncoded': '%3F', - 'minimallyEncoded': '%3F', - 'string': '?' 
- }, - { - 'fullyEncoded': '%40', - 'minimallyEncoded': '@', - 'string': '@' - }, - { - 'fullyEncoded': '%41', - 'minimallyEncoded': 'A', - 'string': 'A' - }, - { - 'fullyEncoded': '%5A', - 'minimallyEncoded': 'Z', - 'string': 'Z' - }, - { - 'fullyEncoded': '%5B', - 'minimallyEncoded': '%5B', - 'string': '[' - }, - { - 'fullyEncoded': '%5C', - 'minimallyEncoded': '%5C', - 'string': '\\' - }, - { - 'fullyEncoded': '%5D', - 'minimallyEncoded': '%5D', - 'string': ']' - }, - { - 'fullyEncoded': '%5E', - 'minimallyEncoded': '%5E', - 'string': '^' - }, - { - 'fullyEncoded': '%5F', - 'minimallyEncoded': '_', - 'string': '_' - }, - { - 'fullyEncoded': '%60', - 'minimallyEncoded': '%60', - 'string': '`' - }, - { - 'fullyEncoded': '%61', - 'minimallyEncoded': 'a', - 'string': 'a' - }, - { - 'fullyEncoded': '%7A', - 'minimallyEncoded': 'z', - 'string': 'z' - }, - { - 'fullyEncoded': '%7B', - 'minimallyEncoded': '%7B', - 'string': '{' - }, - { - 'fullyEncoded': '%7C', - 'minimallyEncoded': '%7C', - 'string': '|' - }, - { - 'fullyEncoded': '%7D', - 'minimallyEncoded': '%7D', - 'string': '}' - }, - { - 'fullyEncoded': '%7E', - 'minimallyEncoded': '~', - 'string': '~' - }, - { - 'fullyEncoded': '%7F', - 'minimallyEncoded': '%7F', - 'string': '\u007f' - }, + {'fullyEncoded': '%20', 'minimallyEncoded': '+', 'string': ' '}, + {'fullyEncoded': '%21', 'minimallyEncoded': '!', 'string': '!'}, + {'fullyEncoded': '%22', 'minimallyEncoded': '%22', 'string': '"'}, + {'fullyEncoded': '%23', 'minimallyEncoded': '%23', 'string': '#'}, + {'fullyEncoded': '%24', 'minimallyEncoded': '$', 'string': '$'}, + {'fullyEncoded': '%25', 'minimallyEncoded': '%25', 'string': '%'}, + {'fullyEncoded': '%26', 'minimallyEncoded': '%26', 'string': '&'}, + {'fullyEncoded': '%27', 'minimallyEncoded': "'", 'string': "'"}, + {'fullyEncoded': '%28', 'minimallyEncoded': '(', 'string': '('}, + {'fullyEncoded': '%29', 'minimallyEncoded': ')', 'string': ')'}, + {'fullyEncoded': '%2A', 'minimallyEncoded': '*', 'string': '*'}, + {'fullyEncoded': '%2B', 'minimallyEncoded': '%2B', 'string': '+'}, + {'fullyEncoded': '%2C', 'minimallyEncoded': '%2C', 'string': ','}, + {'fullyEncoded': '%2D', 'minimallyEncoded': '-', 'string': '-'}, + {'fullyEncoded': '%2E', 'minimallyEncoded': '.', 'string': '.'}, + {'fullyEncoded': '/', 'minimallyEncoded': '/', 'string': '/'}, + {'fullyEncoded': '%30', 'minimallyEncoded': '0', 'string': '0'}, + {'fullyEncoded': '%39', 'minimallyEncoded': '9', 'string': '9'}, + {'fullyEncoded': '%3A', 'minimallyEncoded': ':', 'string': ':'}, + {'fullyEncoded': '%3B', 'minimallyEncoded': ';', 'string': ';'}, + {'fullyEncoded': '%3C', 'minimallyEncoded': '%3C', 'string': '<'}, + {'fullyEncoded': '%3D', 'minimallyEncoded': '=', 'string': '='}, + {'fullyEncoded': '%3E', 'minimallyEncoded': '%3E', 'string': '>'}, + {'fullyEncoded': '%3F', 'minimallyEncoded': '%3F', 'string': '?'}, + {'fullyEncoded': '%40', 'minimallyEncoded': '@', 'string': '@'}, + {'fullyEncoded': '%41', 'minimallyEncoded': 'A', 'string': 'A'}, + {'fullyEncoded': '%5A', 'minimallyEncoded': 'Z', 'string': 'Z'}, + {'fullyEncoded': '%5B', 'minimallyEncoded': '%5B', 'string': '['}, + {'fullyEncoded': '%5C', 'minimallyEncoded': '%5C', 'string': '\\'}, + {'fullyEncoded': '%5D', 'minimallyEncoded': '%5D', 'string': ']'}, + {'fullyEncoded': '%5E', 'minimallyEncoded': '%5E', 'string': '^'}, + {'fullyEncoded': '%5F', 'minimallyEncoded': '_', 'string': '_'}, + {'fullyEncoded': '%60', 'minimallyEncoded': '%60', 'string': '`'}, + {'fullyEncoded': '%61', 'minimallyEncoded': 'a', 
'string': 'a'}, + {'fullyEncoded': '%7A', 'minimallyEncoded': 'z', 'string': 'z'}, + {'fullyEncoded': '%7B', 'minimallyEncoded': '%7B', 'string': '{'}, + {'fullyEncoded': '%7C', 'minimallyEncoded': '%7C', 'string': '|'}, + {'fullyEncoded': '%7D', 'minimallyEncoded': '%7D', 'string': '}'}, + {'fullyEncoded': '%7E', 'minimallyEncoded': '~', 'string': '~'}, + {'fullyEncoded': '%7F', 'minimallyEncoded': '%7F', 'string': '\u007f'}, { 'fullyEncoded': '%E8%87%AA%E7%94%B1', 'minimallyEncoded': '%E8%87%AA%E7%94%B1', - 'string': '\u81ea\u7531' - }, - { - 'fullyEncoded': '%F0%90%90%80', - 'minimallyEncoded': '%F0%90%90%80', - 'string': '\U00010400' + 'string': '\u81ea\u7531', }, + {'fullyEncoded': '%F0%90%90%80', 'minimallyEncoded': '%F0%90%90%80', 'string': '\U00010400'}, ] diff --git a/test/unit/v0/test_version_utils.py b/test/unit/v0/test_version_utils.py index 39cd2591d..2262d8ee4 100644 --- a/test/unit/v0/test_version_utils.py +++ b/test/unit/v0/test_version_utils.py @@ -21,12 +21,12 @@ class TestRenameArgument(TestBase): def test_warning(self): @rename_argument('aaa', 'bbb', '0.1.0', '0.2.0', current_version=self.VERSION) def easy(bbb): - """ easy docstring """ + """easy docstring""" return bbb # check that warning is not emitted too early with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always") + warnings.simplefilter('always') assert easy(5) == 5 assert easy(bbb=5) == 5 assert easy.__name__ == 'easy' @@ -34,21 +34,19 @@ def easy(bbb): assert len(w) == 0 with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always") + warnings.simplefilter('always') assert easy(aaa=5) == 5 assert len(w) == 1 assert issubclass(w[-1].category, DeprecationWarning) - assert str( - w[-1].message - ) == "'aaa' is a deprecated argument for 'easy' function/method - it was renamed to 'bbb' in version 0.1.0. Support for the old name is going to be dropped in 0.2.0.", str( - w[-1].message - ) + assert ( + str(w[-1].message) + == "'aaa' is a deprecated argument for 'easy' function/method - it was renamed to 'bbb' in version 0.1.0. Support for the old name is going to be dropped in 0.2.0." + ), str(w[-1].message) def test_outdated_replacement(self): with self.assertRaises( AssertionError, - msg= - f"rename_argument decorator is still used in version {self.VERSION} when old argument name 'aaa' was scheduled to be dropped in 0.1.2. It is time to remove the mapping.", + msg=f"rename_argument decorator is still used in version {self.VERSION} when old argument name 'aaa' was scheduled to be dropped in 0.1.2. It is time to remove the mapping.", ): @rename_argument('aaa', 'bbb', '0.1.0', '0.1.2', current_version=self.VERSION) @@ -60,8 +58,7 @@ def late(bbb): def test_future_replacement(self): with self.assertRaises( AssertionError, - msg= - "rename_argument decorator indicates that the replacement of argument 'aaa' should take place in the future version 0.2.0, while the current version is 0.2.2. It looks like should be _discouraged_ at this point and not _deprecated_ yet. Consider using 'discourage_argument' decorator instead." + msg="rename_argument decorator indicates that the replacement of argument 'aaa' should take place in the future version 0.2.0, while the current version is 0.2.2. It looks like should be _discouraged_ at this point and not _deprecated_ yet. 
Consider using 'discourage_argument' decorator instead.", ): @rename_argument('aaa', 'bbb', '0.2.0', '0.2.2', current_version=self.VERSION) @@ -73,8 +70,7 @@ def early(bbb): def test_inverted_versions(self): with self.assertRaises( AssertionError, - msg= - "rename_argument decorator is set to start renaming argument 'aaa' starting at version 0.2.2 and finishing in 0.2.0. It needs to start at a lower version and finish at a higher version." + msg="rename_argument decorator is set to start renaming argument 'aaa' starting at version 0.2.2 and finishing in 0.2.0. It needs to start at a lower version and finish at a higher version.", ): @rename_argument('aaa', 'bbb', '0.2.2', '0.2.0', current_version=self.VERSION) @@ -98,12 +94,11 @@ def old(bbb): return bbb with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always") + warnings.simplefilter('always') assert old(5) == 5 assert len(w) == 1 assert issubclass(w[-1].category, DeprecationWarning) - assert str( - w[-1].message - ) == "'old' is deprecated since version 0.1.0 - it was moved to 'new', please switch to use that. The proxy for the old name is going to be removed in 0.2.0.", str( - w[-1].message - ) + assert ( + str(w[-1].message) + == "'old' is deprecated since version 0.1.0 - it was moved to 'new', please switch to use that. The proxy for the old name is going to be removed in 0.2.0." + ), str(w[-1].message) diff --git a/test/unit/v1/test_copy_manager.py b/test/unit/v1/test_copy_manager.py index c6ee6d8c5..d864dd12b 100644 --- a/test/unit/v1/test_copy_manager.py +++ b/test/unit/v1/test_copy_manager.py @@ -39,7 +39,11 @@ class TestCopyManager(TestBase): def test_establish_sse_c_replace(self): file_info = {'some_key': 'some_value'} content_type = 'text/plain' - metadata_directive, new_file_info, new_content_type = CopyManager.establish_sse_c_file_metadata( + ( + metadata_directive, + new_file_info, + new_content_type, + ) = CopyManager.establish_sse_c_file_metadata( MetadataDirectiveMode.REPLACE, destination_file_info=file_info, destination_content_type=content_type, @@ -50,17 +54,21 @@ def test_establish_sse_c_replace(self): ) self.assertEqual( ( - MetadataDirectiveMode.REPLACE, { - 'some_key': 'some_value', - SSE_C_KEY_ID_FILE_INFO_KEY_NAME: 'some-id' - }, content_type - ), (metadata_directive, new_file_info, new_content_type) + MetadataDirectiveMode.REPLACE, + {'some_key': 'some_value', SSE_C_KEY_ID_FILE_INFO_KEY_NAME: 'some-id'}, + content_type, + ), + (metadata_directive, new_file_info, new_content_type), ) def test_establish_sse_c_copy_no_enc(self): file_info = {} content_type = 'text/plain' - metadata_directive, new_file_info, new_content_type = CopyManager.establish_sse_c_file_metadata( + ( + metadata_directive, + new_file_info, + new_content_type, + ) = CopyManager.establish_sse_c_file_metadata( MetadataDirectiveMode.COPY, destination_file_info=file_info, destination_content_type=content_type, @@ -71,13 +79,17 @@ def test_establish_sse_c_copy_no_enc(self): ) self.assertEqual( (MetadataDirectiveMode.COPY, {}, content_type), - (metadata_directive, new_file_info, new_content_type) + (metadata_directive, new_file_info, new_content_type), ) def test_establish_sse_c_copy_b2(self): file_info = {} content_type = 'text/plain' - metadata_directive, new_file_info, new_content_type = CopyManager.establish_sse_c_file_metadata( + ( + metadata_directive, + new_file_info, + new_content_type, + ) = CopyManager.establish_sse_c_file_metadata( MetadataDirectiveMode.COPY, destination_file_info=file_info, 
destination_content_type=content_type, @@ -88,13 +100,17 @@ def test_establish_sse_c_copy_b2(self): ) self.assertEqual( (MetadataDirectiveMode.COPY, {}, content_type), - (metadata_directive, new_file_info, new_content_type) + (metadata_directive, new_file_info, new_content_type), ) def test_establish_sse_c_copy_same_key_id(self): file_info = None content_type = 'text/plain' - metadata_directive, new_file_info, new_content_type = CopyManager.establish_sse_c_file_metadata( + ( + metadata_directive, + new_file_info, + new_content_type, + ) = CopyManager.establish_sse_c_file_metadata( MetadataDirectiveMode.COPY, destination_file_info=file_info, destination_content_type=content_type, @@ -105,11 +121,15 @@ def test_establish_sse_c_copy_same_key_id(self): ) self.assertEqual( (MetadataDirectiveMode.COPY, None, content_type), - (metadata_directive, new_file_info, new_content_type) + (metadata_directive, new_file_info, new_content_type), ) def test_establish_sse_c_copy_sources_given(self): - metadata_directive, new_file_info, new_content_type = CopyManager.establish_sse_c_file_metadata( + ( + metadata_directive, + new_file_info, + new_content_type, + ) = CopyManager.establish_sse_c_file_metadata( MetadataDirectiveMode.COPY, destination_file_info=None, destination_content_type=None, @@ -117,25 +137,23 @@ def test_establish_sse_c_copy_sources_given(self): source_server_side_encryption=SSE_C_AES_2, source_file_info={ 'some_key': 'some_value', - SSE_C_KEY_ID_FILE_INFO_KEY_NAME: 'some-id-2' + SSE_C_KEY_ID_FILE_INFO_KEY_NAME: 'some-id-2', }, source_content_type='text/plain', ) self.assertEqual( ( - MetadataDirectiveMode.REPLACE, { - 'some_key': 'some_value', - SSE_C_KEY_ID_FILE_INFO_KEY_NAME: 'some-id' - }, 'text/plain' - ), (metadata_directive, new_file_info, new_content_type) + MetadataDirectiveMode.REPLACE, + {'some_key': 'some_value', SSE_C_KEY_ID_FILE_INFO_KEY_NAME: 'some-id'}, + 'text/plain', + ), + (metadata_directive, new_file_info, new_content_type), ) def test_establish_sse_c_copy_sources_unknown(self): for source_file_info, source_content_type in [ (None, None), - ({ - 'a': 'b' - }, None), + ({'a': 'b'}, None), (None, 'text/plain'), ]: with self.subTest( @@ -144,7 +162,7 @@ def test_establish_sse_c_copy_sources_unknown(self): with self.assertRaises( SSECKeyIdMismatchInCopy, 'attempting to copy file using MetadataDirectiveMode.COPY without providing source_file_info ' - 'and source_content_type for differing sse_c_key_ids: source="some-id-2", destination="some-id"' + 'and source_content_type for differing sse_c_key_ids: source="some-id-2", destination="some-id"', ): CopyManager.establish_sse_c_file_metadata( MetadataDirectiveMode.COPY, diff --git a/test/unit/v1/test_download_dest.py b/test/unit/v1/test_download_dest.py index 80875923d..192bfb0eb 100644 --- a/test/unit/v1/test_download_dest.py +++ b/test/unit/v1/test_download_dest.py @@ -25,7 +25,7 @@ class TestDownloadDestLocalFile(TestBase): expected_result = 'hello world' def _make_dest(self, temp_dir): - file_path = os.path.join(temp_dir, "test.txt") + file_path = os.path.join(temp_dir, 'test.txt') return DownloadDestLocalFile(file_path), file_path def test_write_and_set_mod_time(self): @@ -36,7 +36,7 @@ def test_write_and_set_mod_time(self): with tempfile.TemporaryDirectory() as temp_dir: download_dest, file_path = self._make_dest(temp_dir) with download_dest.make_file_context( - "file_id", "file_name", 100, "content_type", "sha1", {}, mod_time + 'file_id', 'file_name', 100, 'content_type', 'sha1', {}, mod_time ) as f: f.write(b'hello 
world') with open(file_path, 'rb') as f: @@ -51,7 +51,7 @@ def test_failed_write_deletes_partial_file(self): download_dest, file_path = self._make_dest(temp_dir) try: with download_dest.make_file_context( - "file_id", "file_name", 100, "content_type", "sha1", {}, 1500222333000 + 'file_id', 'file_name', 100, 'content_type', 'sha1', {}, 1500222333000 ) as f: f.write(b'hello world') raise Exception('test error') @@ -64,7 +64,7 @@ class TestPreSeekedDownloadDest(TestDownloadDestLocalFile): expected_result = '123hello world567890' def _make_dest(self, temp_dir): - file_path = os.path.join(temp_dir, "test.txt") + file_path = os.path.join(temp_dir, 'test.txt') with open(file_path, 'wb') as f: f.write(b'12345678901234567890') return PreSeekedDownloadDest(local_file_path=file_path, seek_target=3), file_path @@ -77,12 +77,12 @@ def test_write_and_set_mod_time_and_progress(self): """ mod_time = 1500222333000 with tempfile.TemporaryDirectory() as temp_dir: - file_path = os.path.join(temp_dir, "test.txt") + file_path = os.path.join(temp_dir, 'test.txt') download_local_file = DownloadDestLocalFile(file_path) progress_listener = ProgressListenerForTest() download_dest = DownloadDestProgressWrapper(download_local_file, progress_listener) with download_dest.make_file_context( - "file_id", "file_name", 100, "content_type", "sha1", {}, mod_time + 'file_id', 'file_name', 100, 'content_type', 'sha1', {}, mod_time ) as f: f.write(b'hello world\n') with open(file_path, 'rb') as f: diff --git a/test/unit/v1/test_file_metadata.py b/test/unit/v1/test_file_metadata.py index 374398ea7..46b42d389 100644 --- a/test/unit/v1/test_file_metadata.py +++ b/test/unit/v1/test_file_metadata.py @@ -26,7 +26,7 @@ class TestFileMetadata(TestBase): 'content_length': '1', 'content_sha1': '4518012e1b365e504001dbc94120624f15b8bbd5', 'file_info': {}, - } # yapf: disable + } INFO_DICT = {snake_to_camel(k): v for k, v in KWARGS.items()} def test_verified_sha1(self): diff --git a/test/unit/v1/test_policy.py b/test/unit/v1/test_policy.py index dd367afbf..e78976a18 100644 --- a/test/unit/v1/test_policy.py +++ b/test/unit/v1/test_policy.py @@ -34,23 +34,26 @@ def test_no_source_one_old_version_hides(self): def test_old_hide_causes_delete(self): # A hide marker that is old gets deleted, as do the things after it. self.check_one_answer( - True, [(1, -5, 'upload'), (2, -10, 'hide'), (3, -20, 'upload')], - ['b2_delete(folder/a, 2, (hide marker))', 'b2_delete(folder/a, 3, (old version))'] + True, + [(1, -5, 'upload'), (2, -10, 'hide'), (3, -20, 'upload')], + ['b2_delete(folder/a, 2, (hide marker))', 'b2_delete(folder/a, 3, (old version))'], ) def test_old_upload_causes_delete(self): # An upload that is old stays if there is a source file, but things # behind it go away. self.check_one_answer( - True, [(1, -5, 'upload'), (2, -10, 'upload'), (3, -20, 'upload')], - ['b2_delete(folder/a, 3, (old version))'] + True, + [(1, -5, 'upload'), (2, -10, 'upload'), (3, -20, 'upload')], + ['b2_delete(folder/a, 3, (old version))'], ) def test_out_of_order_dates(self): # The one at date -3 will get deleted because the one before it is old. 
self.check_one_answer( - True, [(1, -5, 'upload'), (2, -10, 'upload'), (3, -3, 'upload')], - ['b2_delete(folder/a, 3, (old version))'] + True, + [(1, -5, 'upload'), (2, -10, 'upload'), (3, -3, 'upload')], + ['b2_delete(folder/a, 3, (old version))'], ) def check_one_answer(self, has_source, id_relative_date_action_list, expected_actions): @@ -65,11 +68,14 @@ def check_one_answer(self, has_source, id_relative_date_action_list, expected_ac file_info={}, content_type='text/plain', content_sha1='content_sha1', - ) for (id_, relative_date, action) in id_relative_date_action_list + ) + for (id_, relative_date, action) in id_relative_date_action_list ] - dest_file = B2SyncPath( - 'a', selected_version=dest_file_versions[0], all_versions=dest_file_versions - ) if dest_file_versions else None + dest_file = ( + B2SyncPath('a', selected_version=dest_file_versions[0], all_versions=dest_file_versions) + if dest_file_versions + else None + ) bucket = MagicMock() api = MagicMock() api.get_bucket_by_name.return_value = bucket diff --git a/test/unit/v1/test_raw_api.py b/test/unit/v1/test_raw_api.py index 351d921e8..af2ce3288 100644 --- a/test/unit/v1/test_raw_api.py +++ b/test/unit/v1/test_raw_api.py @@ -42,7 +42,7 @@ def _should_be_ok(self, filename): :param filename: unicode (or str) that follows the rules """ - print(f"Filename \"{filename}\" should be OK") + print(f'Filename "{filename}" should be OK') self.assertTrue(self.raw_api.check_b2_filename(filename) is None) def _should_raise(self, filename, exception_message): @@ -51,9 +51,7 @@ def _should_raise(self, filename, exception_message): :param filename: unicode (or str) that doesn't follow the rules :param exception_message: regexp that matches the exception's detailed message """ - print( - f"Filename \"{filename}\" should raise UnusableFileName(\".*{exception_message}.*\")." - ) + print(f'Filename "{filename}" should raise UnusableFileName(".*{exception_message}.*").') with self.assertRaisesRegex(UnusableFileName, exception_message): self.raw_api.check_b2_filename(filename) @@ -68,7 +66,7 @@ def test_b2_filename_checker(self): - File names cannot start with "/", end with "/", or contain "//". - Maximum of 250 bytes of UTF-8 in each segment (part between slashes) of a file name. """ - print("test b2 filename rules") + print('test b2 filename rules') # Examples from doc: self._should_be_ok('Kitten Videos') @@ -79,28 +77,28 @@ def test_b2_filename_checker(self): s_1024 = 4 * (250 * 'x' + '/') + 20 * 'y' self._should_be_ok(s_1024) # 1025 is too long. - self._should_raise(s_1024 + 'x', "too long") + self._should_raise(s_1024 + 'x', 'too long') # 1024 bytes with two byte characters should also work. s_1024_two_byte = 4 * (125 * TWO_BYTE_UNICHR + '/') + 20 * 'y' self._should_be_ok(s_1024_two_byte) # But 1025 bytes is too long. - self._should_raise(s_1024_two_byte + 'x', "too long") + self._should_raise(s_1024_two_byte + 'x', 'too long') # Names with unicode values < 32, and DEL aren't allowed. - self._should_raise('hey' + CHAR_UNDER_32, "contains code.*less than 32") + self._should_raise('hey' + CHAR_UNDER_32, 'contains code.*less than 32') # Unicode in the filename shouldn't break the exception message. 
- self._should_raise(TWO_BYTE_UNICHR + CHAR_UNDER_32, "contains code.*less than 32") - self._should_raise(DEL_CHAR, "DEL.*not allowed") + self._should_raise(TWO_BYTE_UNICHR + CHAR_UNDER_32, 'contains code.*less than 32') + self._should_raise(DEL_CHAR, 'DEL.*not allowed') # Names can't start or end with '/' or contain '//' - self._should_raise('/hey', "not start.*/") - self._should_raise('hey/', "not .*end.*/") - self._should_raise('not//allowed', "contain.*//") + self._should_raise('/hey', 'not start.*/') + self._should_raise('hey/', 'not .*end.*/') + self._should_raise('not//allowed', 'contain.*//') # Reject segments longer than 250 bytes - self._should_raise('foo/' + 251 * 'x', "segment too long") + self._should_raise('foo/' + 251 * 'x', 'segment too long') # So a segment of 125 two-byte chars plus one should also fail. - self._should_raise('foo/' + 125 * TWO_BYTE_UNICHR + 'x', "segment too long") + self._should_raise('foo/' + 125 * TWO_BYTE_UNICHR + 'x', 'segment too long') class BucketTestBase: @@ -118,13 +116,15 @@ def test_assertion_raises(self): self.raw_api.update_bucket('test', 'account_auth_token', 'account_id', 'bucket_id') @pytest.mark.parametrize( - 'bucket_type,bucket_info,default_retention', ( + 'bucket_type,bucket_info,default_retention', + ( (None, {}, None), ( - 'allPublic', None, - BucketRetentionSetting(RetentionMode.COMPLIANCE, RetentionPeriod(years=1)) + 'allPublic', + None, + BucketRetentionSetting(RetentionMode.COMPLIANCE, RetentionPeriod(years=1)), ), - ) + ), ) def test_assertion_not_raises(self, bucket_type, bucket_info, default_retention): self.raw_api.update_bucket( @@ -138,14 +138,17 @@ def test_assertion_not_raises(self, bucket_type, bucket_info, default_retention) ) @pytest.mark.parametrize( - 'encryption_setting,', ( + 'encryption_setting,', + ( EncryptionSetting( mode=EncryptionMode.SSE_C, algorithm=EncryptionAlgorithm.AES256, - key=EncryptionKey(b'key', 'key-id') + key=EncryptionKey(b'key', 'key-id'), ), - EncryptionSetting(mode=EncryptionMode.UNKNOWN,), - ) + EncryptionSetting( + mode=EncryptionMode.UNKNOWN, + ), + ), ) def test_update_bucket_wrong_encryption(self, encryption_setting): with pytest.raises(WrongEncryptionModeForBucketDefault): @@ -163,17 +166,19 @@ class TestCreateBucket(BucketTestBase): """Test creating bucket.""" @pytest.mark.parametrize( - 'encryption_setting,', ( + 'encryption_setting,', + ( EncryptionSetting( mode=EncryptionMode.SSE_C, algorithm=EncryptionAlgorithm.AES256, - key=EncryptionKey(b'key', 'key-id') + key=EncryptionKey(b'key', 'key-id'), ), - EncryptionSetting(mode=EncryptionMode.UNKNOWN,), - ) + EncryptionSetting( + mode=EncryptionMode.UNKNOWN, + ), + ), ) def test_create_bucket_wrong_encryption(self, encryption_setting): - with pytest.raises(WrongEncryptionModeForBucketDefault): self.raw_api.create_bucket( 'test', diff --git a/test/unit/v1/test_session.py b/test/unit/v1/test_session.py index 06c107b1a..fe71cd10a 100644 --- a/test/unit/v1/test_session.py +++ b/test/unit/v1/test_session.py @@ -84,6 +84,6 @@ def test_app_key_info_all_info(self): self.raw_api.get_file_info_by_id.side_effect = Unauthorized('no_go', 'code') with self.assertRaisesRegex( Unauthorized, - r"no_go for application key with capabilities 'readFiles', restricted to bucket 'my-bucket', restricted to files that start with 'prefix/' \(code\)" + r"no_go for application key with capabilities 'readFiles', restricted to bucket 'my-bucket', restricted to files that start with 'prefix/' \(code\)", ): self.session.get_file_info_by_id(None) diff --git 
a/test/unit/v1/test_sync.py b/test/unit/v1/test_sync.py index 444f6b3f6..03e0cf189 100644 --- a/test/unit/v1/test_sync.py +++ b/test/unit/v1/test_sync.py @@ -289,7 +289,7 @@ def prepare_folder( prepare_files=True, broken_symlink=False, invalid_permissions=False, - use_file_versions_info=False + use_file_versions_info=False, ): assert not (broken_symlink and invalid_permissions) @@ -305,7 +305,7 @@ def prepare_folder( if broken_symlink: os.symlink( os.path.join(self.root_dir, 'non_existant_file'), - os.path.join(self.root_dir, 'bad_symlink') + os.path.join(self.root_dir, 'bad_symlink'), ) elif invalid_permissions: os.chmod(os.path.join(self.root_dir, self.NAMES[0]), 0) @@ -355,8 +355,9 @@ def test_invalid_permissions(self): def test_syncable_paths(self): syncable_paths = ( - ('test.txt', 'test.txt'), ('./a/test.txt', 'a/test.txt'), - ('./a/../test.txt', 'test.txt') + ('test.txt', 'test.txt'), + ('./a/test.txt', 'a/test.txt'), + ('./a/../test.txt', 'test.txt'), ) folder = self.prepare_folder(prepare_files=False) @@ -377,38 +378,39 @@ class TestB2Folder(TestFolder): __test__ = True FILE_VERSION_INFOS = { - os.path.join('inner', 'a.txt'): - [ - ( - FileVersionInfo( - 'a2', 'inner/a.txt', 200, 'text/plain', 'sha1', {}, 2000, 'upload' - ), '' - ), - ( - FileVersionInfo( - 'a1', 'inner/a.txt', 100, 'text/plain', 'sha1', {}, 1000, 'upload' - ), '' - ) - ], - os.path.join('inner', 'b.txt'): - [ - ( - FileVersionInfo( - 'b2', 'inner/b.txt', 200, 'text/plain', 'sha1', {}, 1999, 'upload' - ), '' - ), - ( - FileVersionInfo( - 'bs', 'inner/b.txt', 150, 'text/plain', 'sha1', {}, 1500, 'start' - ), '' + os.path.join('inner', 'a.txt'): [ + ( + FileVersionInfo('a2', 'inner/a.txt', 200, 'text/plain', 'sha1', {}, 2000, 'upload'), + '', + ), + ( + FileVersionInfo('a1', 'inner/a.txt', 100, 'text/plain', 'sha1', {}, 1000, 'upload'), + '', + ), + ], + os.path.join('inner', 'b.txt'): [ + ( + FileVersionInfo('b2', 'inner/b.txt', 200, 'text/plain', 'sha1', {}, 1999, 'upload'), + '', + ), + ( + FileVersionInfo('bs', 'inner/b.txt', 150, 'text/plain', 'sha1', {}, 1500, 'start'), + '', + ), + ( + FileVersionInfo( + 'b1', + 'inner/b.txt', + 100, + 'text/plain', + 'sha1', + {'src_last_modified_millis': 1001}, + 6666, + 'upload', ), - ( - FileVersionInfo( - 'b1', 'inner/b.txt', 100, 'text/plain', 'sha1', - {'src_last_modified_millis': 1001}, 6666, 'upload' - ), '' - ) - ] + '', + ), + ], } def setUp(self): @@ -423,7 +425,7 @@ def prepare_folder( prepare_files=True, broken_symlink=False, invalid_permissions=False, - use_file_versions_info=False + use_file_versions_info=False, ): if prepare_files: for relative_path in self.NAMES: @@ -442,9 +444,16 @@ def prepare_file(self, relative_path, use_file_versions_info=False): self.bucket.ls.return_value.append( ( FileVersionInfo( - relative_path, relative_path, 100, 'text/plain', 'sha1', {}, - self.MOD_TIMES[relative_path], 'upload' - ), self.root_dir + relative_path, + relative_path, + 100, + 'text/plain', + 'sha1', + {}, + self.MOD_TIMES[relative_path], + 'upload', + ), + self.root_dir, ) ) else: @@ -452,7 +461,8 @@ def prepare_file(self, relative_path, use_file_versions_info=False): ( FileVersionInfo( relative_path, relative_path, 100, 'text/plain', 'sha1', {}, TODAY, 'upload' - ), self.root_dir + ), + self.root_dir, ) ) @@ -467,11 +477,13 @@ def test_multiple_versions(self): self.assertEqual( [ "B2Path(inner/a.txt, [('a2', 2000, 'upload'), ('a1', 1000, 'upload')])", - "B2Path(inner/b.txt, [('b2', 1999, 'upload'), ('b1', 1001, 'upload')])" - ], [ - str(f) for f in 
folder.all_files(self.reporter) + "B2Path(inner/b.txt, [('b2', 1999, 'upload'), ('b1', 1001, 'upload')])", + ], + [ + str(f) + for f in folder.all_files(self.reporter) if f.relative_path in ('inner/a.txt', 'inner/b.txt') - ] + ], ) def test_exclude_modified_multiple_versions(self): @@ -480,10 +492,12 @@ def test_exclude_modified_multiple_versions(self): ) folder = self.prepare_folder(use_file_versions_info=True) self.assertEqual( - ["B2Path(inner/b.txt, [('b2', 1999, 'upload'), ('b1', 1001, 'upload')])"], [ - str(f) for f in folder.all_files(self.reporter, policies_manager=polices_manager) + ["B2Path(inner/b.txt, [('b2', 1999, 'upload'), ('b1', 1001, 'upload')])"], + [ + str(f) + for f in folder.all_files(self.reporter, policies_manager=polices_manager) if f.relative_path in ('inner/a.txt', 'inner/b.txt') - ] + ], ) def test_exclude_modified_all_versions(self): @@ -551,7 +565,7 @@ def test_unsyncable_filenames(self): r'a\\.\\b', ] - if platform.system() == "Windows": + if platform.system() == 'Windows': filenames_to_test.extend(self.NOT_SYNCD_ON_WINDOWS) for filename in filenames_to_test: @@ -592,7 +606,7 @@ def test_syncable_filenames(self): ] # filenames not permitted on Windows *should* be allowed on Linux - if platform.system() != "Windows": + if platform.system() != 'Windows': filenames_to_test.extend(self.NOT_SYNCD_ON_WINDOWS) for filename in filenames_to_test: @@ -654,8 +668,9 @@ def file_versions_from_file_tuples(cls, name, mod_times, size=10): file_info={'in_b2': 'yes'}, content_type='text/plain', content_sha1='content_sha1', - ) for mod_time in mod_times - ] # yapf disable + ) + for mod_time in mod_times + ] @classmethod def sync_path_from_file_tuple(cls, name, mod_times, size=10): @@ -700,7 +715,7 @@ def _check_one(self, expected, to_parse): class TestFolderExceptions: """There is an exact copy of this class in unit/v0/test_sync.py - TODO: leave only one when migrating tests to - sync-like structure. + sync-like structure. """ @pytest.mark.parametrize( @@ -744,7 +759,7 @@ def test_ensure_present_unable_to_create(self, exception, msg): pytest.param( CommandError, 'Directory .* is empty. 
Use --allowEmptySource to sync anyway.', - marks=pytest.mark.apiver(to_ver=1) + marks=pytest.mark.apiver(to_ver=1), ), ], ) @@ -764,7 +779,6 @@ def test_ensure_non_empty(self, exception, msg): ], ) def test_double_slash_not_allowed(self, exception, msg): - with pytest.raises(exception, match=msg): parse_sync_folder('b2://a//b', MagicMock()) @@ -776,18 +790,18 @@ def test_empty(self): self.assertEqual([], list(zip_folders(folder_a, folder_b, self.reporter))) def test_one_empty(self): - file_a1 = LocalSyncPath("a.txt", "a.txt", 100, 10) + file_a1 = LocalSyncPath('a.txt', 'a.txt', 100, 10) folder_a = FakeLocalFolder([file_a1]) folder_b = FakeB2Folder([]) self.assertEqual([(file_a1, None)], list(zip_folders(folder_a, folder_b, self.reporter))) def test_two(self): - file_a1 = ("a.txt", [100], 10) - file_a2 = ("b.txt", [100], 10) - file_a3 = ("d.txt", [100], 10) - file_a4 = ("f.txt", [100], 10) - file_b1 = ("b.txt", [200], 10) - file_b2 = ("e.txt", [200], 10) + file_a1 = ('a.txt', [100], 10) + file_a2 = ('b.txt', [100], 10) + file_a3 = ('d.txt', [100], 10) + file_a4 = ('f.txt', [100], 10) + file_b1 = ('b.txt', [200], 10) + file_b2 = ('e.txt', [200], 10) folder_a = FakeB2Folder([file_a1, file_a2, file_a3, file_a4]) folder_b = FakeB2Folder([file_b1, file_b2]) self.assertEqual( @@ -795,11 +809,13 @@ def test_two(self): (FakeB2Folder.sync_path_from_file_tuple(*file_a1), None), ( FakeB2Folder.sync_path_from_file_tuple(*file_a2), - FakeB2Folder.sync_path_from_file_tuple(*file_b1) - ), (FakeB2Folder.sync_path_from_file_tuple(*file_a3), None), + FakeB2Folder.sync_path_from_file_tuple(*file_b1), + ), + (FakeB2Folder.sync_path_from_file_tuple(*file_a3), None), (None, FakeB2Folder.sync_path_from_file_tuple(*file_b2)), - (FakeB2Folder.sync_path_from_file_tuple(*file_a4), None) - ], list(zip_folders(folder_a, folder_b, self.reporter)) + (FakeB2Folder.sync_path_from_file_tuple(*file_a4), None), + ], + list(zip_folders(folder_a, folder_b, self.reporter)), ) def test_pass_reporter_to_folder(self): @@ -900,7 +916,7 @@ def _check_folder_sync(self, expected_actions, fakeargs): exclude_dir_regexes=fakeargs.excludeDirRegex, exclude_file_regexes=fakeargs.excludeRegex, include_file_regexes=fakeargs.includeRegex, - exclude_all_symlinks=fakeargs.excludeAllSymlinks + exclude_all_symlinks=fakeargs.excludeAllSymlinks, ) synchronizer = fakeargs.get_synchronizer(policies_manager=policies_manager) actions = list( @@ -922,7 +938,7 @@ def test_file_exclusions_with_delete(self): expected_actions, FakeArgs( keep_days_or_delete=KeepOrDeleteMode.DELETE, - excludeRegex=["b\\.txt"], + excludeRegex=['b\\.txt'], ), ) @@ -937,8 +953,8 @@ def test_file_exclusions_inclusions_with_delete(self): ] fakeargs = FakeArgs( keep_days_or_delete=KeepOrDeleteMode.DELETE, - excludeRegex=["b\\.txt"], - includeRegex=[".*\\.incl"] + excludeRegex=['b\\.txt'], + includeRegex=['.*\\.incl'], ) self._check_folder_sync(expected_actions, fakeargs) diff --git a/test/unit/v1/test_utils.py b/test/unit/v1/test_utils.py index 4cc7d211d..f48cd344c 100644 --- a/test/unit/v1/test_utils.py +++ b/test/unit/v1/test_utils.py @@ -20,216 +20,52 @@ # These are from the B2 Docs (https://www.backblaze.com/b2/docs/string_encoding.html) ENCODING_TEST_CASES = [ - { - 'fullyEncoded': '%20', - 'minimallyEncoded': '+', - 'string': ' ' - }, - { - 'fullyEncoded': '%21', - 'minimallyEncoded': '!', - 'string': '!' 
- }, - { - 'fullyEncoded': '%22', - 'minimallyEncoded': '%22', - 'string': '"' - }, - { - 'fullyEncoded': '%23', - 'minimallyEncoded': '%23', - 'string': '#' - }, - { - 'fullyEncoded': '%24', - 'minimallyEncoded': '$', - 'string': '$' - }, - { - 'fullyEncoded': '%25', - 'minimallyEncoded': '%25', - 'string': '%' - }, - { - 'fullyEncoded': '%26', - 'minimallyEncoded': '%26', - 'string': '&' - }, - { - 'fullyEncoded': '%27', - 'minimallyEncoded': "'", - 'string': "'" - }, - { - 'fullyEncoded': '%28', - 'minimallyEncoded': '(', - 'string': '(' - }, - { - 'fullyEncoded': '%29', - 'minimallyEncoded': ')', - 'string': ')' - }, - { - 'fullyEncoded': '%2A', - 'minimallyEncoded': '*', - 'string': '*' - }, - { - 'fullyEncoded': '%2B', - 'minimallyEncoded': '%2B', - 'string': '+' - }, - { - 'fullyEncoded': '%2C', - 'minimallyEncoded': '%2C', - 'string': ',' - }, - { - 'fullyEncoded': '%2D', - 'minimallyEncoded': '-', - 'string': '-' - }, - { - 'fullyEncoded': '%2E', - 'minimallyEncoded': '.', - 'string': '.' - }, - { - 'fullyEncoded': '/', - 'minimallyEncoded': '/', - 'string': '/' - }, - { - 'fullyEncoded': '%30', - 'minimallyEncoded': '0', - 'string': '0' - }, - { - 'fullyEncoded': '%39', - 'minimallyEncoded': '9', - 'string': '9' - }, - { - 'fullyEncoded': '%3A', - 'minimallyEncoded': ':', - 'string': ':' - }, - { - 'fullyEncoded': '%3B', - 'minimallyEncoded': ';', - 'string': ';' - }, - { - 'fullyEncoded': '%3C', - 'minimallyEncoded': '%3C', - 'string': '<' - }, - { - 'fullyEncoded': '%3D', - 'minimallyEncoded': '=', - 'string': '=' - }, - { - 'fullyEncoded': '%3E', - 'minimallyEncoded': '%3E', - 'string': '>' - }, - { - 'fullyEncoded': '%3F', - 'minimallyEncoded': '%3F', - 'string': '?' - }, - { - 'fullyEncoded': '%40', - 'minimallyEncoded': '@', - 'string': '@' - }, - { - 'fullyEncoded': '%41', - 'minimallyEncoded': 'A', - 'string': 'A' - }, - { - 'fullyEncoded': '%5A', - 'minimallyEncoded': 'Z', - 'string': 'Z' - }, - { - 'fullyEncoded': '%5B', - 'minimallyEncoded': '%5B', - 'string': '[' - }, - { - 'fullyEncoded': '%5C', - 'minimallyEncoded': '%5C', - 'string': '\\' - }, - { - 'fullyEncoded': '%5D', - 'minimallyEncoded': '%5D', - 'string': ']' - }, - { - 'fullyEncoded': '%5E', - 'minimallyEncoded': '%5E', - 'string': '^' - }, - { - 'fullyEncoded': '%5F', - 'minimallyEncoded': '_', - 'string': '_' - }, - { - 'fullyEncoded': '%60', - 'minimallyEncoded': '%60', - 'string': '`' - }, - { - 'fullyEncoded': '%61', - 'minimallyEncoded': 'a', - 'string': 'a' - }, - { - 'fullyEncoded': '%7A', - 'minimallyEncoded': 'z', - 'string': 'z' - }, - { - 'fullyEncoded': '%7B', - 'minimallyEncoded': '%7B', - 'string': '{' - }, - { - 'fullyEncoded': '%7C', - 'minimallyEncoded': '%7C', - 'string': '|' - }, - { - 'fullyEncoded': '%7D', - 'minimallyEncoded': '%7D', - 'string': '}' - }, - { - 'fullyEncoded': '%7E', - 'minimallyEncoded': '~', - 'string': '~' - }, - { - 'fullyEncoded': '%7F', - 'minimallyEncoded': '%7F', - 'string': '\u007f' - }, + {'fullyEncoded': '%20', 'minimallyEncoded': '+', 'string': ' '}, + {'fullyEncoded': '%21', 'minimallyEncoded': '!', 'string': '!'}, + {'fullyEncoded': '%22', 'minimallyEncoded': '%22', 'string': '"'}, + {'fullyEncoded': '%23', 'minimallyEncoded': '%23', 'string': '#'}, + {'fullyEncoded': '%24', 'minimallyEncoded': '$', 'string': '$'}, + {'fullyEncoded': '%25', 'minimallyEncoded': '%25', 'string': '%'}, + {'fullyEncoded': '%26', 'minimallyEncoded': '%26', 'string': '&'}, + {'fullyEncoded': '%27', 'minimallyEncoded': "'", 'string': "'"}, + {'fullyEncoded': '%28', 
'minimallyEncoded': '(', 'string': '('}, + {'fullyEncoded': '%29', 'minimallyEncoded': ')', 'string': ')'}, + {'fullyEncoded': '%2A', 'minimallyEncoded': '*', 'string': '*'}, + {'fullyEncoded': '%2B', 'minimallyEncoded': '%2B', 'string': '+'}, + {'fullyEncoded': '%2C', 'minimallyEncoded': '%2C', 'string': ','}, + {'fullyEncoded': '%2D', 'minimallyEncoded': '-', 'string': '-'}, + {'fullyEncoded': '%2E', 'minimallyEncoded': '.', 'string': '.'}, + {'fullyEncoded': '/', 'minimallyEncoded': '/', 'string': '/'}, + {'fullyEncoded': '%30', 'minimallyEncoded': '0', 'string': '0'}, + {'fullyEncoded': '%39', 'minimallyEncoded': '9', 'string': '9'}, + {'fullyEncoded': '%3A', 'minimallyEncoded': ':', 'string': ':'}, + {'fullyEncoded': '%3B', 'minimallyEncoded': ';', 'string': ';'}, + {'fullyEncoded': '%3C', 'minimallyEncoded': '%3C', 'string': '<'}, + {'fullyEncoded': '%3D', 'minimallyEncoded': '=', 'string': '='}, + {'fullyEncoded': '%3E', 'minimallyEncoded': '%3E', 'string': '>'}, + {'fullyEncoded': '%3F', 'minimallyEncoded': '%3F', 'string': '?'}, + {'fullyEncoded': '%40', 'minimallyEncoded': '@', 'string': '@'}, + {'fullyEncoded': '%41', 'minimallyEncoded': 'A', 'string': 'A'}, + {'fullyEncoded': '%5A', 'minimallyEncoded': 'Z', 'string': 'Z'}, + {'fullyEncoded': '%5B', 'minimallyEncoded': '%5B', 'string': '['}, + {'fullyEncoded': '%5C', 'minimallyEncoded': '%5C', 'string': '\\'}, + {'fullyEncoded': '%5D', 'minimallyEncoded': '%5D', 'string': ']'}, + {'fullyEncoded': '%5E', 'minimallyEncoded': '%5E', 'string': '^'}, + {'fullyEncoded': '%5F', 'minimallyEncoded': '_', 'string': '_'}, + {'fullyEncoded': '%60', 'minimallyEncoded': '%60', 'string': '`'}, + {'fullyEncoded': '%61', 'minimallyEncoded': 'a', 'string': 'a'}, + {'fullyEncoded': '%7A', 'minimallyEncoded': 'z', 'string': 'z'}, + {'fullyEncoded': '%7B', 'minimallyEncoded': '%7B', 'string': '{'}, + {'fullyEncoded': '%7C', 'minimallyEncoded': '%7C', 'string': '|'}, + {'fullyEncoded': '%7D', 'minimallyEncoded': '%7D', 'string': '}'}, + {'fullyEncoded': '%7E', 'minimallyEncoded': '~', 'string': '~'}, + {'fullyEncoded': '%7F', 'minimallyEncoded': '%7F', 'string': '\u007f'}, { 'fullyEncoded': '%E8%87%AA%E7%94%B1', 'minimallyEncoded': '%E8%87%AA%E7%94%B1', - 'string': '\u81ea\u7531' - }, - { - 'fullyEncoded': '%F0%90%90%80', - 'minimallyEncoded': '%F0%90%90%80', - 'string': '\U00010400' + 'string': '\u81ea\u7531', }, + {'fullyEncoded': '%F0%90%90%80', 'minimallyEncoded': '%F0%90%90%80', 'string': '\U00010400'}, ] diff --git a/test/unit/v1/test_version_utils.py b/test/unit/v1/test_version_utils.py index 368259c8f..68c8b7309 100644 --- a/test/unit/v1/test_version_utils.py +++ b/test/unit/v1/test_version_utils.py @@ -22,12 +22,12 @@ class TestRenameArgument(TestBase): def test_warning(self): @rename_argument('aaa', 'bbb', '0.1.0', '0.2.0', current_version=self.VERSION) def easy(bbb): - """ easy docstring """ + """easy docstring""" return bbb # check that warning is not emitted too early with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always") + warnings.simplefilter('always') assert easy(5) == 5 assert easy(bbb=5) == 5 assert easy.__name__ == 'easy' @@ -35,21 +35,19 @@ def easy(bbb): assert len(w) == 0 with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always") + warnings.simplefilter('always') assert easy(aaa=5) == 5 assert len(w) == 1 assert issubclass(w[-1].category, DeprecationWarning) - assert str( - w[-1].message - ) == "'aaa' is a deprecated argument for 'easy' function/method - it was renamed to 'bbb' 
in version 0.1.0. Support for the old name is going to be dropped in 0.2.0.", str( - w[-1].message - ) + assert ( + str(w[-1].message) + == "'aaa' is a deprecated argument for 'easy' function/method - it was renamed to 'bbb' in version 0.1.0. Support for the old name is going to be dropped in 0.2.0." + ), str(w[-1].message) def test_outdated_replacement(self): with self.assertRaises( AssertionError, - msg= - f"rename_argument decorator is still used in version {self.VERSION} when old argument name 'aaa' was scheduled to be dropped in 0.1.2. It is time to remove the mapping.", + msg=f"rename_argument decorator is still used in version {self.VERSION} when old argument name 'aaa' was scheduled to be dropped in 0.1.2. It is time to remove the mapping.", ): @rename_argument('aaa', 'bbb', '0.1.0', '0.1.2', current_version=self.VERSION) @@ -61,8 +59,7 @@ def late(bbb): def test_future_replacement(self): with self.assertRaises( AssertionError, - msg= - "rename_argument decorator indicates that the replacement of argument 'aaa' should take place in the future version 0.2.0, while the current version is 0.2.2. It looks like should be _discouraged_ at this point and not _deprecated_ yet. Consider using 'discourage_argument' decorator instead." + msg="rename_argument decorator indicates that the replacement of argument 'aaa' should take place in the future version 0.2.0, while the current version is 0.2.2. It looks like should be _discouraged_ at this point and not _deprecated_ yet. Consider using 'discourage_argument' decorator instead.", ): @rename_argument('aaa', 'bbb', '0.2.0', '0.2.2', current_version=self.VERSION) @@ -74,8 +71,7 @@ def early(bbb): def test_inverted_versions(self): with self.assertRaises( AssertionError, - msg= - "rename_argument decorator is set to start renaming argument 'aaa' starting at version 0.2.2 and finishing in 0.2.0. It needs to start at a lower version and finish at a higher version." + msg="rename_argument decorator is set to start renaming argument 'aaa' starting at version 0.2.2 and finishing in 0.2.0. It needs to start at a lower version and finish at a higher version.", ): @rename_argument('aaa', 'bbb', '0.2.2', '0.2.0', current_version=self.VERSION) @@ -99,12 +95,11 @@ def old(bbb): return bbb with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always") + warnings.simplefilter('always') assert old(5) == 5 assert len(w) == 1 assert issubclass(w[-1].category, DeprecationWarning) - assert str( - w[-1].message - ) == "'old' is deprecated since version 0.1.0 - it was moved to 'new', please switch to use that. The proxy for the old name is going to be removed in 0.2.0.", str( - w[-1].message - ) + assert ( + str(w[-1].message) + == "'old' is deprecated since version 0.1.0 - it was moved to 'new', please switch to use that. The proxy for the old name is going to be removed in 0.2.0." 
+ ), str(w[-1].message) diff --git a/test/unit/v2/test_bucket.py b/test/unit/v2/test_bucket.py index 3e41f0d24..db0396925 100644 --- a/test/unit/v2/test_bucket.py +++ b/test/unit/v2/test_bucket.py @@ -9,13 +9,13 @@ ###################################################################### from __future__ import annotations -from test.helpers import patch_bind_params from unittest.mock import Mock import pytest from b2sdk import _v3 as v3 from b2sdk.v2 import B2Api, Bucket +from test.helpers import patch_bind_params @pytest.fixture @@ -25,25 +25,27 @@ def dummy_bucket(): def test_bucket__upload_file__supports_file_infos(dummy_bucket, file_info): """Test v2.Bucket.upload_file support of deprecated file_infos param""" - with patch_bind_params(v3.Bucket, 'upload_local_file') as mock_method,\ - pytest.warns(DeprecationWarning, match=r'deprecated argument'): + with patch_bind_params(v3.Bucket, 'upload_local_file') as mock_method, pytest.warns( + DeprecationWarning, match=r'deprecated argument' + ): dummy_bucket.upload_local_file( 'filename', 'filename', file_infos=file_info, ) - assert mock_method.get_bound_call_args()["file_info"] == file_info + assert mock_method.get_bound_call_args()['file_info'] == file_info assert 'file_infos' not in mock_method.call_args[1] def test_bucket__upload_bytes__supports_file_infos(dummy_bucket, file_info): """Test v2.Bucket.upload_bytes support of deprecated file_infos param""" - with patch_bind_params(dummy_bucket, 'upload') as mock_method,\ - pytest.warns(DeprecationWarning, match=r'deprecated argument'): + with patch_bind_params(dummy_bucket, 'upload') as mock_method, pytest.warns( + DeprecationWarning, match=r'deprecated argument' + ): dummy_bucket.upload_bytes( b'data', 'filename', file_infos=file_info, ) - assert mock_method.get_bound_call_args()["file_info"] == file_info + assert mock_method.get_bound_call_args()['file_info'] == file_info assert 'file_infos' not in mock_method.call_args[1] diff --git a/test/unit/v2/test_raw_api.py b/test/unit/v2/test_raw_api.py index 2e0869ef0..0417e3491 100644 --- a/test/unit/v2/test_raw_api.py +++ b/test/unit/v2/test_raw_api.py @@ -9,13 +9,13 @@ ###################################################################### from __future__ import annotations -from test.helpers import patch_bind_params from unittest.mock import Mock import pytest from b2sdk import _v3 as v3 from b2sdk.v2 import B2Http, B2RawHTTPApi +from test.helpers import patch_bind_params @pytest.fixture @@ -27,8 +27,9 @@ def test_b2_raw_http_api__get_upload_file_headers__supports_file_infos( dummy_b2_raw_http_api, file_info ): """Test v2.B2RawHTTPApi.get_upload_file_headers support of deprecated file_infos param""" - with patch_bind_params(v3.B2RawHTTPApi, 'get_upload_file_headers') as mock_method,\ - pytest.warns(DeprecationWarning, match=r'deprecated argument'): + with patch_bind_params(v3.B2RawHTTPApi, 'get_upload_file_headers') as mock_method, pytest.warns( + DeprecationWarning, match=r'deprecated argument' + ): dummy_b2_raw_http_api.get_upload_file_headers( 'upload_auth_token', 'file_name', @@ -41,14 +42,15 @@ def test_b2_raw_http_api__get_upload_file_headers__supports_file_infos( legal_hold=None, custom_upload_timestamp=None, ) - assert mock_method.get_bound_call_args()["file_info"] == file_info + assert mock_method.get_bound_call_args()['file_info'] == file_info assert 'file_infos' not in mock_method.call_args[1] def test_b2_raw_http_api__upload_file__supports_file_infos(dummy_b2_raw_http_api, file_info): """Test v2.B2RawHTTPApi.upload_file support of 
deprecated file_infos param""" - with patch_bind_params(v3.B2RawHTTPApi, 'upload_file') as mock_method,\ - pytest.warns(DeprecationWarning, match=r'deprecated argument'): + with patch_bind_params(v3.B2RawHTTPApi, 'upload_file') as mock_method, pytest.warns( + DeprecationWarning, match=r'deprecated argument' + ): dummy_b2_raw_http_api.upload_file( 'upload_id', 'upload_auth_token', @@ -59,5 +61,5 @@ def test_b2_raw_http_api__upload_file__supports_file_infos(dummy_b2_raw_http_api file_infos=file_info, data_stream='data_stream', ) - assert mock_method.get_bound_call_args()["file_info"] == file_info + assert mock_method.get_bound_call_args()['file_info'] == file_info assert 'file_infos' not in mock_method.call_args[1] diff --git a/test/unit/v2/test_session.py b/test/unit/v2/test_session.py index ed16b4355..7cef0e004 100644 --- a/test/unit/v2/test_session.py +++ b/test/unit/v2/test_session.py @@ -9,13 +9,13 @@ ###################################################################### from __future__ import annotations -from test.helpers import patch_bind_params from unittest.mock import Mock import pytest from b2sdk import _v3 as v3 from b2sdk.v2 import B2Session +from test.helpers import patch_bind_params @pytest.fixture @@ -25,8 +25,9 @@ def dummy_session(): def test_session__upload_file__supports_file_infos(dummy_session, file_info): """Test v2.B2Session.upload_file support of deprecated file_infos param""" - with patch_bind_params(v3.B2Session, 'upload_file') as mock_method,\ - pytest.warns(DeprecationWarning, match=r'deprecated argument'): + with patch_bind_params(v3.B2Session, 'upload_file') as mock_method, pytest.warns( + DeprecationWarning, match=r'deprecated argument' + ): dummy_session.upload_file( 'filename', 'filename', @@ -36,5 +37,5 @@ def test_session__upload_file__supports_file_infos(dummy_session, file_info): data_stream=Mock(), file_infos=file_info, ) - assert mock_method.get_bound_call_args()["file_info"] == file_info + assert mock_method.get_bound_call_args()['file_info'] == file_info assert 'file_infos' not in mock_method.call_args[1] diff --git a/test/unit/v_all/test_api.py b/test/unit/v_all/test_api.py index efb2552d3..e113dc201 100644 --- a/test/unit/v_all/test_api.py +++ b/test/unit/v_all/test_api.py @@ -63,20 +63,14 @@ class TestServices: DummyB, ], ], - ) # yapf: disable + ) def test_api_initialization(self, kwargs, _raw_api_class): self.account_info = InMemoryAccountInfo() self.cache = InMemoryCache() api_config = B2HttpApiConfig(_raw_api_class=_raw_api_class) - self.api = B2Api( - self.account_info, - self.cache, - api_config=api_config, - - **kwargs - ) # yapf: disable + self.api = B2Api(self.account_info, self.cache, api_config=api_config, **kwargs) assert self.api.account_info is self.account_info assert self.api.api_config is api_config @@ -164,4 +158,7 @@ def test_get_download_url_for_fileid(self): download_url = self.api.get_download_url_for_fileid('file-id') - assert download_url == 'http://download.example.com/b2api/v2/b2_download_file_by_id?fileId=file-id' + assert ( + download_url + == 'http://download.example.com/b2api/v2/b2_download_file_by_id?fileId=file-id' + ) diff --git a/test/unit/v_all/test_constants.py b/test/unit/v_all/test_constants.py index 6d51279cc..b1e776bee 100644 --- a/test/unit/v_all/test_constants.py +++ b/test/unit/v_all/test_constants.py @@ -16,21 +16,21 @@ @pytest.mark.apiver(from_ver=2) def test_public_constants(): assert set(dir(apiver_deps)) >= { - "ALL_CAPABILITIES", - "B2_ACCOUNT_INFO_DEFAULT_FILE", - "B2_ACCOUNT_INFO_ENV_VAR", - 
"B2_ACCOUNT_INFO_PROFILE_FILE", - "DEFAULT_MIN_PART_SIZE", - "DEFAULT_RECOMMENDED_UPLOAD_PART_SIZE", - "LARGE_FILE_SHA1", - "LIST_FILE_NAMES_MAX_LIMIT", - "SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER", - "SRC_LAST_MODIFIED_MILLIS", - "SSE_B2_AES", - "SSE_C_KEY_ID_FILE_INFO_KEY_NAME", - "SSE_NONE", - "UNKNOWN_KEY_ID", - "V", - "VERSION", - "XDG_CONFIG_HOME_ENV_VAR", + 'ALL_CAPABILITIES', + 'B2_ACCOUNT_INFO_DEFAULT_FILE', + 'B2_ACCOUNT_INFO_ENV_VAR', + 'B2_ACCOUNT_INFO_PROFILE_FILE', + 'DEFAULT_MIN_PART_SIZE', + 'DEFAULT_RECOMMENDED_UPLOAD_PART_SIZE', + 'LARGE_FILE_SHA1', + 'LIST_FILE_NAMES_MAX_LIMIT', + 'SERVER_DEFAULT_SYNC_ENCRYPTION_SETTINGS_PROVIDER', + 'SRC_LAST_MODIFIED_MILLIS', + 'SSE_B2_AES', + 'SSE_C_KEY_ID_FILE_INFO_KEY_NAME', + 'SSE_NONE', + 'UNKNOWN_KEY_ID', + 'V', + 'VERSION', + 'XDG_CONFIG_HOME_ENV_VAR', } diff --git a/test/unit/v_all/test_replication.py b/test/unit/v_all/test_replication.py index 8d82721a9..ee3185333 100644 --- a/test/unit/v_all/test_replication.py +++ b/test/unit/v_all/test_replication.py @@ -60,6 +60,7 @@ def test_setup_both(self): ) from pprint import pprint + pprint([k.as_dict() for k in self.api.list_keys()]) keymap = {k.key_name: k for k in self.api.list_keys()}