Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Replace yapf with ruff #528

Merged
merged 2 commits into from
Dec 28, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ In order to make it easier to contribute, core developers of this project:
* maintain a set of unit tests
* maintain a set of integration tests (run with a production cloud)
* maintain development automation tools using [nox](https://github.com/theacodes/nox) that can easily:
* format the code using [yapf](https://github.com/google/yapf) and [ruff](https://github.com/astral-sh/ruff)
* format the code using [ruff](https://github.com/astral-sh/ruff)
* runs linters to find subtle/potential issues with maintainability
* run the test suite on multiple Python versions using [pytest](https://github.com/pytest-dev/pytest)
* maintain Continuous Integration (by using GitHub Actions) that:
Expand Down
1 change: 1 addition & 0 deletions b2sdk/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,5 +10,6 @@
from __future__ import annotations

import b2sdk.version # noqa: E402

__version__ = b2sdk.version.VERSION
assert __version__ # PEP-0396
41 changes: 31 additions & 10 deletions b2sdk/_internal/account_info/abstract.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,8 +142,9 @@ def is_master_key(self) -> bool:
new_style_master_key_suffix = '0000000000'
if account_id == application_key_id:
return True # old style
if len(application_key_id
) == (3 + len(account_id) + len(new_style_master_key_suffix)): # 3 for cluster id
if len(application_key_id) == (
3 + len(account_id) + len(new_style_master_key_suffix)
): # 3 for cluster id
# new style
if application_key_id.endswith(account_id + new_style_master_key_suffix):
return True
Expand Down Expand Up @@ -320,9 +321,17 @@ def set_auth_data(
assert self.allowed_is_valid(allowed)

self._set_auth_data(
account_id, auth_token, api_url, download_url, recommended_part_size,
absolute_minimum_part_size, application_key, realm, s3_api_url, allowed,
application_key_id
account_id,
auth_token,
api_url,
download_url,
recommended_part_size,
absolute_minimum_part_size,
application_key,
realm,
s3_api_url,
allowed,
application_key_id,
)

@classmethod
Expand All @@ -338,15 +347,27 @@ def allowed_is_valid(cls, allowed):
:rtype: bool
"""
return (
('bucketId' in allowed) and ('bucketName' in allowed) and
((allowed['bucketId'] is not None) or (allowed['bucketName'] is None)) and
('capabilities' in allowed) and ('namePrefix' in allowed)
('bucketId' in allowed)
and ('bucketName' in allowed)
and ((allowed['bucketId'] is not None) or (allowed['bucketName'] is None))
and ('capabilities' in allowed)
and ('namePrefix' in allowed)
)

@abstractmethod
def _set_auth_data(
self, account_id, auth_token, api_url, download_url, recommended_part_size,
absolute_minimum_part_size, application_key, realm, s3_api_url, allowed, application_key_id
self,
account_id,
auth_token,
api_url,
download_url,
recommended_part_size,
absolute_minimum_part_size,
application_key,
realm,
s3_api_url,
allowed,
application_key_id,
):
"""
Actually store the auth data. Can assume that 'allowed' is present and valid.
Expand Down
7 changes: 5 additions & 2 deletions b2sdk/_internal/account_info/exception.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ class AccountInfoError(B2Error, metaclass=ABCMeta):
"""
Base class for all account info errors.
"""

pass


Expand All @@ -35,8 +36,10 @@ def __init__(self, file_name):
self.file_name = file_name

def __str__(self):
return f'Account info file ({self.file_name}) appears corrupted. ' \
f'Try removing and then re-authorizing the account.'
return (
f'Account info file ({self.file_name}) appears corrupted. '
f'Try removing and then re-authorizing the account.'
)


class MissingAccountData(AccountInfoError):
Expand Down
14 changes: 12 additions & 2 deletions b2sdk/_internal/account_info/in_memory.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,8 +60,18 @@ def _clear_in_memory_account_fields(self):
self._s3_api_url = None

def _set_auth_data(
self, account_id, auth_token, api_url, download_url, recommended_part_size,
absolute_minimum_part_size, application_key, realm, s3_api_url, allowed, application_key_id
self,
account_id,
auth_token,
api_url,
download_url,
recommended_part_size,
absolute_minimum_part_size,
application_key,
realm,
s3_api_url,
allowed,
application_key_id,
):
self._account_id = account_id
self._application_key_id = application_key_id
Expand Down
46 changes: 28 additions & 18 deletions b2sdk/_internal/account_info/sqlite_account_info.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,9 @@
B2_ACCOUNT_INFO_PROFILE_NAME_REGEXP = re.compile(r'[a-zA-Z0-9_\-]{1,64}')
XDG_CONFIG_HOME_ENV_VAR = 'XDG_CONFIG_HOME'

DEFAULT_ABSOLUTE_MINIMUM_PART_SIZE = 5000000 # this value is used ONLY in migrating db, and in v1 wrapper, it is not
DEFAULT_ABSOLUTE_MINIMUM_PART_SIZE = (
5000000 # this value is used ONLY in migrating db, and in v1 wrapper, it is not
)
# meant to be a default for other applications


Expand Down Expand Up @@ -162,8 +164,13 @@ def _validate_database(self, last_upgrade_to_run=None):
with open(self.filename, 'rb') as f:
data = json.loads(f.read().decode('utf-8'))
keys = [
'account_id', 'application_key', 'account_auth_token', 'api_url',
'download_url', 'minimum_part_size', 'realm'
'account_id',
'application_key',
'account_auth_token',
'api_url',
'download_url',
'minimum_part_size',
'realm',
]
if all(k in data for k in keys):
# remove the json file
Expand All @@ -184,7 +191,7 @@ def _validate_database(self, last_upgrade_to_run=None):
# new column absolute_minimum_part_size = DEFAULT_ABSOLUTE_MINIMUM_PART_SIZE
conn.execute(
insert_statement,
(*(data[k] for k in keys), DEFAULT_ABSOLUTE_MINIMUM_PART_SIZE)
(*(data[k] for k in keys), DEFAULT_ABSOLUTE_MINIMUM_PART_SIZE),
)
# all is happy now
return
Expand Down Expand Up @@ -287,23 +294,24 @@ def _create_tables(self, conn, last_upgrade_to_run):
self._ensure_update(3, ['ALTER TABLE account ADD COLUMN s3_api_url TEXT;'])
if 4 <= last_upgrade_to_run:
self._ensure_update(
4, [
"""
4,
[
f"""
CREATE TABLE
tmp_account (
account_id TEXT NOT NULL,
application_key TEXT NOT NULL,
account_auth_token TEXT NOT NULL,
api_url TEXT NOT NULL,
download_url TEXT NOT NULL,
absolute_minimum_part_size INT NOT NULL DEFAULT {},
absolute_minimum_part_size INT NOT NULL DEFAULT {DEFAULT_ABSOLUTE_MINIMUM_PART_SIZE},
recommended_part_size INT NOT NULL,
realm TEXT NOT NULL,
allowed TEXT,
account_id_or_app_key_id TEXT,
s3_api_url TEXT
);
""".format(DEFAULT_ABSOLUTE_MINIMUM_PART_SIZE),
""",
"""INSERT INTO tmp_account(
account_id,
application_key,
Expand Down Expand Up @@ -373,7 +381,7 @@ def _create_tables(self, conn, last_upgrade_to_run):
FROM tmp_account;
""",
'DROP TABLE tmp_account;',
]
],
)

def _ensure_update(self, update_number, update_commands: list[str]):
Expand All @@ -387,7 +395,7 @@ def _ensure_update(self, update_number, update_commands: list[str]):
conn.execute('BEGIN')
cursor = conn.execute(
'SELECT COUNT(*) AS count FROM update_done WHERE update_number = ?;',
(update_number,)
(update_number,),
)
update_count = cursor.fetchone()[0]
if update_count == 0:
Expand Down Expand Up @@ -433,7 +441,8 @@ def _set_auth_data(
"""

conn.execute(
insert_statement, (
insert_statement,
(
account_id,
application_key_id,
application_key,
Expand All @@ -445,7 +454,7 @@ def _set_auth_data(
realm,
json.dumps(allowed),
s3_api_url,
)
),
)

def set_auth_data_with_schema_0_for_test(
Expand Down Expand Up @@ -480,15 +489,16 @@ def set_auth_data_with_schema_0_for_test(
"""

conn.execute(
insert_statement, (
insert_statement,
(
account_id,
application_key,
auth_token,
api_url,
download_url,
minimum_part_size,
realm,
)
),
)

def get_application_key(self):
Expand Down Expand Up @@ -576,25 +586,25 @@ def _get_account_info_or_raise(self, column_name):
except Exception as e:
logger.exception(
'_get_account_info_or_raise encountered a problem while trying to retrieve "%s"',
column_name
column_name,
)
raise MissingAccountData(str(e))

def refresh_entire_bucket_name_cache(self, name_id_iterable):
with self._get_connection() as conn:
conn.execute('DELETE FROM bucket;')
for (bucket_name, bucket_id) in name_id_iterable:
for bucket_name, bucket_id in name_id_iterable:
conn.execute(
'INSERT INTO bucket (bucket_name, bucket_id) VALUES (?, ?);',
(bucket_name, bucket_id)
(bucket_name, bucket_id),
)

def save_bucket(self, bucket):
with self._get_connection() as conn:
conn.execute('DELETE FROM bucket WHERE bucket_id = ?;', (bucket.id_,))
conn.execute(
'INSERT INTO bucket (bucket_id, bucket_name) VALUES (?, ?);',
(bucket.id_, bucket.name)
(bucket.id_, bucket.name),
)

def remove_bucket_name(self, bucket_name):
Expand Down
1 change: 1 addition & 0 deletions b2sdk/_internal/account_info/upload_url_pool.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,7 @@ class UrlPoolAccountInfo(AbstractAccountInfo):
Implement part of :py:class:`AbstractAccountInfo` for upload URL pool management
with a simple, key-value storage, such as :py:class:`b2sdk.v2.UploadUrlPool`.
"""

# staticmethod is necessary here to avoid the first argument binding to the first argument (like ``partial(fun, arg)``)
BUCKET_UPLOAD_POOL_CLASS = staticmethod(
UploadUrlPool
Expand Down
39 changes: 24 additions & 15 deletions b2sdk/_internal/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,8 @@ def url_for_api(info, api_name):


class Services:
""" Gathers objects that provide high level logic over raw api usage. """
"""Gathers objects that provide high level logic over raw api usage."""

UPLOAD_MANAGER_CLASS = staticmethod(UploadManager)
COPY_MANAGER_CLASS = staticmethod(CopyManager)
DOWNLOAD_MANAGER_CLASS = staticmethod(DownloadManager)
Expand Down Expand Up @@ -133,6 +134,7 @@ class handles several things that simplify the task of uploading
The class also keeps a cache of information needed to access the
service, such as auth tokens and upload URLs.
"""

BUCKET_FACTORY_CLASS = staticmethod(BucketFactory)
BUCKET_CLASS = staticmethod(Bucket)
SESSION_CLASS = staticmethod(B2Session)
Expand Down Expand Up @@ -272,8 +274,12 @@ def create_bucket(
replication=replication,
)
bucket = self.BUCKET_FACTORY_CLASS.from_api_bucket_dict(self, response)
assert name == bucket.name, f'API created a bucket with different name than requested: {name} != {name}'
assert bucket_type == bucket.type_, f'API created a bucket with different type than requested: {bucket_type} != {bucket.type_}'
assert (
name == bucket.name
), f'API created a bucket with different name than requested: {name} != {bucket.name}'
assert (
bucket_type == bucket.type_
), f'API created a bucket with different type than requested: {bucket_type} != {bucket.type_}'
self.cache.save_bucket(bucket)
return bucket

Expand Down Expand Up @@ -389,8 +395,9 @@ def delete_bucket(self, bucket):
account_id = self.account_info.get_account_id()
self.session.delete_bucket(account_id, bucket.id_)

def list_buckets(self, bucket_name=None, bucket_id=None, *,
use_cache: bool = False) -> Sequence[Bucket]:
def list_buckets(
self, bucket_name=None, bucket_id=None, *, use_cache: bool = False
) -> Sequence[Bucket]:
"""
Call ``b2_list_buckets`` and return a list of buckets.

Expand Down Expand Up @@ -418,13 +425,14 @@ def list_buckets(self, bucket_name=None, bucket_id=None, *,
cached_list = self.cache.list_bucket_names_ids()
buckets = [
self.BUCKET_CLASS(self, cache_b_id, name=cached_b_name)
for cached_b_name, cache_b_id in cached_list if (
(bucket_name is None or bucket_name == cached_b_name) and
(bucket_id is None or bucket_id == cache_b_id)
for cached_b_name, cache_b_id in cached_list
if (
(bucket_name is None or bucket_name == cached_b_name)
and (bucket_id is None or bucket_id == cache_b_id)
)
]
if buckets:
logger.debug("Using cached bucket list as it is not empty")
logger.debug('Using cached bucket list as it is not empty')
return buckets

account_id = self.account_info.get_account_id()
Expand Down Expand Up @@ -494,8 +502,8 @@ def get_download_url_for_file_name(self, bucket_name, file_name):
:param str file_name: a file name
"""
self.check_bucket_name_restrictions(bucket_name)
return '{}/file/{}/{}'.format(
self.account_info.get_download_url(), bucket_name, b2_url_encode(file_name)
return (
f'{self.account_info.get_download_url()}/file/{bucket_name}/{b2_url_encode(file_name)}'
)

# keys
Expand Down Expand Up @@ -524,7 +532,7 @@ def create_key(
key_name=key_name,
valid_duration_seconds=valid_duration_seconds,
bucket_id=bucket_id,
name_prefix=name_prefix
name_prefix=name_prefix,
)

assert set(response['capabilities']) == set(capabilities)
Expand All @@ -551,8 +559,9 @@ def delete_key_by_id(self, application_key_id: str) -> ApplicationKey:
response = self.session.delete_key(application_key_id=application_key_id)
return ApplicationKey.from_api_response(response)

def list_keys(self, start_application_key_id: str | None = None
) -> Generator[ApplicationKey, None, None]:
def list_keys(
self, start_application_key_id: str | None = None
) -> Generator[ApplicationKey, None, None]:
"""
List application keys. Lazily perform requests to B2 cloud and return all keys.

Expand Down Expand Up @@ -603,7 +612,7 @@ def get_file_info(self, file_id: str) -> FileVersion:

def get_file_info_by_name(self, bucket_name: str, file_name: str) -> DownloadVersion:
"""
Gets info about a file version. Similar to `get_file_info` but
Gets info about a file version. Similar to `get_file_info` but
takes the bucket name and file name instead of file id.

:param str bucket_name: The name of the bucket where the file resides.
Expand Down
Loading
Loading