From 3d9261e6028c34cbe25383b95756e83043ae20ba Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Mon, 28 Aug 2023 19:14:19 -0400 Subject: [PATCH] Fix the SQL constraint violation logic (#3528) * Fix the SQL constraint violation logic PBENCH-1251 This is mostly a fix for the ops review problem discovered Thursday, that our carefully unit-tested logic to decode SQLAlchemy `IntegrityError` instances to identify the specific violated constraint (specifically, duplicate vs null) doesn't work in a production environment. The root problem is that the text strings reported differ between the unit test environment (`sqlite3` engine) and the production environment (`postgresql` engine). In order to consolidate on the central constraint error decoder, (which was why this wasn't done earlier), we need all SQL-related exception constructors to be consistent, but I didn't like dropping all context-specific arguments. Instead I've adopted a `**kwargs` model where the decoder passes through arbitrary parameters, which are captured in the exception to provide context. This also adopts a more "pythonic exception" model where `e.args` captures a statically formatted message string and allows the base `Exception.__str__` to provide formatting. I've done this for all the SQLAlchemy model objects, but nowhere else. And to avoid complication in many places where we use `str(exception)`, I'm not including the `kwargs` or raw `cause` in the `args` tuple. I played with that idea, but since `Exception.__str__` formats the entire tuple, the results are ugly and also more difficult to meaningfully compare in tests. We use `str(exception)` a lot both internally and in tests. 
--- lib/pbench/server/database/models/__init__.py | 41 ++-- lib/pbench/server/database/models/api_keys.py | 47 ++--- lib/pbench/server/database/models/audit.py | 77 +++----- lib/pbench/server/database/models/datasets.py | 179 +++++++++--------- .../server/database/models/index_map.py | 74 +++----- .../server/database/models/server_settings.py | 91 ++++----- .../server/database/models/templates.py | 91 ++++----- lib/pbench/server/database/models/users.py | 73 +++---- .../unit/server/database/test_audit_db.py | 17 +- .../database/test_datasets_metadata_db.py | 4 +- .../unit/server/database/test_index_map_db.py | 33 +++- .../server/database/test_server_settings.py | 2 +- .../unit/server/database/test_template_db.py | 3 +- .../unit/server/database/test_users_db.py | 13 +- lib/pbench/test/unit/server/test_upload.py | 6 +- 15 files changed, 346 insertions(+), 405 deletions(-) diff --git a/lib/pbench/server/database/models/__init__.py b/lib/pbench/server/database/models/__init__.py index 4240b0698b..60a2e13e7b 100644 --- a/lib/pbench/server/database/models/__init__.py +++ b/lib/pbench/server/database/models/__init__.py @@ -1,5 +1,5 @@ import datetime -from typing import Callable +from typing import Callable, Optional from sqlalchemy import DateTime from sqlalchemy.exc import IntegrityError @@ -54,24 +54,37 @@ def process_result_value(self, value, dialect): return value -def decode_integrity_error( - exception: IntegrityError, on_null: Callable, on_duplicate: Callable +def decode_sql_error( + exception: Exception, + on_null: Callable[[Exception], Exception], + on_duplicate: Callable[[Exception], Exception], + fallback: Optional[Callable[[Exception], Exception]] = None, + **kwargs ) -> Exception: - """Decode a SQLAlchemy IntegrityError to look for a recognizable UNIQUE - or NOT NULL constraint violation. + """Analyze an exception for a SQL constraint violation - Return the original exception if it doesn't match. 
+ Analyzes SQLAlchemy IntegrityError instances for NOT NULL and UNIQUE + KEY constraints, constructing and returning an appropriate exception + instance. If the exception doesn't match a recognized SQL constraint, + construct and return a fallback exception instance if specified or the + original exception. Args: - exception : An IntegrityError to decode + exception: An exception to decode + on_null: Exception class to build if null constraint + on_duplicate: Exception class to build if duplicate constraint + fallback: Exception class to build otherwise + kwargs: additional arguments passed to exception constructors Returns: - a more specific exception, or the original if decoding fails + a more specific exception, or the original if no matches are found and + no fallback template is provided. """ - cause = exception.orig.args[-1] - if "UNIQUE constraint" in cause: - return on_duplicate(cause) - elif "NOT NULL constraint" in cause: - return on_null(cause) - return exception + if isinstance(exception, IntegrityError): + cause = exception.orig.args[-1].lower() + if "unique constraint" in cause: + return on_duplicate(exception, **kwargs) + elif "not null constraint" in cause: + return on_null(exception, **kwargs) + return exception if not fallback else fallback(exception, **kwargs) diff --git a/lib/pbench/server/database/models/api_keys.py b/lib/pbench/server/database/models/api_keys.py index 755bc9771c..29028a6f53 100644 --- a/lib/pbench/server/database/models/api_keys.py +++ b/lib/pbench/server/database/models/api_keys.py @@ -7,7 +7,7 @@ from pbench.server import JSONOBJECT from pbench.server.database.database import Database -from pbench.server.database.models import decode_integrity_error, TZDateTime +from pbench.server.database.models import decode_sql_error, TZDateTime from pbench.server.database.models.users import User # Module private constants @@ -17,31 +17,34 @@ class APIKeyError(Exception): """A base class for errors reported by the APIKey class.""" - 
def __init__(self, message): - self.message = message + pass - def __str__(self): - return repr(self.message) + +class APIKeySqlError(APIKeyError): + """Report a generic SQL error""" + + def __init__(self, cause: Exception, **kwargs): + super().__init__(f"API key SQL error: '{cause}' {kwargs}") + self.cause = cause + self.kwargs = kwargs class DuplicateApiKey(APIKeyError): """Attempt to commit a duplicate unique value.""" - def __init__(self, cause: str): + def __init__(self, cause: Exception, **kwargs): + super().__init__(f"API key duplicate key error: '{cause}' {kwargs}") self.cause = cause - - def __str__(self) -> str: - return f"Duplicate api_key: {self.cause}" + self.kwargs = kwargs class NullKey(APIKeyError): """Attempt to commit an APIkey row with an empty required column.""" - def __init__(self, cause: str): + def __init__(self, cause: Exception, **kwargs): + super().__init__(f"API key null key error: '{cause}' {kwargs}") self.cause = cause - - def __str__(self) -> str: - return f"Missing required value: {self.cause}" + self.kwargs = kwargs class APIKey(Database.Base): @@ -68,14 +71,14 @@ def add(self): Database.db_session.commit() except Exception as e: Database.db_session.rollback() - self.logger.error("Can't add {} to DB: {}", str(self), str(e)) - decode_exc = decode_integrity_error( - e, on_duplicate=DuplicateApiKey, on_null=NullKey - ) - if decode_exc is e: - raise APIKeyError(str(e)) from e - else: - raise decode_exc from e + raise decode_sql_error( + e, + on_duplicate=DuplicateApiKey, + on_null=NullKey, + fallback=APIKeySqlError, + operation="add", + key=self, + ) from e @staticmethod def query(**kwargs) -> Optional["APIKey"]: @@ -99,7 +102,7 @@ def delete(self): Database.db_session.commit() except Exception as e: Database.db_session.rollback() - raise APIKeyError(f"Error deleting api_key from db : {e}") from e + raise APIKeySqlError(e, operation="delete", key=self) from e def as_json(self) -> JSONOBJECT: """Return a JSON object for this APIkey 
object. diff --git a/lib/pbench/server/database/models/audit.py b/lib/pbench/server/database/models/audit.py index 9739d2eb45..0fb49a1dd4 100644 --- a/lib/pbench/server/database/models/audit.py +++ b/lib/pbench/server/database/models/audit.py @@ -3,12 +3,12 @@ from typing import Optional from sqlalchemy import Column, Enum, Integer, String -from sqlalchemy.exc import IntegrityError, SQLAlchemyError +from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.sql.sqltypes import JSON from pbench.server import JSONOBJECT, OperationCode from pbench.server.database.database import Database -from pbench.server.database.models import TZDateTime +from pbench.server.database.models import decode_sql_error, TZDateTime from pbench.server.database.models.datasets import Dataset from pbench.server.database.models.users import User @@ -19,6 +19,8 @@ class AuditError(Exception): It is never raised directly, but may be used in "except" clauses. """ + pass + class AuditSqlError(AuditError): """SQLAlchemy errors reported through Audit operations. @@ -27,35 +29,28 @@ class AuditSqlError(AuditError): key; the cause will specify the original SQLAlchemy exception. 
""" - def __init__(self, operation: str, params: JSONOBJECT, cause: str): - self.operation = operation - self.params = params + def __init__(self, cause: Exception, **kwargs): + super().__init__(f"SQL error on {kwargs}: '{cause}'") self.cause = cause - - def __str__(self) -> str: - return f"Error {self.operation} {self.params!r}: {self.cause}" + self.kwargs = kwargs class AuditDuplicate(AuditError): """Attempt to commit a duplicate unique value.""" - def __init__(self, audit: "Audit", cause: str): - self.audit = audit + def __init__(self, cause: Exception, **kwargs): + super().__init__(f"Duplicate key in {kwargs}: '{cause}'") self.cause = cause - - def __str__(self) -> str: - return f"Duplicate audit setting in {self.audit.as_json()}: {self.cause}" + self.kwargs = kwargs class AuditNullKey(AuditError): """Attempt to commit an Audit row with an empty required column.""" - def __init__(self, audit: "Audit", cause: str): - self.audit = audit + def __init__(self, cause: Exception, **kwargs): + super().__init__(f"Missing required key in {kwargs}: '{cause}'") self.cause = cause - - def __str__(self) -> str: - return f"Missing required key in {self.audit.as_json()}: {self.cause}" + self.kwargs = kwargs class AuditType(enum.Enum): @@ -333,40 +328,11 @@ def query( audit = query.order_by(Audit.timestamp).all() except SQLAlchemyError as e: - args = { - k: v - for k, v in ( - ("start", start), - ("end", end), - ("dataset", dataset), - *kwargs.items(), - ) - if v is not None - } - raise AuditSqlError("finding", args, str(e)) from e + raise AuditSqlError( + e, operation="query", start=start, end=end, dataset=dataset, **kwargs + ) from e return audit - def _decode(self, exception: IntegrityError) -> Exception: - """Decode a SQLAlchemy IntegrityError to look for a recognizable UNIQUE - or NOT NULL constraint violation. - - Return the original exception if it doesn't match. 
- - Args: - exception : An IntegrityError to decode - - Returns: - a more specific exception, or the original if decoding fails - """ - # Postgres engine returns (code, message) but sqlite3 engine only - # returns (message); so always take the last element. - cause = exception.orig.args[-1] - if "UNIQUE constraint" in cause: - return AuditDuplicate(self, cause) - elif "NOT NULL constraint" in cause: - return AuditNullKey(self, cause) - return exception - def add(self): """Add the Audit object to the database.""" try: @@ -374,9 +340,14 @@ def add(self): Database.db_session.commit() except Exception as e: Database.db_session.rollback() - if isinstance(e, IntegrityError): - raise self._decode(e) from e - raise AuditSqlError("adding", self.as_json(), str(e)) from e + raise decode_sql_error( + e, + on_duplicate=AuditDuplicate, + on_null=AuditNullKey, + fallback=AuditSqlError, + operation="add", + audit=self, + ) from e def as_json(self) -> JSONOBJECT: """Return a JSON object for this Audit object. diff --git a/lib/pbench/server/database/models/datasets.py b/lib/pbench/server/database/models/datasets.py index b7754a8f0c..785b8fcb14 100644 --- a/lib/pbench/server/database/models/datasets.py +++ b/lib/pbench/server/database/models/datasets.py @@ -7,11 +7,11 @@ from dateutil import parser as date_parser from sqlalchemy import Column, Enum, event, ForeignKey, Integer, JSON, String, Text -from sqlalchemy.exc import DataError, IntegrityError, SQLAlchemyError +from sqlalchemy.exc import DataError, SQLAlchemyError from sqlalchemy.orm import Query, relationship, validates from pbench.server.database.database import Database -from pbench.server.database.models import TZDateTime +from pbench.server.database.models import decode_sql_error, TZDateTime from pbench.server.database.models.server_settings import ( OPTION_DATASET_LIFETIME, ServerSetting, @@ -25,51 +25,47 @@ class DatasetError(Exception): It is never raised directly, but may be used in "except" clauses. 
""" + pass + class DatasetBadName(DatasetError): """Specified filename does not follow Pbench tarball naming rules.""" def __init__(self, name: Path): + super().__init__( + f"File name {name!r} does not end in {Dataset.TARBALL_SUFFIX!r}" + ) self.name: str = str(name) - def __str__(self) -> str: - return f"File name {self.name!r} does not end in {Dataset.TARBALL_SUFFIX!r}" - class DatasetSqlError(DatasetError): """SQLAlchemy errors reported through Dataset operations. The exception will identify the name of the target dataset, along with the - operation being attempted; the __cause__ will specify the original - SQLAlchemy exception. + operation being attempted; the cause is the original SQLAlchemy exception. """ - def __init__(self, operation: str, **kwargs): - self.operation = operation - self.kwargs = [f"{key}={value}" for key, value in kwargs.items()] - - def __str__(self) -> str: - return f"Error {self.operation} dataset {'|'.join(self.kwargs)}" + def __init__(self, cause: Exception, **kwargs): + super().__init__(f"SQL error on {kwargs.get('dataset')}: '{cause}'") + self.cause = cause + self.kwargs = kwargs class DatasetDuplicate(DatasetError): """Attempt to create a Dataset that already exists.""" - def __init__(self, name: str): - self.name = name - - def __str__(self): - return f"Duplicate dataset {self.name!r}" + def __init__(self, cause: Exception, **kwargs): + super().__init__(f"Duplicate dataset {kwargs.get('dataset')}: '{cause}'") + self.cause = cause + self.kwargs = kwargs class DatasetNotFound(DatasetError): """Attempt to locate a Dataset that doesn't exist.""" def __init__(self, **kwargs): - self.kwargs = [f"{key}={value}" for key, value in kwargs.items()] - - def __str__(self) -> str: - return f"No dataset {'|'.join(self.kwargs)}" + super().__init__(f"Dataset not found for {kwargs}") + self.kwargs = kwargs class DatasetBadParameterType(DatasetError): @@ -81,13 +77,13 @@ class DatasetBadParameterType(DatasetError): """ def __init__(self, bad_value: 
Any, expected_type: Any): - self.bad_value = bad_value self.expected_type = ( expected_type.__name__ if isinstance(expected_type, type) else expected_type ) - - def __str__(self) -> str: - return f'Value "{self.bad_value}" ({type(self.bad_value)}) is not a {self.expected_type}' + super().__init__( + f'Value "{bad_value}" ({type(bad_value).__name__}) is not a {self.expected_type}' + ) + self.bad_value = bad_value class MetadataError(DatasetError): @@ -96,29 +92,21 @@ class MetadataError(DatasetError): It is never raised directly, but may be used in "except" clauses. """ - def __init__(self, dataset: "Dataset", key: str): - self.dataset = dataset - self.key = key - - def __str__(self) -> str: - return f"Generic error on {self.dataset} key {self.key}" + pass class MetadataSqlError(MetadataError): """SQLAlchemy errors reported through Metadata operations. The exception will identify the dataset and the metadata key, along with - the operation being attempted; the __cause__ will specify the original - SQLAlchemy exception. + the operation being attempted; the cause is the original SQLAlchemy + exception. 
""" - def __init__(self, operation: str, dataset: "Dataset", key: str): - self.operation = operation - super().__init__(dataset, key) - - def __str__(self) -> str: - ds = str(self.dataset) if self.dataset else "no dataset" - return f"Error {self.operation} {ds} key {self.key}" + def __init__(self, cause: Exception, **kwargs): + super().__init__(f"Metadata SQL error '{cause}': {kwargs}") + self.cause = cause + self.kwargs = kwargs class MetadataNotFound(MetadataError): @@ -129,10 +117,9 @@ class MetadataNotFound(MetadataError): """ def __init__(self, dataset: "Dataset", key: str): - super().__init__(dataset, key) - - def __str__(self) -> str: - return f"No metadata {self.key} for {self.dataset}" + super().__init__(f"No metadata {key} for {dataset}") + self.dataset = dataset + self.key = key class MetadataBadStructure(MetadataError): @@ -148,12 +135,13 @@ class MetadataBadStructure(MetadataError): """ def __init__(self, dataset: "Dataset", path: str, element: str): - super().__init__(dataset, path) + super().__init__( + f"Key {element!r} value for {path!r} in {dataset} is not a JSON object" + ) + self.dataset = dataset + self.path = path self.element = element - def __str__(self) -> str: - return f"Key {self.element!r} value for {self.key!r} in {self.dataset} is not a JSON object" - class MetadataBadValue(MetadataError): """An unsupported value was specified for a special metadata key. 
@@ -176,16 +164,17 @@ def __init__( value: Metadata key value expected: The expected metadata value type """ - super().__init__(dataset, key) + super().__init__( + ( + f"Metadata key {key!r} value {value!r} for dataset " + f"{str(dataset) + ' ' if dataset else ''}must be a {expected}" + ) + ) + self.dataset = dataset + self.key = key self.value = value self.expected = expected - def __str__(self) -> str: - return ( - f"Metadata key {self.key!r} value {self.value!r} for dataset" - f"{' ' + str(self.dataset) if self.dataset else ''} must be a {self.expected}" - ) - class MetadataKeyError(MetadataError): """A base class for metadata key errors in the context of Metadata errors @@ -194,16 +183,16 @@ class MetadataKeyError(MetadataError): It is never raised directly, but may be used in "except" clauses. """ + pass + class MetadataMissingParameter(MetadataKeyError): """A Metadata required parameter was not specified.""" def __init__(self, what: str): + super().__init__(f"Metadata must specify a {what}") self.what = what - def __str__(self) -> str: - return f"Metadata must specify a {self.what}" - class MetadataBadKey(MetadataKeyError): """An unsupported metadata key was specified. @@ -212,15 +201,12 @@ class MetadataBadKey(MetadataKeyError): """ def __init__(self, key: str): + super().__init__(f"Metadata key {key!r} is not supported") self.key = key - def __str__(self) -> str: - return f"Metadata key {self.key!r} is not supported" - class MetadataProtectedKey(MetadataKeyError): - """A metadata key was specified that cannot be modified in the current - context. + """A metadata key cannot be modified in the current context. Usually an internally reserved key that was referenced in an external client API. 
@@ -229,11 +215,9 @@ class MetadataProtectedKey(MetadataKeyError): """ def __init__(self, key: str): + super().__init__(f"Metadata key {key} cannot be modified by client") self.key = key - def __str__(self) -> str: - return f"Metadata key {self.key} cannot be modified by client" - class MetadataMissingKeyValue(MetadataKeyError): """A value must be specified for the metadata key. @@ -242,11 +226,9 @@ class MetadataMissingKeyValue(MetadataKeyError): """ def __init__(self, key: str): + super().__init__(f"Metadata key {key} value is required") self.key = key - def __str__(self) -> str: - return f"Metadata key {self.key} value is required" - class MetadataDuplicateKey(MetadataError): """An attempt to add a Metadata key that already exists on the dataset. @@ -256,10 +238,9 @@ class MetadataDuplicateKey(MetadataError): """ def __init__(self, dataset: "Dataset", key: str): - super().__init__(dataset, key) - - def __str__(self) -> str: - return f"{self.dataset} already has metadata key {self.key}" + super().__init__(f"{dataset} already has metadata key {key}") + self.dataset = dataset + self.key = key class Dataset(Database.Base): @@ -396,7 +377,7 @@ def query(**kwargs) -> "Dataset": try: dataset = Database.db_session.query(Dataset).filter_by(**kwargs).first() except SQLAlchemyError as e: - raise DatasetSqlError("querying", **kwargs) from e + raise DatasetSqlError(e, operation="querying", **kwargs) from e if dataset is None: raise DatasetNotFound(**kwargs) @@ -446,14 +427,16 @@ def add(self): try: Database.db_session.add(self) Database.db_session.commit() - except IntegrityError as e: - Database.db_session.rollback() - self.logger.warning("Duplicate dataset {}: {}", self.name, e) - raise DatasetDuplicate(self.name) from None except Exception as e: Database.db_session.rollback() - self.logger.error("Can't add {} to DB: {}", str(self), str(e)) - raise DatasetSqlError("adding", name=self.name) from e + raise decode_sql_error( + e, + on_duplicate=DatasetDuplicate, + 
on_null=DatasetSqlError, + fallback=DatasetSqlError, + operation="add", + dataset=self, + ) from e def update(self): """Update the database row with the modified version of the Dataset @@ -463,8 +446,14 @@ def update(self): Database.db_session.commit() except Exception as e: Database.db_session.rollback() - self.logger.error("Can't update {} in DB: {}", str(self), str(e)) - raise DatasetSqlError("updating", name=self.name) from e + raise decode_sql_error( + e, + on_duplicate=DatasetDuplicate, + on_null=DatasetSqlError, + fallback=DatasetSqlError, + operation="update", + dataset=self, + ) from e def delete(self): """Delete the Dataset from the database.""" @@ -473,8 +462,7 @@ def delete(self): Database.db_session.commit() except Exception as e: Database.db_session.rollback() - self.logger.error("Can't delete {} in DB: {}", str(self), str(e)) - raise DatasetSqlError("updating", name=self.name) from e + raise DatasetSqlError(e, operation="delete", dataset=self) from e class OperationName(enum.Enum): @@ -1091,7 +1079,7 @@ def get(dataset: Dataset, key: str, user: Optional[User] = None) -> "Metadata": meta = __class__._query(dataset, key, user).first() except SQLAlchemyError as e: Metadata.logger.error("Can't get {}>>{} from DB: {}", dataset, key, str(e)) - raise MetadataSqlError("getting", dataset, key) from e + raise MetadataSqlError(e, operation="get", dataset=dataset, key=key) from e else: if meta is None: raise MetadataNotFound(dataset, key) @@ -1115,7 +1103,9 @@ def remove(dataset: Dataset, key: str, user: Optional[User] = None): Metadata.logger.error( "Can't remove {}>>{} from DB: {}", dataset, key, str(e) ) - raise MetadataSqlError("deleting", dataset, key) from e + raise MetadataSqlError( + e, operation="delete", dataset=dataset, key=key + ) from e def __str__(self) -> str: return f"{self.dataset}>>{self.key}" @@ -1138,9 +1128,10 @@ def add(self, dataset: Dataset): Database.db_session.commit() except Exception as e: Database.db_session.rollback() - 
self.logger.error("Can't add {}>>{} to DB: {}", dataset, self.key, str(e)) dataset.metadatas.remove(self) - raise MetadataSqlError("adding", dataset, self.key) from e + raise MetadataSqlError( + e, operation="add", dataset=dataset, key=self.key + ) from e def update(self): """Update the database with the modified Metadata.""" @@ -1148,8 +1139,9 @@ def update(self): Database.db_session.commit() except Exception as e: Database.db_session.rollback() - self.logger.error("Can't update {} in DB: {}", self, str(e)) - raise MetadataSqlError("updating", self.dataset, self.key) from e + raise MetadataSqlError( + e, operation="update", dataset=self.dataset, key=self.key + ) from e def delete(self): """Remove the Metadata from the database.""" @@ -1158,8 +1150,9 @@ def delete(self): Database.db_session.commit() except Exception as e: Database.db_session.rollback() - self.logger.error("Can't delete {} from DB: {}", self, str(e)) - raise MetadataSqlError("deleting", self.dataset, self.key) from e + raise MetadataSqlError( + e, operation="delete", dataset=self.dataset, key=self.key + ) from e @event.listens_for(Metadata, "init") diff --git a/lib/pbench/server/database/models/index_map.py b/lib/pbench/server/database/models/index_map.py index ad1d6726f6..435965257c 100644 --- a/lib/pbench/server/database/models/index_map.py +++ b/lib/pbench/server/database/models/index_map.py @@ -3,10 +3,11 @@ from typing import Iterator, NewType from sqlalchemy import Column, ForeignKey, Integer, JSON, String -from sqlalchemy.exc import IntegrityError, SQLAlchemyError +from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm import relationship from pbench.server.database.database import Database +from pbench.server.database.models import decode_sql_error from pbench.server.database.models.datasets import Dataset @@ -26,30 +27,33 @@ class IndexMapSqlError(IndexMapError): along with the operation being attempted. 
""" - def __init__(self, operation: str, dataset: Dataset, name: str, cause: str): - self.dataset = dataset.name if dataset else "unknown" - super().__init__(f"Error {operation} index {self.dataset}:{name}: {cause}") - self.operation = operation - self.name = name + def __init__(self, cause: Exception, **kwargs): + super().__init__( + f"Index SQL error on {kwargs.get('operation')} " + f"{kwargs.get('dataset')}:{kwargs.get('name')}: '{cause}'" + ) self.cause = cause + self.kwargs = kwargs class IndexMapDuplicate(IndexMapError): """Attempt to commit a duplicate IndexMap id.""" - def __init__(self, name: str, cause: str): - super().__init__(f"Duplicate index map {name!r}: {cause!r}") - self.name = name + def __init__(self, cause: Exception, **kwargs): + super().__init__(f"Duplicate index map {kwargs.get('name')!r}: '{cause}'") self.cause = cause + self.kwargs = kwargs class IndexMapMissingParameter(IndexMapError): """Attempt to commit a IndexMap with missing parameters.""" - def __init__(self, name: str, cause: str): - super().__init__(f"Missing required parameters in {name!r}: {cause}") - self.name = name + def __init__(self, cause: Exception, **kwargs): + super().__init__( + f"Missing required parameters in {kwargs.get('name')!r}: '{cause}'" + ) self.cause = cause + self.kwargs = kwargs @dataclass @@ -122,7 +126,7 @@ def create(cls, dataset: Dataset, map: IndexMapType): try: Database.db_session.add_all(instances) except Exception as e: - raise IndexMapSqlError("add_all", dataset, "all", e) + raise IndexMapSqlError(e, operation="create", dataset=dataset, name="all") cls.commit(dataset, "create") @@ -199,7 +203,7 @@ def merge(cls, dataset: Dataset, merge_map: IndexMapType): x.extend(merge_map[r][i]) model.documents = x except SQLAlchemyError as e: - raise IndexMapSqlError("merge", dataset, "all", str(e)) + raise IndexMapSqlError(e, operation="merge", dataset=dataset, name="all") cls.commit(dataset, "merge") @@ -224,7 +228,7 @@ def indices(dataset: Dataset, root: str) 
-> Iterator[str]: .all() ) except SQLAlchemyError as e: - raise IndexMapSqlError("finding", dataset, root, str(e)) + raise IndexMapSqlError(e, operation="indices", dataset=dataset, name=root) return (i.index for i in map) @@ -247,7 +251,7 @@ def exists(dataset: Dataset) -> bool: ) return bool(c) except SQLAlchemyError as e: - raise IndexMapSqlError("checkexist", dataset, "any", str(e)) + raise IndexMapSqlError(e, operation="exists", dataset=dataset, name="any") @staticmethod def stream(dataset: Dataset) -> Iterator[IndexStream]: @@ -270,7 +274,7 @@ def stream(dataset: Dataset) -> Iterator[IndexStream]: .all() ) except SQLAlchemyError as e: - raise IndexMapSqlError("streaming", dataset, "all", str(e)) + raise IndexMapSqlError(e, operation="stream", dataset=dataset, name="all") for m in indices: for id in m.documents: @@ -287,37 +291,19 @@ def __str__(self) -> str: f"{self.dataset.name} [{self.root}:{self.index}]: {len(self.documents)} IDs" ) - @staticmethod - def _decode(index: str, exception: IntegrityError) -> Exception: - """ - Decode a SQLAlchemy IntegrityError to look for a recognizable UNIQUE - or NOT NULL constraint violation. Return the original exception if - it doesn't match. - - Args: - index: The index name - exception: An IntegrityError to decode - - Returns: - a more specific exception, or the original if decoding fails - """ - # Postgres engine returns (code, message) but sqlite3 engine only - # returns (message); so always take the last element. 
- cause = exception.orig.args[-1] - if "UNIQUE constraint" in cause: - return IndexMapDuplicate(index, cause) - elif "NOT NULL constraint" in cause: - return IndexMapMissingParameter(index, cause) - return exception - @classmethod def commit(cls, dataset: Dataset, operation: str): """Commit changes to the database.""" try: Database.db_session.commit() - except IntegrityError as e: - Database.db_session.rollback() - raise cls._decode("any", e) except Exception as e: Database.db_session.rollback() - raise IndexMapSqlError(operation, dataset, "any", str(e)) + raise decode_sql_error( + e, + on_duplicate=IndexMapDuplicate, + on_null=IndexMapMissingParameter, + fallback=IndexMapSqlError, + operation=operation, + dataset=dataset, + name="all", + ) from e diff --git a/lib/pbench/server/database/models/server_settings.py b/lib/pbench/server/database/models/server_settings.py index 461ca4b478..f99f18398e 100644 --- a/lib/pbench/server/database/models/server_settings.py +++ b/lib/pbench/server/database/models/server_settings.py @@ -11,11 +11,12 @@ from typing import Optional from sqlalchemy import Column, Integer, String -from sqlalchemy.exc import IntegrityError, SQLAlchemyError +from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.sql.sqltypes import JSON from pbench.server import JSONOBJECT, JSONVALUE from pbench.server.database.database import Database +from pbench.server.database.models import decode_sql_error class ServerSettingError(Exception): @@ -24,6 +25,8 @@ class ServerSettingError(Exception): It is never raised directly, but may be used in "except" clauses. """ + pass + class ServerSettingSqlError(ServerSettingError): """SQLAlchemy errors reported through ServerSetting operations. @@ -32,64 +35,57 @@ class ServerSettingSqlError(ServerSettingError): key; the cause will specify the original SQLAlchemy exception. 
""" - def __init__(self, operation: str, name: str, cause: str): - self.operation = operation - self.name = name + def __init__(self, cause: Exception, **kwargs): + super().__init__( + f"Error on {kwargs.get('operation')} index {kwargs.get('key')!r}: '{cause}'" + ) self.cause = cause - - def __str__(self) -> str: - return f"Error {self.operation} index {self.name!r}: {self.cause}" + self.kwargs = kwargs class ServerSettingDuplicate(ServerSettingError): """Attempt to commit a duplicate ServerSetting.""" - def __init__(self, name: str, cause: str): - self.name = name + def __init__(self, cause: Exception, **kwargs): + super().__init__(f"Duplicate server setting {kwargs.get('key')!r}: '{cause}'") self.cause = cause - - def __str__(self) -> str: - return f"Duplicate server setting {self.name!r}: {self.cause}" + self.kwargs = kwargs class ServerSettingNullKey(ServerSettingError): """Attempt to commit a ServerSetting with an empty key.""" - def __init__(self, name: str, cause: str): - self.name = name + def __init__(self, cause: Exception, **kwargs): + super().__init__(f"Missing key value in {kwargs.get('key')!r}: '{cause}'") self.cause = cause - - def __str__(self) -> str: - return f"Missing key value in {self.name!r}: {self.cause}" + self.kwargs = kwargs class ServerSettingMissingKey(ServerSettingError): """Attempt to set a ServerSetting with a missing key.""" - def __str__(self) -> str: - return "Missing server setting key name" + def __init__(self): + super().__init__("Missing server setting key name") class ServerSettingBadKey(ServerSettingError): """Attempt to commit a ServerSetting with a bad key.""" def __init__(self, key: str): + super().__init__(f"Server setting key {key!r} is unknown") self.key = key - def __str__(self) -> str: - return f"Server setting key {self.key!r} is unknown" - class ServerSettingBadValue(ServerSettingError): """Attempt to assign a bad value to a server setting option.""" def __init__(self, key: str, value: JSONVALUE): + 
super().__init__( + f"Unsupported value for server setting key {key!r} ({value!r})" + ) self.key = key self.value = value - def __str__(self) -> str: - return f"Unsupported value for server setting key {self.key!r} ({self.value!r})" - # Formal timedelta syntax is "[D day[s], ][H]H:MM:SS[.UUUUUU]"; without an # external package we have no standard way to parse or format timedelta @@ -269,7 +265,7 @@ def get(key: str, use_default: bool = True) -> "ServerSetting": if setting is None and use_default: setting = ServerSetting(key=key, value=__class__._default(key)) except SQLAlchemyError as e: - raise ServerSettingSqlError("finding", key, str(e)) from e + raise ServerSettingSqlError(e, operation="get", key=key) from e return setting @staticmethod @@ -332,27 +328,6 @@ def __str__(self) -> str: """ return f"{self.key}: {self.value!r}" - def _decode(self, exception: IntegrityError) -> Exception: - """Decode a SQLAlchemy IntegrityError to look for a recognizable UNIQUE - or NOT NULL constraint violation. - - Return the original exception if it doesn't match. - - Args: - exception : An IntegrityError to decode - - Returns: - a more specific exception, or the original if decoding fails - """ - # Postgres engine returns (code, message) but sqlite3 engine only - # returns (message); so always take the last element. 
- cause = exception.orig.args[-1] - if cause.find("UNIQUE constraint") != -1: - return ServerSettingDuplicate(self.key, cause) - elif cause.find("NOT NULL constraint") != -1: - return ServerSettingNullKey(self.key, cause) - return exception - def add(self): """Add the ServerSetting object to the database.""" try: @@ -360,9 +335,14 @@ def add(self): Database.db_session.commit() except Exception as e: Database.db_session.rollback() - if isinstance(e, IntegrityError): - raise self._decode(e) from e - raise ServerSettingSqlError("adding", self.key, str(e)) from e + raise decode_sql_error( + e, + on_duplicate=ServerSettingDuplicate, + on_null=ServerSettingNullKey, + fallback=ServerSettingSqlError, + operation="add", + key=self.key, + ) from e def update(self): """Update the database row with the modified version of the @@ -372,6 +352,11 @@ def update(self): Database.db_session.commit() except Exception as e: Database.db_session.rollback() - if isinstance(e, IntegrityError): - raise self._decode(e) from e - raise ServerSettingSqlError("updating", self.key, str(e)) from e + raise decode_sql_error( + e, + on_duplicate=ServerSettingDuplicate, + on_null=ServerSettingNullKey, + fallback=ServerSettingSqlError, + operation="update", + key=self.key, + ) from e diff --git a/lib/pbench/server/database/models/templates.py b/lib/pbench/server/database/models/templates.py index bd530eee4b..725dcfcbef 100644 --- a/lib/pbench/server/database/models/templates.py +++ b/lib/pbench/server/database/models/templates.py @@ -2,10 +2,11 @@ from pathlib import Path from sqlalchemy import Column, event, Integer, String -from sqlalchemy.exc import IntegrityError, SQLAlchemyError +from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.sql.sqltypes import DateTime, JSON from pbench.server.database.database import Database +from pbench.server.database.models import decode_sql_error class TemplateError(Exception): @@ -14,6 +15,8 @@ class TemplateError(Exception): It is never raised directly, but 
may be used in "except" clauses. """ + pass + class TemplateSqlError(TemplateError): """SQLAlchemy errors reported through Template operations. @@ -23,55 +26,50 @@ class TemplateSqlError(TemplateError): original SQLAlchemy exception. """ - def __init__(self, operation: str, name: str, cause: str): - self.operation = operation - self.name = name + def __init__(self, cause: Exception, **kwargs): + super().__init__( + f"Error on {kwargs.get('operation')} index {kwargs.get('name')!r}: '{cause}'" + ) self.cause = cause - - def __str__(self) -> str: - return f"Error {self.operation} index {self.name!r}: {self.cause}" + self.kwargs = kwargs class TemplateFileMissing(TemplateError): """Template requires a file name.""" def __init__(self, name: str): + super().__init__(f"Template {name!r} is missing required file") self.name = name - def __str__(self) -> str: - return f"Template {self.name!r} is missing required file" - class TemplateNotFound(TemplateError): """Attempt to find a Template that doesn't exist.""" def __init__(self, name: str): + super().__init__( + f"Document template for index {name!r} not found in the database" + ) self.name = name - def __str__(self) -> str: - return f"Document template for index {self.name!r} not found in the database" - class TemplateDuplicate(TemplateError): """Attempt to commit a duplicate Template.""" - def __init__(self, name: str, cause: str): - self.name = name + def __init__(self, cause: Exception, **kwargs): + super().__init__(f"Duplicate template {kwargs.get('name')!r}: '{cause}'") self.cause = cause - - def __str__(self) -> str: - return f"Duplicate template {self.name!r}: {self.cause}" + self.kwargs = kwargs class TemplateMissingParameter(TemplateError): """Attempt to commit a Template with missing parameters.""" - def __init__(self, name: str, cause: str): - self.name = name + def __init__(self, cause: Exception, **kwargs): + super().__init__( + f"Missing required parameters in {kwargs.get('name')!r}: '{cause}'" + ) self.cause 
= cause - - def __str__(self) -> str: - return f"Missing required parameters in {self.name!r}: {self.cause}" + self.kwargs = kwargs class Template(Database.Base): @@ -139,7 +137,7 @@ def find(name: str) -> "Template": try: template = Database.db_session.query(Template).filter_by(name=name).first() except SQLAlchemyError as e: - raise TemplateSqlError("finding", name, str(e)) + raise TemplateSqlError(e, operation="find", name=name) if template is None: raise TemplateNotFound(name) @@ -153,38 +151,21 @@ def __str__(self) -> str: """ return f"{self.name}: {self.index_template}" - def _decode(self, exception: IntegrityError) -> Exception: - """Decode a SQLAlchemy IntegrityError to look for a recognizable UNIQUE - or NOT NULL constraint violation. - - Return the original exception if it doesn't match. - - Args: - exception : An IntegrityError to decode - - Returns: - A more specific exception, or the original if decoding fails. - """ - # Postgres engine returns (code, message) but sqlite3 engine only - # returns (message); so always take the last element. 
- cause = exception.orig.args[-1] - if cause.find("UNIQUE constraint") != -1: - return TemplateDuplicate(self.name, cause) - elif cause.find("NOT NULL constraint") != -1: - return TemplateMissingParameter(self.name, cause) - return exception - def add(self): """Add the Template object to the database.""" try: Database.db_session.add(self) Database.db_session.commit() - except IntegrityError as e: - Database.db_session.rollback() - raise self._decode(e) except Exception as e: Database.db_session.rollback() - raise TemplateSqlError("adding", self.name, str(e)) + raise decode_sql_error( + e, + on_duplicate=TemplateDuplicate, + on_null=TemplateMissingParameter, + fallback=TemplateSqlError, + operation="add", + name=self.name, + ) from e def update(self): """Update the database row with the modified version of the @@ -192,12 +173,16 @@ def update(self): """ try: Database.db_session.commit() - except IntegrityError as e: - Database.db_session.rollback() - raise self._decode(e) except Exception as e: Database.db_session.rollback() - raise TemplateSqlError("updating", self.name, str(e)) + raise decode_sql_error( + e, + on_duplicate=TemplateDuplicate, + on_null=TemplateMissingParameter, + fallback=TemplateSqlError, + operation="update", + name=self.name, + ) from e @event.listens_for(Template, "init") diff --git a/lib/pbench/server/database/models/users.py b/lib/pbench/server/database/models/users.py index 93971735e4..9d284a0e0a 100644 --- a/lib/pbench/server/database/models/users.py +++ b/lib/pbench/server/database/models/users.py @@ -2,11 +2,11 @@ from typing import Optional from sqlalchemy import Column, String -from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import relationship from pbench.server import JSONOBJECT from pbench.server.database.database import Database +from pbench.server.database.models import decode_sql_error class Roles(enum.Enum): @@ -19,6 +19,8 @@ class UserError(Exception): It is never raised directly, but may be used in "except" 
clauses. """ + pass + class UserSqlError(UserError): """SQLAlchemy errors reported through User operations. @@ -27,35 +29,28 @@ class UserSqlError(UserError): key; the cause will specify the original SQLAlchemy exception. """ - def __init__(self, operation: str, params: JSONOBJECT, cause: str): - self.operation = operation - self.params = params + def __init__(self, cause: Exception, **kwargs): + super().__init__(f"User SQL error: '{cause}'", kwargs) self.cause = cause - - def __str__(self) -> str: - return f"Error {self.operation} {self.params!r}: {self.cause}" + self.kwargs = kwargs class UserDuplicate(UserError): """Attempt to commit a duplicate unique value.""" - def __init__(self, user: "User", cause: str): - self.user = user + def __init__(self, cause: Exception, **kwargs): + super().__init__(f"Duplicate user: '{cause}'", kwargs) self.cause = cause - - def __str__(self) -> str: - return f"Duplicate user entry in {self.user.get_json()}: {self.cause}" + self.kwargs = kwargs class UserNullKey(UserError): """Attempt to commit a User row with an empty required column.""" - def __init__(self, user: "User", cause: str): - self.user = user + def __init__(self, cause: Exception, **kwargs): + super().__init__(f"Missing required key: '{cause}'", kwargs) self.cause = cause - - def __str__(self) -> str: - return f"Missing required key in {self.user.get_json()}: {self.cause}" + self.kwargs = kwargs class User(Database.Base): @@ -82,7 +77,7 @@ def roles(self, value): try: self._roles = ";".join(value) except Exception as e: - raise UserSqlError("Setting role", value, str(e)) from e + raise UserSqlError(e, user=self, operation="setrole", role=value) from e def __str__(self): return f"User, id: {self.id}, username: {self.username}" @@ -95,30 +90,6 @@ def get_json(self) -> JSONOBJECT: """ return {"username": self.username, "id": self.id, "roles": self.roles} - def _decode( - self, exception: IntegrityError, operation: Optional[str] = "" - ) -> Exception: - """Decode a 
SQLAlchemy IntegrityError to look for a recognizable UNIQUE - or NOT NULL constraint violation. - - Return the original exception if it doesn't match. - - Args: - exception : An IntegrityError to decode - - Returns: - a more specific exception, or the original if decoding fails - """ - # Postgres engine returns (code, message) but sqlite3 engine only - # returns (message); so always take the last element. - cause = exception.orig.args[-1] - if "UNIQUE constraint" in cause: - return UserDuplicate(self, cause) - elif "NOT NULL constraint" in cause: - return UserNullKey(self, cause) - else: - return UserSqlError(operation, self.get_json(), str(exception)) - @staticmethod def query(id: str = None, username: str = None) -> Optional["User"]: """Find a user using one of the provided arguments. @@ -149,7 +120,14 @@ def add(self): Database.db_session.commit() except Exception as e: Database.db_session.rollback() - raise self._decode(e, "adding") from e + raise decode_sql_error( + e, + on_duplicate=UserDuplicate, + on_null=UserNullKey, + fallback=UserSqlError, + user=self, + operation="add", + ) from e def update(self, **kwargs): """Update the current user object with given keyword arguments.""" @@ -159,7 +137,14 @@ def update(self, **kwargs): Database.db_session.commit() except Exception as e: Database.db_session.rollback() - raise self._decode(e, "updating") from e + raise decode_sql_error( + e, + on_duplicate=UserDuplicate, + on_null=UserNullKey, + fallback=UserSqlError, + user=self, + operation="update", + ) from e def is_admin(self) -> bool: """This method checks whether the given user has an admin role. 
diff --git a/lib/pbench/test/unit/server/database/test_audit_db.py b/lib/pbench/test/unit/server/database/test_audit_db.py index 5fbc26c0b6..e89a859172 100644 --- a/lib/pbench/test/unit/server/database/test_audit_db.py +++ b/lib/pbench/test/unit/server/database/test_audit_db.py @@ -59,9 +59,11 @@ def test_create(self, fake_db): def test_construct_null(self, fake_db): """Test handling of Audit record null value error""" self.session.raise_on_commit = IntegrityError( - statement="", params="", orig=FakeDBOrig("NOT NULL constraint") + statement="", params="", orig=FakeDBOrig("not null constraint") ) - with pytest.raises(AuditNullKey, match="'operation': 'CREATE'"): + with pytest.raises( + AuditNullKey, match=r"Missing required key.*'operation': 'add'" + ): Audit.create(operation=OperationCode.CREATE, status=AuditStatus.BEGIN) self.session.check_session(rolledback=1) @@ -70,7 +72,9 @@ def test_construct_duplicate(self, fake_db): self.session.raise_on_commit = IntegrityError( statement="", params="", orig=FakeDBOrig("UNIQUE constraint") ) - with pytest.raises(AuditDuplicate, match="'id': 1"): + with pytest.raises( + AuditDuplicate, match=r"Duplicate key in.*'operation': 'add'" + ): Audit.create(id=1, operation=OperationCode.CREATE, status=AuditStatus.BEGIN) self.session.check_session(rolledback=1) @@ -79,7 +83,7 @@ def test_construct_error(self, fake_db): self.session.raise_on_commit = DatabaseError( statement="", params="", orig=FakeDBOrig("something else") ) - with pytest.raises(AuditSqlError, match="'id': 1"): + with pytest.raises(AuditSqlError, match=r"SQL error on.*'operation': 'add'"): Audit.create(id=1, operation=OperationCode.CREATE, status=AuditStatus.BEGIN) self.session.check_session(rolledback=1) @@ -139,10 +143,7 @@ def test_override(self, fake_db): def test_exceptional_query(self, fake_db): FakeSession.throw_query = True - with pytest.raises( - AuditSqlError, - match=r"Error finding {'user_name': 'imme'}: \('test', 'because'\)", - ): + with 
pytest.raises(AuditSqlError, match=r"SQL error on.+'operation': 'query'"): Audit.query(user_name="imme") self.session.reset_context() diff --git a/lib/pbench/test/unit/server/database/test_datasets_metadata_db.py b/lib/pbench/test/unit/server/database/test_datasets_metadata_db.py index e84f9fac05..0ffbe12877 100644 --- a/lib/pbench/test/unit/server/database/test_datasets_metadata_db.py +++ b/lib/pbench/test/unit/server/database/test_datasets_metadata_db.py @@ -256,7 +256,7 @@ def test_get_bad_path(self, attach_dataset): with pytest.raises(MetadataBadStructure) as exc: Metadata.getvalue(ds, "global.contact.email") assert exc.type == MetadataBadStructure - assert exc.value.key == "global.contact.email" + assert exc.value.path == "global.contact.email" assert exc.value.element == "contact" assert ( str(exc.value) @@ -272,7 +272,7 @@ def test_set_bad_path(self, attach_dataset): with pytest.raises(MetadataBadStructure) as exc: Metadata.setvalue(ds, "global.contact.email", "me@example.com") assert exc.type == MetadataBadStructure - assert exc.value.key == "global.contact.email" + assert exc.value.path == "global.contact.email" assert exc.value.element == "contact" assert ( str(exc.value) diff --git a/lib/pbench/test/unit/server/database/test_index_map_db.py b/lib/pbench/test/unit/server/database/test_index_map_db.py index d20c50f306..599beac450 100644 --- a/lib/pbench/test/unit/server/database/test_index_map_db.py +++ b/lib/pbench/test/unit/server/database/test_index_map_db.py @@ -140,21 +140,25 @@ def test_merge_identity(self, db_session, attach_dataset, orig, merge): statement="", params="", orig=BaseException("UNIQUE constraint") ), IndexMapDuplicate, - "Duplicate index map 'any'", + "Duplicate index map ", ), ( IntegrityError( statement="", params="", orig=BaseException("NOT NULL constraint") ), IndexMapMissingParameter, - "Missing required parameters in 'any'", + "Missing required parameters ", ), ( IntegrityError(statement="", params="", 
orig=BaseException("JUNK")), - IntegrityError, - "(builtins.BaseException) JUNK", + IndexMapSqlError, + "Index SQL error on create (drb)|drb:all: '(builtins.BaseException) JUNK", + ), + ( + Exception("Not me"), + IndexMapSqlError, + "Index SQL error on create (drb)|drb:all", ), - (Exception("Not me"), IndexMapSqlError, "Error create index drb:any"), ), ) def test_commit_error( @@ -211,7 +215,10 @@ def fake_add_all(idx: list[IndexMap]): drb = Dataset.query(name="drb") with pytest.raises(IndexMapSqlError) as e: IndexMap.create(drb, {"root": {"idx": ["id"]}}) - assert str(e.value) == "Error add_all index drb:all: Yeah, that didn't work" + assert ( + str(e.value) + == "Index SQL error on create (drb)|drb:all: 'Yeah, that didn't work'" + ) def test_merge_fail(self, monkeypatch, db_session, attach_dataset): """Test index map creation failure""" @@ -224,7 +231,7 @@ def fake_add(idx: IndexMap): drb = Dataset.query(name="drb") with pytest.raises(IndexMapSqlError) as e: IndexMap.merge(drb, {"root": {"idx": ["id"]}}) - assert str(e.value) == "Error merge index drb:all: That was easy" + assert str(e.value) == "Index SQL error on merge (drb)|drb:all: 'That was easy'" def test_indices_fail(self, monkeypatch, db_session, attach_dataset): """Test index list failure""" @@ -237,7 +244,9 @@ def fake_query(db_type): with pytest.raises(IndexMapSqlError) as e: IndexMap.indices(drb, "idx") - assert str(e.value) == "Error finding index drb:idx: That was easy" + assert ( + str(e.value) == "Index SQL error on indices (drb)|drb:idx: 'That was easy'" + ) def test_exists_fail(self, monkeypatch, db_session, attach_dataset): """Test index existence check failure""" @@ -250,7 +259,9 @@ def fake_query(db_type): with pytest.raises(IndexMapSqlError) as e: IndexMap.exists(drb) - assert str(e.value) == "Error checkexist index drb:any: That was easy" + assert ( + str(e.value) == "Index SQL error on exists (drb)|drb:any: 'That was easy'" + ) def test_stream_fail(self, monkeypatch, db_session, 
attach_dataset): """Test index stream failure""" @@ -263,4 +274,6 @@ def fake_query(db_type): with pytest.raises(IndexMapSqlError) as e: list(IndexMap.stream(drb)) - assert str(e.value) == "Error streaming index drb:all: That was easy" + assert ( + str(e.value) == "Index SQL error on stream (drb)|drb:all: 'That was easy'" + ) diff --git a/lib/pbench/test/unit/server/database/test_server_settings.py b/lib/pbench/test/unit/server/database/test_server_settings.py index 7323bb4137..c4596f2a7e 100644 --- a/lib/pbench/test/unit/server/database/test_server_settings.py +++ b/lib/pbench/test/unit/server/database/test_server_settings.py @@ -69,7 +69,7 @@ def test_construct_duplicate(self): statement="", params="", orig=FakeDBOrig("UNIQUE constraint") ) ServerSetting.create(key="dataset-lifetime", value=2) - assert str(e).find("dataset-lifetime") != -1 + assert "dataset-lifetime" in str(e) self.session.check_session( committed=[ FakeRow(cls=ServerSetting, id=1, key="dataset-lifetime", value="1") diff --git a/lib/pbench/test/unit/server/database/test_template_db.py b/lib/pbench/test/unit/server/database/test_template_db.py index 366d713b9a..4d74c18c92 100644 --- a/lib/pbench/test/unit/server/database/test_template_db.py +++ b/lib/pbench/test/unit/server/database/test_template_db.py @@ -151,5 +151,4 @@ def test_update_missing(self, fake_mtime, db_session): template.idxname = None with pytest.raises(TemplateMissingParameter) as e: template.update() - assert str(e).find("run") != -1 - assert str(e).find("idxname") != -1 + assert str(e.value).startswith("Missing required parameters in 'run'") diff --git a/lib/pbench/test/unit/server/database/test_users_db.py b/lib/pbench/test/unit/server/database/test_users_db.py index b24751e04c..112a058a65 100644 --- a/lib/pbench/test/unit/server/database/test_users_db.py +++ b/lib/pbench/test/unit/server/database/test_users_db.py @@ -72,14 +72,19 @@ def test_user_survives_dataset_real_session(self, db_session, create_user): def 
test_construct_duplicate(self, fake_db): """Test handling of User record duplicate value error""" - self.session.raise_on_commit = IntegrityError( - statement="", params="", orig=FakeDBOrig("UNIQUE constraint") + exception = IntegrityError( + statement="", params="", orig=FakeDBOrig("unique constraint") ) + self.session.raise_on_commit = exception with pytest.raises( UserDuplicate, - match="Duplicate user entry in {'username': 'dummy', 'id': [^,]+, 'roles': \\[]}: UNIQUE constraint", - ): + match="Duplicate user", + ) as exc: self.add_dummy_user(fake_db) + assert len(exc.value.args) == 2 + assert exc.value.args[0] == f"Duplicate user: '{exception}'" + assert exc.value.args[1]["operation"] == "add" + assert exc.value.args[1]["user"].username == "dummy" self.session.check_session(rolledback=1) self.session.reset_context() diff --git a/lib/pbench/test/unit/server/test_upload.py b/lib/pbench/test/unit/server/test_upload.py index c96d6a0202..1130b5620f 100644 --- a/lib/pbench/test/unit/server/test_upload.py +++ b/lib/pbench/test/unit/server/test_upload.py @@ -745,7 +745,9 @@ def test_upload_metadata_error( def setvalue( dataset: Dataset, key: str, value: Any, user: Optional[User] = None ): - raise MetadataSqlError("test", dataset, key) + raise MetadataSqlError( + Exception("fake"), operation="test", dataset=dataset, key=key + ) monkeypatch.setattr(Metadata, "setvalue", setvalue) @@ -761,7 +763,7 @@ def setvalue( assert len(audit) == 2 fails = audit[1].attributes["failures"] assert isinstance(fails, dict) - assert fails["server.benchmark"].startswith("Error test ") + assert fails["server.benchmark"].startswith("Metadata SQL error 'fake': ") @pytest.mark.freeze_time("1970-01-01") def test_upload_archive(self, client, pbench_drb_token, server_config, tarball):