diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9d7ab6592..047a59855 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1,8 +1,9 @@ +# push will run on every pushed commit to any branch (so this will rerun the tests +# once a branch gets merged to main in addition to any new commits on any branch) +on: push + name: CI -on: - push: - pull_request: - types: [opened, reopened] + concurrency: group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' cancel-in-progress: true diff --git a/.secrets.baseline b/.secrets.baseline index 9aa531c19..445f02f3a 100644 --- a/.secrets.baseline +++ b/.secrets.baseline @@ -121,7 +121,7 @@ "filename": ".github/workflows/ci.yaml", "hashed_secret": "3e26d6750975d678acb8fa35a0f69237881576b0", "is_verified": false, - "line_number": 13 + "line_number": 14 } ], "deployment/scripts/postgresql/postgresql_init.sql": [ @@ -422,5 +422,5 @@ } ] }, - "generated_at": "2024-07-25T17:19:58Z" + "generated_at": "2024-07-26T20:11:41Z" } diff --git a/Dockerfile b/Dockerfile index f103e44f9..7a5d6881c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -41,7 +41,7 @@ RUN poetry config virtualenvs.create false \ COPY . /$appname COPY ./deployment/uwsgi/uwsgi.ini /etc/uwsgi/uwsgi.ini COPY ./deployment/uwsgi/wsgi.py /$appname/wsgi.py -COPY clear_prometheus_multiproc /$appname/clear_prometheus_multiproc +COPY ./deployment/scripts/metrics/setup_prometheus /$appname/setup_prometheus # install fence RUN poetry config virtualenvs.create false \ diff --git a/README.md b/README.md index fc4ef7025..8bcbe8b74 100644 --- a/README.md +++ b/README.md @@ -128,11 +128,21 @@ Whereas pre-signed URL is a cloud-agnostic solution, services and tools on Googl See [Fence and Google](docs/additional_documentation/google_architecture.md) for more details on data access methods specific to Google. - ## Setup See detailed explanation [here](docs/additional_documentation/setup.md) +## Unit testing + +The easiest way to ensure your environment is set up for tests the same +way as in CI/CD is to use the setup and test run script that the CI/CD uses. + +You can run unit tests (which are run with pytest behind the scenes) like this: + +```shell +bash ./tests/ci_commands_script.sh +``` + ## Additional documentation 1. [Terminologies](docs/additional_documentation/terminology.md) diff --git a/deployment/scripts/metrics/setup_prometheus b/deployment/scripts/metrics/setup_prometheus new file mode 100755 index 000000000..66c031669 --- /dev/null +++ b/deployment/scripts/metrics/setup_prometheus @@ -0,0 +1,33 @@ +#!/usr/bin/env bash +# This script is called by: +# UWSGI during startup (this script gets copied to the /fence directory in the Dockerfile) +# - It prepares the prometheus_multiproc_dir folder to store the metrics from separate uwsgi workers (per PID) +# run.py +# - So local runs set up the necessary environment vars and folders for prometheus metrics +# Test framework in conftest +# - So test runs set up the necessary environment vars and folders for prometheus metrics + +# Usage: +# ./setup_prometheus [DIR] [true|false] + +# Default directory if no argument is provided +DIR=${1:-/var/tmp/prometheus_metrics} + +# Determine whether to wipe the directory (default is to wipe) +SETUP_DIR=${2:-true} + +set -ex + +if [[ "$SETUP_DIR" == "true" ]]; then + echo "setting up $DIR. 
clearing existing files, ensuring it exists, chmod 755" + rm -Rf "$DIR" + mkdir -p "$DIR" + chmod 755 "$DIR" +fi + +if id -u nginx &>/dev/null; then + chown "$(id -u nginx)":"$(id -g nginx)" "$DIR" +fi + +export PROMETHEUS_MULTIPROC_DIR="$DIR" +echo "PROMETHEUS_MULTIPROC_DIR is $PROMETHEUS_MULTIPROC_DIR" diff --git a/deployment/uwsgi/uwsgi.ini b/deployment/uwsgi/uwsgi.ini index 0ebedb7ce..e88b0cd3b 100644 --- a/deployment/uwsgi/uwsgi.ini +++ b/deployment/uwsgi/uwsgi.ini @@ -28,8 +28,7 @@ pythonpath = /usr/local/src/* # metrics setup stats = 127.0.0.1:9191 stats-http = true -env = prometheus_multiproc_dir=/var/tmp/uwsgi_flask_metrics -exec-asap = /fence/clear_prometheus_multiproc /var/tmp/uwsgi_flask_metrics +exec-asap = /fence/setup_prometheus /var/tmp/prometheus_metrics # Initialize application in worker processes, not master. This prevents the # workers from all trying to open the same database connections at startup. diff --git a/docs/additional_documentation/fence_create.md b/docs/additional_documentation/fence_create.md index 398544f98..f1f750143 100644 --- a/docs/additional_documentation/fence_create.md +++ b/docs/additional_documentation/fence_create.md @@ -53,7 +53,7 @@ curl --request POST https://FENCE_URL/oauth2/token?grant_type=client_credentials The optional `--expires-in` parameter allows specifying the number of *days* until this client expires. The recommendation is to rotate credentials with the `client_credentials` grant at least once a year (see [Rotate client credentials](#rotate-client-credentials) section). -NOTE: In Gen3, you can grant specific access to a client the same way you would to a user. See the [user.yaml guide](https://github.com/uc-cdis/fence/blob/master/docs/user.yaml_guide.md) for more details. +NOTE: In Gen3, you can grant specific access to a client the same way you would to a user. See the [user.yaml guide](./user.yaml_guide.md) for more details. NOTE: Client credentials tokens are not linked to a user (the claims contain no `sub` or `context.user.name` like other tokens). Some Gen3 endpoints that assume the token is linked to a user, or whose logic require there being a user, do not support them. For an example of how to adapt an endpoint to support client credentials tokens, see [here](https://github.com/uc-cdis/requestor/commit/a5078fae27fa258ac78045cf2bb89cb2104f53cf). For an example of how to explicitly reject client credentials tokens, see [here](https://github.com/uc-cdis/requestor/commit/0f4974c25343d2185c7cdb48dcdeb58f97800672). diff --git a/docs/additional_documentation/setup.md b/docs/additional_documentation/setup.md index 5dcafc37b..cb94efb0c 100644 --- a/docs/additional_documentation/setup.md +++ b/docs/additional_documentation/setup.md @@ -154,4 +154,4 @@ saved by the OAuth client to use with ## Quickstart with Helm You can now deploy individual services via Helm! -Please refer to the Helm quickstart guide HERE (https://github.com/uc-cdis/fence/blob/master/docs/quickstart_helm.md) +Please refer to the Helm quickstart guide [HERE](./quickstart_helm.md) diff --git a/docs/additional_documentation/user.yaml_guide.md b/docs/additional_documentation/user.yaml_guide.md index 893d32045..418d7407c 100644 --- a/docs/additional_documentation/user.yaml_guide.md +++ b/docs/additional_documentation/user.yaml_guide.md @@ -16,7 +16,7 @@ The `user.yaml` file is one way to get authorization information into Gen3. It is ingested via [Fence's `usersync` script](usersync.md). 
The format of this file is tightly coupled with the notions of resource, role and policy as defined by Gen3's policy engine, [Arborist](https://github.com/uc-cdis/arborist#arborist). -For Gen3 Data Commons that do not use Arborist or that use the Google Data Access method of [Google Service Account Registration](https://github.com/uc-cdis/fence/blob/master/docs/google_architecture.md#google-account-linking-and-service-account-registration), refer to the [Deprecated format](#deprecated-format) section. +For Gen3 Data Commons that do not use Arborist or that use the Google Data Access method of [Google Service Account Registration](./google_architecture.md#google-account-linking-and-service-account-registration), refer to the [Deprecated format](#deprecated-format) section. In a fully deployed Gen3 Commons using [Cloud Automation](https://github.com/uc-cdis/cloud-automation), the `user.yaml` file is usually hosted in S3 and configured via the `global.useryaml_s3path` setting of the Gen3 Data Commons manifest: ``` diff --git a/docs/azure/azure_devops_pipeline.md b/docs/azure/azure_devops_pipeline.md index 7e7be9fd8..1be2c9209 100755 --- a/docs/azure/azure_devops_pipeline.md +++ b/docs/azure/azure_devops_pipeline.md @@ -1,16 +1,16 @@ # Azure DevOps Build Pipeline -The purpose of this [Azure DevOps Pipeline](../../azure-devops-pipeline.yaml) is to build `fence`, run a test suite, and then push the `fence` container into an [Azure Container Registry](https://docs.microsoft.com/en-us/azure/container-registry/container-registry-get-started-portal). +The purpose of this [Azure DevOps Pipeline](../../azure-devops-pipeline.yaml) is to build `fence`, run a test suite, and then push the `fence` container into an [Azure Container Registry](https://learn.microsoft.com/en-us/azure/container-registry/container-registry-get-started-portal). ## Getting Started -If you don't already have access, you can use the free sign up with [Azure Devops](https://docs.microsoft.com/en-us/azure/devops/pipelines/get-started/pipelines-sign-up?view=azure-devops). +If you don't already have access, you can use the free sign up with [Azure Devops](https://learn.microsoft.com/en-us/azure/devops/pipelines/get-started/pipelines-sign-up?view=azure-devops). -You can also import the [pipeline](../../azure-devops-pipeline.yaml), see these [doc notes](https://docs.microsoft.com/en-us/azure/devops/pipelines/get-started/clone-import-pipeline?view=azure-devops&tabs=yaml#export-and-import-a-pipeline) as a guide. +You can also import the [pipeline](../../azure-devops-pipeline.yaml); see these [doc notes](https://learn.microsoft.com/en-us/azure/devops/pipelines/get-started/clone-import-pipeline?view=azure-devops&tabs=yaml#export-and-import-a-pipeline) as a guide. ### Setup Azure Container Registry -[Create a Service Principal](https://docs.microsoft.com/en-us/cli/azure/create-an-azure-service-principal-azure-cli#password-based-authentication) in your Azure Subscription using [Azure CLI](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli). +[Create a Service Principal](https://learn.microsoft.com/en-us/cli/azure/create-an-azure-service-principal-azure-cli#password-based-authentication) in your Azure Subscription using [Azure CLI](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli). First, log into `az` cli: @@ -36,7 +36,7 @@ spTenantId=$(echo $spObject | jq -r ".tenant") > You will need to have appropriate permissions in the AAD directory. 
If you don't have access, please work with your Azure Subscription administrator to obtain a Service Principal. -You can also create an **Azure Container Registry** using [azure cli](https://docs.microsoft.com/en-us/azure/container-registry/container-registry-get-started-azure-cli) or the [portal](https://docs.microsoft.com/en-us/azure/container-registry/container-registry-get-started-portal). +You can also create an **Azure Container Registry** using [azure cli](https://learn.microsoft.com/en-us/azure/container-registry/container-registry-get-started-azure-cli) or the [portal](https://learn.microsoft.com/en-us/azure/container-registry/container-registry-get-started-portal). You can use the following `az` cli commands in `bash` for reference: ```bash az group create --name myResourceGroup --location eastus az acr create --resource-group myResourceGroup --name myContainerRegistry --sku Basic ``` -Also, make sure that the **Service Principal** has rights to the [Azure Container Registry](https://docs.microsoft.com/en-us/azure/container-registry/container-registry-roles?tabs=azure-cli) to **acrPull** and **acrPush**. +Also, make sure that the **Service Principal** has rights to the [Azure Container Registry](https://learn.microsoft.com/en-us/azure/container-registry/container-registry-roles?tabs=azure-cli) to **acrPull** and **acrPush**. ```bash acrResourceId="$(az acr show -n myContainerRegistry -g myResourceGroup --query "id" -o tsv)" @@ -65,7 +65,7 @@ az login --service-principal --username "$spClientId" --password "$spPassword" - az acr login --name myContainerRegistry ``` -You can also verify that this service principal will have `ACRPush` and `ACRPull` permission with ACR, which you can check how the [getting started with docker guide](https://docs.microsoft.com/en-us/azure/container-registry/container-registry-get-started-docker-cli?tabs=azure-cli) for more details. +You can also verify that this service principal has `ACRPush` and `ACRPull` permissions on ACR; see the [getting started with docker guide](https://learn.microsoft.com/en-us/azure/container-registry/container-registry-get-started-docker-cli?tabs=azure-cli) for more details. First, pull and tag an image: @@ -99,13 +99,13 @@ az acr repository list --name mycontainerregistry You can set the variables on your **Azure DevOps pipeline**. -First, make sure you have already [imported your Azure DevOps Pipeline](https://docs.microsoft.com/en-us/azure/devops/pipelines/get-started/clone-import-pipeline?view=azure-devops&tabs=yaml#export-and-import-a-pipeline). +First, make sure you have already [imported your Azure DevOps Pipeline](https://learn.microsoft.com/en-us/azure/devops/pipelines/get-started/clone-import-pipeline?view=azure-devops&tabs=yaml#export-and-import-a-pipeline). Click on the pipeline and then click edit, which will let you update the variables in the Azure DevOps pipeline: ![Click on Variables](azure_devops_pipeline_config_1.png) -Variable Name | Description +Variable Name | Description ------ | ------ SP_CLIENT_ID | This is your Service Principal Client ID. SP_CLIENT_PASS | This is your Service Principal Password. You can override this value when running the Azure DevOps pipeline. @@ -121,4 +121,4 @@ After updating the variables, be sure to click **save**: You can run the pipeline to validate the `fence` build and push to ACR. 
-![Run the pipeline](azure_devops_pipeline_config_3.png) \ No newline at end of file +![Run the pipeline](azure_devops_pipeline_config_3.png) diff --git a/fence/__init__.py b/fence/__init__.py index 31cb76eda..15c38f189 100755 --- a/fence/__init__.py +++ b/fence/__init__.py @@ -27,7 +27,7 @@ ) from fence.auth import logout, build_redirect_url -from fence.metrics import metrics +from fence.metrics import Metrics from fence.blueprints.data.indexd import S3IndexedFileLocation from fence.blueprints.login.utils import allowed_login_redirects, domain from fence.errors import UserError @@ -97,6 +97,8 @@ def app_init( logger.info( f"Prometheus metrics are{'' if config['ENABLE_PROMETHEUS_METRICS'] else ' NOT'} enabled." ) + # Initialize the Metrics instance + app.metrics = Metrics(enabled=config["ENABLE_PROMETHEUS_METRICS"]) def app_sessions(app): @@ -207,7 +209,7 @@ def metrics_endpoint(): /!\ There is no authz control on this endpoint! In cloud-automation setups, access to this endpoint is blocked at the revproxy level. """ - data, content_type = metrics.get_latest_metrics() + data, content_type = flask.current_app.metrics.get_latest_metrics() return flask.Response(data, content_type=content_type) diff --git a/fence/blueprints/data/indexd.py b/fence/blueprints/data/indexd.py index 380fcd43e..63f966246 100755 --- a/fence/blueprints/data/indexd.py +++ b/fence/blueprints/data/indexd.py @@ -49,7 +49,6 @@ from fence.resources.ga4gh.passports import sync_gen3_users_authz_from_ga4gh_passports from fence.resources.audit.utils import enable_audit_logging from fence.utils import get_valid_expiration_from_request -from fence.metrics import metrics from . import multipart_upload from ...models import AssumeRoleCacheAWS, query_for_user, query_for_user_by_id @@ -210,7 +209,7 @@ def _log_signed_url_data_info( f"acl={acl} authz={authz} bucket={bucket} user_sub={user_sub} client_id={client_id}" ) - metrics.add_signed_url_event( + current_app.metrics.add_signed_url_event( action, protocol, acl, diff --git a/fence/blueprints/login/base.py b/fence/blueprints/login/base.py index 08fcab61d..17304c1cb 100644 --- a/fence/blueprints/login/base.py +++ b/fence/blueprints/login/base.py @@ -7,7 +7,6 @@ from fence.blueprints.login.redirect import validate_redirect from fence.config import config from fence.errors import UserError -from fence.metrics import metrics logger = get_logger(__name__) @@ -134,7 +133,7 @@ def get(self): def post_login(self, user=None, token_result=None, **kwargs): prepare_login_log(self.idp_name) - metrics.add_login_event( + flask.current_app.metrics.add_login_event( user_sub=flask.g.user.id, idp=self.idp_name, fence_idp=flask.session.get("fence_idp"), diff --git a/fence/config-default.yaml b/fence/config-default.yaml index 5e43e21dc..010230737 100755 --- a/fence/config-default.yaml +++ b/fence/config-default.yaml @@ -727,7 +727,7 @@ INDEXD_PASSWORD: '' # - Support Azure Blob Data Access Methods # ////////////////////////////////////////////////////////////////////////////////////// -# https://docs.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&tabs=azure-portal#view-account-access-keys +# https://learn.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&tabs=azure-portal#view-account-access-keys # AZ_BLOB_CREDENTIALS: 'fake connection string' AZ_BLOB_CREDENTIALS: diff --git a/fence/metrics.py b/fence/metrics.py index acdb200a9..d92aa8ed1 100644 --- a/fence/metrics.py +++ 
b/fence/metrics.py @@ -1,18 +1,20 @@ """ Metrics are collected by the Prometheus client and exposed at the `/metrics` endpoint. +This defines a class which can be extended and instantiated at the web-app level. For flask, this is +stored on the `app` object. + To add a new metric: - Add a new method to the `Metrics` class below (see `add_login_event` and `add_signed_url_event` for example). - The new method should call the `_increment_counter` and/or `_set_gauge` methods with the appropriate metric name and labels. - Call the new method from the code where relevant, for example: - from fence.metric import metrics - metrics.add_login_event(...) + from flask import current_app + current_app.metrics.add_login_event(...) - Add unit tests to the `tests/test_metrics` file. """ - import os import pathlib @@ -26,24 +28,48 @@ CONTENT_TYPE_LATEST, ) -from fence.config import config - logger = get_logger(__name__) -class Metrics: +class Metrics(object): """ Class to handle Prometheus metrics + Attributes: - registry (CollectorRegistry): Prometheus registry - metrics (dict): Dictionary to store Prometheus metrics + enabled (bool): If this is false, the class functions will be no-ops (no operations), effectively + doing nothing. This is the behavior when metrics are disabled. Why? So application code + doesn't have to check; it always tries to log a metric. + + _registry (CollectorRegistry): Prometheus registry + _metrics (dict): Dictionary to store Prometheus metrics + _counter_descriptions (dict): { "counter_name": "Description" } + _gauge_descriptions (dict): { "gauge_name": "Description" } """ - def __init__(self, prometheus_dir="/var/tmp/uwsgi_flask_metrics"): + def __init__(self, enabled=True, prometheus_dir="/var/tmp/prometheus_metrics"): + """ + Create a Metrics instance. + + Args: + enabled (bool): If this is false, the class functions will be no-ops (no operations), effectively + doing nothing. This is the behavior when metrics are disabled. Why? So application code + doesn't have to check; it always tries to log a metric. + prometheus_dir (str): Directory to use when setting PROMETHEUS_MULTIPROC_DIR env var (which prometheus requires + for multiprocess metrics collection). Note that the prometheus client is very + finicky about when the ENV var is set. + """ + self.enabled = enabled + if not enabled: + return + pathlib.Path(prometheus_dir).mkdir(parents=True, exist_ok=True) os.environ["PROMETHEUS_MULTIPROC_DIR"] = prometheus_dir + logger.info( + f"PROMETHEUS_MULTIPROC_DIR is {os.environ['PROMETHEUS_MULTIPROC_DIR']}" + ) + self._registry = CollectorRegistry() multiprocess.MultiProcessCollector(self._registry) self._metrics = {} @@ -67,8 +93,8 @@ def get_latest_metrics(self): """ # When metrics gathering is not enabled, the metrics endpoint should not error, but it should # not return any data.
- if not config["ENABLE_PROMETHEUS_METRICS"]: - return "", None + if not self.enabled: + return "", CONTENT_TYPE_LATEST return generate_latest(self._registry), CONTENT_TYPE_LATEST @@ -88,7 +114,7 @@ def _increment_counter(self, name, labels): f"Creating counter '{name}' with description '{description}' and labels: {labels}" ) self._metrics[name] = Counter(name, description, [*labels.keys()]) - elif type(self._metrics[name]) != Counter: + elif type(self._metrics[name]) is not Counter: raise ValueError( f"Trying to create counter '{name}' but a {type(self._metrics[name])} with this name already exists" ) @@ -113,7 +139,7 @@ def _set_gauge(self, name, labels, value): f"Creating gauge '{name}' with description '{description}' and labels: {labels}" ) self._metrics[name] = Gauge(name, description, [*labels.keys()]) - elif type(self._metrics[name]) != Gauge: + elif type(self._metrics[name]) is not Gauge: raise ValueError( f"Trying to create gauge '{name}' but a {type(self._metrics[name])} with this name already exists" ) @@ -125,7 +151,7 @@ def add_login_event(self, user_sub, idp, fence_idp, shib_idp, client_id): """ Record a login event """ - if not config["ENABLE_PROMETHEUS_METRICS"]: + if not self.enabled: return self._increment_counter( "gen3_fence_login", @@ -164,7 +190,7 @@ def add_signed_url_event( """ Record a signed URL event """ - if not config["ENABLE_PROMETHEUS_METRICS"]: + if not self.enabled: return self._increment_counter( "gen3_fence_presigned_url", @@ -193,7 +219,3 @@ def add_signed_url_event( }, size_in_kibibytes, ) - - -# Initialize the Metrics instance -metrics = Metrics() diff --git a/fence/resources/openid/microsoft_oauth2.py b/fence/resources/openid/microsoft_oauth2.py index 916a4a2b1..c4649f558 100755 --- a/fence/resources/openid/microsoft_oauth2.py +++ b/fence/resources/openid/microsoft_oauth2.py @@ -6,7 +6,7 @@ class MicrosoftOauth2Client(Oauth2ClientBase): client for interacting with microsoft oauth 2, as openid connect is supported under oauth2 - Docs at https://docs.microsoft.com/en-us/azure/active-directory/develop/v2-protocols-oidc + Docs at https://learn.microsoft.com/en-us/azure/active-directory/develop/v2-protocols-oidc """ diff --git a/poetry.lock b/poetry.lock index b3ea2a211..7636cc50c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -250,17 +250,17 @@ files = [ [[package]] name = "boto3" -version = "1.34.147" +version = "1.34.151" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.34.147-py3-none-any.whl", hash = "sha256:e1cef9a1a301866bcdee32ae0c699465eb2345f9a8e613a5835821430165ff6d"}, - {file = "boto3-1.34.147.tar.gz", hash = "sha256:9ec1c6ab22588242a47549f51a63dfc7c21fdf95a94820fc6e629ab060c38bd9"}, + {file = "boto3-1.34.151-py3-none-any.whl", hash = "sha256:35bc76faacf1667d3fbb66c1966acf2230ef26206557efc26d9d9d79337bef43"}, + {file = "boto3-1.34.151.tar.gz", hash = "sha256:30498a76b6f651ee2af7ae8edc1704379279ab8b91f1a8dd1f4ddf51259b0bc2"}, ] [package.dependencies] -botocore = ">=1.34.147,<1.35.0" +botocore = ">=1.34.151,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -269,13 +269,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.147" +version = "1.34.151" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.147-py3-none-any.whl", hash = "sha256:be94a2f4874b1d1705cae2bd512c475047497379651678593acb6c61c50d91de"}, - {file = "botocore-1.34.147.tar.gz", hash = "sha256:2e8f000b77e4ca345146cb2edab6403769a517b564f627bb084ab335417f3dbe"}, + {file = "botocore-1.34.151-py3-none-any.whl", hash = "sha256:9018680d7d4a8060c26d127ceec5ab5b270879f423ea39b863d8a46f3e34c404"}, + {file = "botocore-1.34.151.tar.gz", hash = "sha256:0d0968e427a94378f295b49d59170dad539938487ec948de3d030f06092ec6dc"}, ] [package.dependencies] @@ -351,18 +351,18 @@ files = [ [[package]] name = "cdispyutils" -version = "2.0.1" +version = "2.1.0" description = "This package includes several utility Python tools for the Gen3 stack." optional = false -python-versions = ">=3.6,<4.0" +python-versions = "<4.0,>=3.9" files = [ - {file = "cdispyutils-2.0.1-py3-none-any.whl", hash = "sha256:9a269014c657c87830e00d9b581280bfbe57a8708bbf3e0cf21a141d3810ab06"}, - {file = "cdispyutils-2.0.1.tar.gz", hash = "sha256:b6bfef5b3c77afe1d7705124d021eb579b500f9fcc07a66dc0f8fe8d130e6c23"}, + {file = "cdispyutils-2.1.0-py3-none-any.whl", hash = "sha256:ca1310ebb7e1b971d183823a8294e1dc9d8d55f862aa3c83e0feac7125364308"}, + {file = "cdispyutils-2.1.0.tar.gz", hash = "sha256:1c87830ea1d537f8479364b9473cca037f5ac8906e874471920f69622e3a4431"}, ] [package.dependencies] -cdiserrors = ">=1.0.0,<2.0.0" -cryptography = ">=3.2" +cdiserrors = "*" +cryptography = "*" Flask = "*" PyJWT = "*" requests = "*" @@ -1023,13 +1023,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.137.0" +version = "2.139.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google_api_python_client-2.137.0-py2.py3-none-any.whl", hash = "sha256:a8b5c5724885e5be9f5368739aa0ccf416627da4ebd914b410a090c18f84d692"}, - {file = "google_api_python_client-2.137.0.tar.gz", hash = "sha256:e739cb74aac8258b1886cb853b0722d47c81fe07ad649d7f2206f06530513c04"}, + {file = "google_api_python_client-2.139.0-py2.py3-none-any.whl", hash = "sha256:1850a92505d91a82e2ca1635ab2b8dff179f4b67082c2651e1db332e8039840c"}, + {file = "google_api_python_client-2.139.0.tar.gz", hash = "sha256:ed4bc3abe2c060a87412465b4e8254620bbbc548eefc5388e2c5ff912d36a68b"}, ] [package.dependencies] @@ -1387,13 +1387,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "8.1.0" +version = "8.2.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.1.0-py3-none-any.whl", hash = "sha256:3cd29f739ed65973840b068e3132135ce954c254d48b5b640484467ef7ab3c8c"}, - {file = "importlib_metadata-8.1.0.tar.gz", hash = "sha256:fcdcb1d5ead7bdf3dd32657bb94ebe9d2aabfe89a19782ddc32da5041d6ebfb4"}, + {file = "importlib_metadata-8.2.0-py3-none-any.whl", hash = "sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369"}, + {file = "importlib_metadata-8.2.0.tar.gz", hash = "sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d"}, ] [package.dependencies] @@ -1761,22 +1761,22 @@ testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "5.27.2" +version = "5.27.3" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-5.27.2-cp310-abi3-win32.whl", hash = "sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38"}, - {file = "protobuf-5.27.2-cp310-abi3-win_amd64.whl", 
hash = "sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505"}, - {file = "protobuf-5.27.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5"}, - {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b"}, - {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e"}, - {file = "protobuf-5.27.2-cp38-cp38-win32.whl", hash = "sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863"}, - {file = "protobuf-5.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6"}, - {file = "protobuf-5.27.2-cp39-cp39-win32.whl", hash = "sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca"}, - {file = "protobuf-5.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce"}, - {file = "protobuf-5.27.2-py3-none-any.whl", hash = "sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470"}, - {file = "protobuf-5.27.2.tar.gz", hash = "sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714"}, + {file = "protobuf-5.27.3-cp310-abi3-win32.whl", hash = "sha256:dcb307cd4ef8fec0cf52cb9105a03d06fbb5275ce6d84a6ae33bc6cf84e0a07b"}, + {file = "protobuf-5.27.3-cp310-abi3-win_amd64.whl", hash = "sha256:16ddf3f8c6c41e1e803da7abea17b1793a97ef079a912e42351eabb19b2cffe7"}, + {file = "protobuf-5.27.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:68248c60d53f6168f565a8c76dc58ba4fa2ade31c2d1ebdae6d80f969cdc2d4f"}, + {file = "protobuf-5.27.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:b8a994fb3d1c11156e7d1e427186662b64694a62b55936b2b9348f0a7c6625ce"}, + {file = "protobuf-5.27.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:a55c48f2a2092d8e213bd143474df33a6ae751b781dd1d1f4d953c128a415b25"}, + {file = "protobuf-5.27.3-cp38-cp38-win32.whl", hash = "sha256:043853dcb55cc262bf2e116215ad43fa0859caab79bb0b2d31b708f128ece035"}, + {file = "protobuf-5.27.3-cp38-cp38-win_amd64.whl", hash = "sha256:c2a105c24f08b1e53d6c7ffe69cb09d0031512f0b72f812dd4005b8112dbe91e"}, + {file = "protobuf-5.27.3-cp39-cp39-win32.whl", hash = "sha256:c84eee2c71ed83704f1afbf1a85c3171eab0fd1ade3b399b3fad0884cbcca8bf"}, + {file = "protobuf-5.27.3-cp39-cp39-win_amd64.whl", hash = "sha256:af7c0b7cfbbb649ad26132e53faa348580f844d9ca46fd3ec7ca48a1ea5db8a1"}, + {file = "protobuf-5.27.3-py3-none-any.whl", hash = "sha256:8572c6533e544ebf6899c360e91d6bcbbee2549251643d32c52cf8a5de295ba5"}, + {file = "protobuf-5.27.3.tar.gz", hash = "sha256:82460903e640f2b7e34ee81a947fdaad89de796d324bcbc38ff5430bcdead82c"}, ] [[package]] @@ -1908,13 +1908,13 @@ files = [ [[package]] name = "pyjwt" -version = "2.8.0" +version = "2.9.0" description = "JSON Web Token implementation in Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, ] 
[package.dependencies] @@ -1922,8 +1922,8 @@ cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryp [package.extras] crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] @@ -2268,57 +2268,55 @@ files = [ [[package]] name = "sqlalchemy" -version = "1.4.52" +version = "1.4.53" description = "Database Abstraction Library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "SQLAlchemy-1.4.52-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f68016f9a5713684c1507cc37133c28035f29925c75c0df2f9d0f7571e23720a"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24bb0f81fbbb13d737b7f76d1821ec0b117ce8cbb8ee5e8641ad2de41aa916d3"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e93983cc0d2edae253b3f2141b0a3fb07e41c76cd79c2ad743fc27eb79c3f6db"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:84e10772cfc333eb08d0b7ef808cd76e4a9a30a725fb62a0495877a57ee41d81"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:427988398d2902de042093d17f2b9619a5ebc605bf6372f7d70e29bde6736842"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-win32.whl", hash = "sha256:1296f2cdd6db09b98ceb3c93025f0da4835303b8ac46c15c2136e27ee4d18d94"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-win_amd64.whl", hash = "sha256:80e7f697bccc56ac6eac9e2df5c98b47de57e7006d2e46e1a3c17c546254f6ef"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2f251af4c75a675ea42766880ff430ac33291c8d0057acca79710f9e5a77383d"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8f9e4c4718f111d7b530c4e6fb4d28f9f110eb82e7961412955b3875b66de0"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afb1672b57f58c0318ad2cff80b384e816735ffc7e848d8aa51e0b0fc2f4b7bb"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-win32.whl", hash = "sha256:6e41cb5cda641f3754568d2ed8962f772a7f2b59403b95c60c89f3e0bd25f15e"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-win_amd64.whl", hash = "sha256:5bed4f8c3b69779de9d99eb03fd9ab67a850d74ab0243d1be9d4080e77b6af12"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:49e3772eb3380ac88d35495843daf3c03f094b713e66c7d017e322144a5c6b7c"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:618827c1a1c243d2540314c6e100aee7af09a709bd005bae971686fab6723554"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:de9acf369aaadb71a725b7e83a5ef40ca3de1cf4cdc93fa847df6b12d3cd924b"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-win32.whl", hash = "sha256:763bd97c4ebc74136ecf3526b34808c58945023a59927b416acebcd68d1fc126"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-win_amd64.whl", hash = "sha256:f12aaf94f4d9679ca475975578739e12cc5b461172e04d66f7a3c39dd14ffc64"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:853fcfd1f54224ea7aabcf34b227d2b64a08cbac116ecf376907968b29b8e763"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f98dbb8fcc6d1c03ae8ec735d3c62110949a3b8bc6e215053aa27096857afb45"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e135fff2e84103bc15c07edd8569612ce317d64bdb391f49ce57124a73f45c5"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b5de6af8852500d01398f5047d62ca3431d1e29a331d0b56c3e14cb03f8094c"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3491c85df263a5c2157c594f54a1a9c72265b75d3777e61ee13c556d9e43ffc9"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-win32.whl", hash = "sha256:427c282dd0deba1f07bcbf499cbcc9fe9a626743f5d4989bfdfd3ed3513003dd"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-win_amd64.whl", hash = "sha256:ca5ce82b11731492204cff8845c5e8ca1a4bd1ade85e3b8fcf86e7601bfc6a39"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:29d4247313abb2015f8979137fe65f4eaceead5247d39603cc4b4a610936cd2b"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a752bff4796bf22803d052d4841ebc3c55c26fb65551f2c96e90ac7c62be763a"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7ea11727feb2861deaa293c7971a4df57ef1c90e42cb53f0da40c3468388000"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d913f8953e098ca931ad7f58797f91deed26b435ec3756478b75c608aa80d139"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a251146b921725547ea1735b060a11e1be705017b568c9f8067ca61e6ef85f20"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-win32.whl", hash = "sha256:1f8e1c6a6b7f8e9407ad9afc0ea41c1f65225ce505b79bc0342159de9c890782"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-win_amd64.whl", hash = "sha256:346ed50cb2c30f5d7a03d888e25744154ceac6f0e6e1ab3bc7b5b77138d37710"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4dae6001457d4497736e3bc422165f107ecdd70b0d651fab7f731276e8b9e12d"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5d2e08d79f5bf250afb4a61426b41026e448da446b55e4770c2afdc1e200fce"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbce5dd7c7735e01d24f5a60177f3e589078f83c8a29e124a6521b76d825b85"}, - {file = 
"SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bdb7b4d889631a3b2a81a3347c4c3f031812eb4adeaa3ee4e6b0d028ad1852b5"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c294ae4e6bbd060dd79e2bd5bba8b6274d08ffd65b58d106394cb6abbf35cf45"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-win32.whl", hash = "sha256:bcdfb4b47fe04967669874fb1ce782a006756fdbebe7263f6a000e1db969120e"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-win_amd64.whl", hash = "sha256:7d0dbc56cb6af5088f3658982d3d8c1d6a82691f31f7b0da682c7b98fa914e91"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a551d5f3dc63f096ed41775ceec72fdf91462bb95abdc179010dc95a93957800"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ab773f9ad848118df7a9bbabca53e3f1002387cdbb6ee81693db808b82aaab0"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2de46f5d5396d5331127cfa71f837cca945f9a2b04f7cb5a01949cf676db7d1"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7027be7930a90d18a386b25ee8af30514c61f3852c7268899f23fdfbd3107181"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99224d621affbb3c1a4f72b631f8393045f4ce647dd3262f12fe3576918f8bf3"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-win32.whl", hash = "sha256:c124912fd4e1bb9d1e7dc193ed482a9f812769cb1e69363ab68e01801e859821"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-win_amd64.whl", hash = "sha256:2c286fab42e49db23c46ab02479f328b8bdb837d3e281cae546cc4085c83b680"}, - {file = "SQLAlchemy-1.4.52.tar.gz", hash = "sha256:80e63bbdc5217dad3485059bdf6f65a7d43f33c8bde619df5c220edf03d87296"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b61ac5457d91b5629a3dea2b258deb4cdd35ac8f6fa2031d2b9b2fff5b3396da"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a96aa8d425047551676b0e178ddb0683421e78eda879ab55775128b2e612cae"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e10ac36f0b994235c13388b39598bf27219ec8bdea5be99bdac612b01cbe525"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:437592b341a3229dd0443c9c803b0bf0a466f8f539014fef6cdb9c06b7edb7f9"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:784272ceb5eb71421fea9568749bcbe8bd019261a0e2e710a7efa76057af2499"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-win32.whl", hash = "sha256:122d7b5722df1a24402c6748bbb04687ef981493bb559d0cc0beffe722e0e6ed"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-win_amd64.whl", hash = "sha256:4604d42b2abccba266d3f5bbe883684b5df93e74054024c70d3fbb5eea45e530"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fb8e15dfa47f5de11ab073e12aadd6b502cfb7ac4bafd18bd18cfd1c7d13dbbc"}, + {file = 
"SQLAlchemy-1.4.53-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc8be4df55e8fde3006d9cb1f6b3df2ba26db613855dc4df2c0fcd5ec15cb3b7"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b11640251f9a9789fd96cd6e5d176b1c230230c70ad40299bcbcc568451b4c"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-win32.whl", hash = "sha256:cd534c716f86bdf95b7b984a34ee278c91d1b1d7d183e7e5ff878600b1696046"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-win_amd64.whl", hash = "sha256:6dd06572872ca13ef5a90306a3e5af787498ddaa17fb00109b1243642646cd69"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2774c24c405136c3ef472e2352bdca7330659d481fbf2283f996c0ef9eb90f22"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68a614765197b3d13a730d631a78c3bb9b3b72ba58ed7ab295d58d517464e315"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d13d4dfbc6e52363886b47cf02cf68c5d2a37c468626694dc210d7e97d4ad330"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-win32.whl", hash = "sha256:197065b91456574d70b6459bfa62bc0b52a4960a29ef923c375ec427274a3e05"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-win_amd64.whl", hash = "sha256:421306c4b936b0271a3ce2dc074928d5ece4a36f9c482daa5770f44ecfc3a883"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:13fc34b35d8ddb3fbe3f8fcfdf6c2546e676187f0fb20f5774da362ddaf8fa2d"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626be971ff89541cfd3e70b54be00b57a7f8557204decb6223ce0428fec058f3"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:991e42fdfec561ebc6a4fae7161a86d129d6069fa14210b96b8dd752afa7059c"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:95123f3a1e0e8020848fd32ba751db889a01a44e4e4fef7e58c87ddd0b2fca59"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c58e011e9e6373b3a091d83f20601fb335a3b4bace80bfcb914ac168aad3b70d"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:670c7769bf5dcae9aff331247b5d82fe635c63731088a46ce68ba2ba519ef36e"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07ba54f09033d387ae9df8d62cbe211ed7304e0bfbece1f8c55e21db9fae5c11"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a38834b4c183c33daf58544281395aad2e985f0b47cca1e88ea5ada88344e63"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:616492f5315128a847f293a7c552f3561ac7e996d2aa5dc46bef4fb0d3781f1d"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0cf8c0af9563892c6632f7343bc393dfce6eeef8e4d10c5fadba9c0390520bd"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-win32.whl", hash = 
"sha256:c05fe05941424c2f3747a8952381b7725e24cba2ca00141380e54789d5b616b6"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-win_amd64.whl", hash = "sha256:93e90aa3e3b2f8e8cbae4d5509f8e0cf82972378d323c740a8df1c1e9f484172"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:9d7368df54d3ed45a18955f6cec38ebe075290594ac0d5c87a8ddaff7e10de27"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89d8ac4158ef68eea8bb0f6dd0583127d9aa8720606964ba8eee20b254f9c83a"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16bb9fa4d00b4581b14d9f0e2224dc7745b854aa4687738279af0f48f7056c98"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4fe5168d0249c23f537950b6d75935ff2709365a113e29938a979aec36668ecf"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b8608d162d3bd29d807aab32c3fb6e2f8e225a43d1c54c917fed38513785380"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-win32.whl", hash = "sha256:a9d4d132198844bd6828047135ce7b887687c92925049a2468a605fc775c7a1a"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-win_amd64.whl", hash = "sha256:c15d1f1fcf1f9bec0499ae1d9132b950fcc7730f2d26d10484c8808b4e077816"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:edf094a20a386ff2ec73de65ef18014b250259cb860edc61741e240ca22d6981"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a9c3514ff19d9d30d8a8d378b24cd1dfa5528d20891481cb5f196117db6a48"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaaeedbceb4dfd688fff2faf25a9a87a391f548811494f7bff7fa701b639abc3"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d021699b9007deb7aa715629078830c99a5fec2753d9bdd5ff33290d363ef755"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0465b8a68f8f4de754c1966c45b187ac784ad97bc9747736f913130f0e1adea0"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-win32.whl", hash = "sha256:5f67b9e9dcac3241781e96575468d55a42332157dee04bdbf781df573dff5f85"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-win_amd64.whl", hash = "sha256:a8c2f2a0b2c4e3b86eb58c9b6bb98548205eea2fba9dae4edfd29dc6aebbe95a"}, + {file = "SQLAlchemy-1.4.53.tar.gz", hash = "sha256:5e6ab710c4c064755fd92d1a417bef360228a19bdf0eee32b03aa0f5f8e9fe0d"}, ] [package.dependencies] @@ -2329,17 +2327,17 @@ aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)", "mariadb (>=1.0.1,!=1.1.2)"] mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] +mssql-pymssql = ["pymssql", "pymssql"] +mssql-pyodbc = ["pyodbc", "pyodbc"] mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] 
-mysql-connector = ["mysql-connector-python"] +mysql-connector = ["mysql-connector-python", "mysql-connector-python"] oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-asyncpg = ["asyncpg", "asyncpg", "greenlet (!=0.4.17)", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)", "pg8000 (>=1.16.6,!=1.29.0)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] @@ -2578,4 +2576,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0.0" -content-hash = "d003418dcc0d68257a215186d21776941f8739bd3b4f898762a93e7895d7c89e" +content-hash = "2e59635abbdda081e891347ab5c488ffa09c8fd50e057544b181b774e950dad8" diff --git a/pyproject.toml b/pyproject.toml index 143ca2940..a464f1314 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "fence" -version = "10.1.0" +version = "10.2.0" description = "Gen3 AuthN/AuthZ OIDC Service" authors = ["CTDS UChicago "] license = "Apache-2.0" @@ -21,7 +21,7 @@ botocore = "*" cached_property = "^1.5.1" cdiserrors = "<2.0.0" cdislogging = "^1.0.0" -cdispyutils = "^2.0.1" +cdispyutils = ">=2.1.0" flask = ">=3.0.0" cryptography = ">=42.0.5" flask-cors = ">=3.0.3" diff --git a/tests/ci_commands_script.sh b/tests/ci_commands_script.sh index fef98a668..e1009e829 100755 --- a/tests/ci_commands_script.sh +++ b/tests/ci_commands_script.sh @@ -1,5 +1,20 @@ #!/usr/bin/env bash -mkdir -p /var/tmp/uwsgi_flask_metrics/ || true -export PROMETHEUS_MULTIPROC_DIR="/var/tmp/uwsgi_flask_metrics/" -poetry run pytest -vv --cov=fence --cov-report xml tests +# Get the directory of the current script +SCRIPT_DIR="$(dirname "$(realpath "$0")")" + +# assumes Fence repo folder structure +RELATIVE_PATH="../deployment/scripts/metrics/setup_prometheus" +METRICS_SETUP_SCRIPT_PATH="$(realpath "$SCRIPT_DIR/$RELATIVE_PATH")" + +echo "The full path to the METRICS_SETUP_SCRIPT_PATH file is: $METRICS_SETUP_SCRIPT_PATH" + +# Check if the script exists and is executable +if [ -x "$METRICS_SETUP_SCRIPT_PATH" ]; then + echo "Running $METRICS_SETUP_SCRIPT_PATH" + source "$METRICS_SETUP_SCRIPT_PATH" /var/tmp/prometheus_metrics +else + echo "$METRICS_SETUP_SCRIPT_PATH does not exist or is not executable. Attempting test run anyway..." 
+fi + +poetry run pytest -vv --cov=fence --cov-report xml diff --git a/tests/test-fence-config.yaml b/tests/test-fence-config.yaml index 38ccbd147..1529cf5a1 100755 --- a/tests/test-fence-config.yaml +++ b/tests/test-fence-config.yaml @@ -520,7 +520,7 @@ ARBORIST: '/arborist' # - Support Azure Blob Data Access Methods # ////////////////////////////////////////////////////////////////////////////////////// -# https://docs.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&tabs=azure-portal#view-account-access-keys +# https://learn.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&tabs=azure-portal#view-account-access-keys AZ_BLOB_CREDENTIALS: 'fake connection string' # AZ_BLOB_CONTAINER_URL: 'https://storageaccount.blob.core.windows.net/container/' diff --git a/tests/test_metrics.py b/tests/test_metrics.py index be7d6b2ab..cd105650e 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -32,9 +32,9 @@ from unittest.mock import ANY, MagicMock, patch import fence -from fence.metrics import metrics from fence.config import config from fence.blueprints.data.indexd import get_bucket_from_urls +from fence.metrics import Metrics from fence.models import User from fence.resources.audit.utils import _clean_authorization_request_url from tests import utils @@ -709,13 +709,25 @@ def test_login_log_push_to_sqs( ###################### -def test_disabled_prometheus_metrics(client, monkeypatch): +@pytest.fixture(scope="function") +def disable_metrics_app(app): + """ + temporarily disable metrics on the session-scoped app for this function + """ + enabled_metrics = app.metrics + app.metrics = Metrics(enabled=False) + + yield app + + app.metrics = enabled_metrics + + +def test_disabled_prometheus_metrics(disable_metrics_app, client, monkeypatch): """ When metrics gathering is not enabled, the metrics endpoint should not error, but it should not return any data. """ - monkeypatch.setitem(config, "ENABLE_PROMETHEUS_METRICS", False) - metrics.add_login_event( + disable_metrics_app.metrics.add_login_event( user_sub="123", idp="test_idp", fence_idp="shib", @@ -727,7 +739,7 @@ def test_disabled_prometheus_metrics(client, monkeypatch): assert resp.text == "" -def test_record_prometheus_events(prometheus_metrics_before, client): +def test_record_prometheus_events(app, prometheus_metrics_before, client): """ Validate the returned value of the metrics endpoint before any event is logged, after an event is logged, and after more events (one identical to the 1st one, and two different) are logged. @@ -741,7 +753,7 @@ def test_record_prometheus_events(prometheus_metrics_before, client): # record a login event and check that we get both a metric for the specific IDP, and an # IDP-agnostic metric for the total number of login events. The latter should have no IDP # information (no `fence_idp` or `shib_idp`). 
- metrics.add_login_event( + app.metrics.add_login_event( user_sub="123", idp="test_idp", fence_idp="shib", @@ -777,7 +789,7 @@ def test_record_prometheus_events(prometheus_metrics_before, client): assert_prometheus_metrics(prometheus_metrics_before, resp.text, expected_metrics) # same login: should increase the existing counter by 1 - metrics.add_login_event( + app.metrics.add_login_event( user_sub="123", idp="test_idp", fence_idp="shib", @@ -785,7 +797,7 @@ def test_record_prometheus_events(prometheus_metrics_before, client): client_id="test_azp", ) # login with different IDP labels: should create a new metric - metrics.add_login_event( + app.metrics.add_login_event( user_sub="123", idp="another_idp", fence_idp=None, @@ -793,7 +805,7 @@ def test_record_prometheus_events(prometheus_metrics_before, client): client_id="test_azp", ) # new signed URL event: should create a new metric - metrics.add_signed_url_event( + app.metrics.add_signed_url_event( action="upload", protocol="s3", acl=None,
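Taken together, the changes above replace the old module-level `metrics` singleton with a `Metrics` instance stored on the Flask `app` object, created in `app_init()` and reached through `flask.current_app`. Below is a minimal sketch (not part of the diff) of that pattern, assuming Fence is importable; the route names and the throwaway temp directory are illustrative only (the real app passes `/var/tmp/prometheus_metrics`, prepared by `setup_prometheus`):

```python
# Sketch of the app-scoped Metrics pattern this diff introduces.
import tempfile

import flask

from fence.metrics import Metrics

app = flask.Flask(__name__)

# Mirrors app_init(): attach a Metrics instance to the app. Passing
# enabled=False instead would turn every metrics call below into a no-op,
# which is exactly what the new disable_metrics_app test fixture relies on.
app.metrics = Metrics(enabled=True, prometheus_dir=tempfile.mkdtemp())


@app.route("/login-example")  # hypothetical route, for illustration only
def login_example():
    # Application code reaches the instance through current_app rather than
    # importing a module-level singleton, so tests can swap it out per app.
    flask.current_app.metrics.add_login_event(
        user_sub="123",
        idp="test_idp",
        fence_idp=None,
        shib_idp=None,
        client_id="test_azp",
    )
    return "ok"


@app.route("/metrics-example")  # hypothetical; Fence's real endpoint is /metrics
def metrics_example():
    data, content_type = flask.current_app.metrics.get_latest_metrics()
    return flask.Response(data, content_type=content_type)
```

The `disable_metrics_app` fixture in `tests/test_metrics.py` uses the same mechanism: it swaps `app.metrics` for a `Metrics(enabled=False)` instance, whose methods are no-ops, and restores the original instance after the test.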