diff --git a/.gitignore b/.gitignore
index 4dece8ab..f296a46b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -160,3 +160,6 @@ cython_debug/
 
 .idea/
 .pdm-python
+
+.envrc
+.direnv/
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 5083ce07..c1732742 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -134,15 +134,15 @@ Alternatively, you'll want to run PostgreSQL locally or in a container, and run
 You'll need to have the following environment variables set.
 | Environment Variable | Type | Default | Description |
 |---------------------------|------|---------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| `AUTH0_DOMAIN` | str | "vipyrsec.us.auth0.com" | Authentication domain for Auth0 |
-| `AUTH0_AUDIENCE` | str | "dragonfly.vipyrsec.com" | Audience field for Auth0 |
+| `DRAGONFLY_AUTH0_DOMAIN` | str | "vipyrsec.us.auth0.com" | Authentication domain for Auth0 |
+| `DRAGONFLY_AUTH0_AUDIENCE` | str | "dragonfly.vipyrsec.com" | Audience field for Auth0 |
 | `DRAGONFLY_GITHUB_TOKEN` | str | | Github PAT for accessing YARA rules in the security-intelligence repository |
-| `JOB_TIMEOUT` | int | 60 \* 2 | The maximum time to wait for clients to respond with job results. After this time has elapsed, the server will begin distributing this job to other clients |
+| `DRAGONFLY_JOB_TIMEOUT` | int | 60 \* 2 | The maximum time to wait for clients to respond with job results. After this time has elapsed, the server will begin distributing this job to other clients |
 | | | | |
-| `REPORTER_URL` | str | "" | The url of the reporter microservice |
-| `DB_URL` | str | "postgresql+psycopg2://postgres:postgres@localhost:5432" | PostgreSQL database connection string |
-| `DB_CONNECTION_POOL_MAX_SIZE` | int | 15 | The max number of concurrent database connections |
-| `DB_CONNECTION_POOL_PERSISTENT_SIZE` | int | 5 | The number of concurrent database connections to maintain in the connection pool |
+| `DRAGONFLY_REPORTER_URL` | str | "" | The url of the reporter microservice |
+| `DRAGONFLY_DB_URL` | str | "postgresql+psycopg2://postgres:postgres@localhost:5432" | PostgreSQL database connection string |
+| `DRAGONFLY_DB_CONNECTION_POOL_MAX_SIZE` | int | 15 | The max number of concurrent database connections |
+| `DRAGONFLY_DB_CONNECTION_POOL_PERSISTENT_SIZE` | int | 5 | The number of concurrent database connections to maintain in the connection pool |
 | | | | |
 | `SENTRY_DSN` | str | "" | Sentry Data Source Name (DSN) |
 | `SENTRY_ENVIRONMENT` | str | "" | Sentry environment |
diff --git a/alembic/env.py b/alembic/env.py
index e24fc6d0..3c47912f 100644
--- a/alembic/env.py
+++ b/alembic/env.py
@@ -14,7 +14,9 @@
 load_dotenv()
 
 # modify the config here because ConfigParser can't handle default values
-config.set_main_option("sqlalchemy.url", os.getenv("DB_URL", "postgresql+psycopg2://postgres:postgres@localhost:5432"))
+config.set_main_option(
+    "sqlalchemy.url", os.getenv("DRAGONFLY_DB_URL", "postgresql+psycopg2://postgres:postgres@localhost:5432")
+)
 
 # Interpret the config file for Python logging.
 # This line sets up loggers basically.
diff --git a/compose.yaml b/compose.yaml
index e5b6e59e..4152a165 100644
--- a/compose.yaml
+++ b/compose.yaml
@@ -9,7 +9,7 @@ services:
     tty: true
     restart: always
     environment:
-      DB_URL: "postgresql+psycopg2://postgres:postgres@db:5432/dragonfly"
+      DRAGONFLY_DB_URL: "postgresql+psycopg2://postgres:postgres@db:5432/dragonfly"
       MICROSOFT_TENANT_ID: tenant_id
       MICROSOFT_CLIENT_ID: client_id
       MICROSOFT_CLIENT_SECRET: client_secret
diff --git a/src/mainframe/constants.py b/src/mainframe/constants.py
index 178a774f..8be27fd2 100644
--- a/src/mainframe/constants.py
+++ b/src/mainframe/constants.py
@@ -17,7 +17,7 @@ class EnvConfig(BaseSettings):
     )
 
 
-class Mainframe(EnvConfig):
+class Mainframe(EnvConfig, env_prefix="dragonfly_"):
     client_origin_url: str = ""
     auth0_domain: str = ""
     auth0_audience: str = ""
@@ -30,7 +30,7 @@ class Mainframe(EnvConfig):
     db_connection_pool_persistent_size: int = 5
     """The number of concurrent connections to maintain in the connection pool"""
 
-    dragonfly_github_token: str
+    github_token: str
 
     job_timeout: int = 60 * 2
 
diff --git a/src/mainframe/rules.py b/src/mainframe/rules.py
index 4ada22f1..d0e35fd6 100644
--- a/src/mainframe/rules.py
+++ b/src/mainframe/rules.py
@@ -60,7 +60,7 @@ def fetch_zipfile(http_client: httpx.Client, *, repository: str, access_token: s
 
 def fetch_rules(http_client: httpx.Client) -> Rules:
     """Return the commit hash and all the rules"""
 
-    access_token = mainframe_settings.dragonfly_github_token
+    access_token = mainframe_settings.github_token
 
     commit_hash = fetch_commit_hash(http_client, repository=REPOSITORY, access_token=access_token)
diff --git a/tests/test_rules.py b/tests/test_rules.py
index fb109c98..45baed4e 100644
--- a/tests/test_rules.py
+++ b/tests/test_rules.py
@@ -84,7 +84,7 @@ def test_fetch_rules(monkeypatch: MonkeyPatch):
     for filename, contents in files.items():
         zip.writestr(filename + ".yara", contents)
 
-    monkeypatch.setattr("mainframe.constants.mainframe_settings.dragonfly_github_token", "token")
+    monkeypatch.setattr("mainframe.constants.mainframe_settings.github_token", "token")
     monkeypatch.setattr("mainframe.rules.fetch_commit_hash", Mock(return_value="test commit hash"))
     monkeypatch.setattr("mainframe.rules.fetch_zipfile", Mock(return_value=zip))
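
# Illustrative sketch, not part of the diff above: how the env_prefix="dragonfly_"
# class argument added in src/mainframe/constants.py maps environment variables to
# settings fields via pydantic-settings. The field names mirror the diff; this
# standalone class is a hypothetical, minimal stand-in for the real EnvConfig/Mainframe pair.
import os

from pydantic_settings import BaseSettings, SettingsConfigDict


class Mainframe(BaseSettings):
    # Equivalent to passing env_prefix="dragonfly_" as a class keyword argument,
    # as the diff does on the EnvConfig subclass.
    model_config = SettingsConfigDict(env_prefix="dragonfly_")

    github_token: str = ""
    job_timeout: int = 60 * 2


# With the prefix in place, DRAGONFLY_GITHUB_TOKEN populates `github_token`, which is
# why the field drops its `dragonfly_` name and CONTRIBUTING.md/compose.yaml switch to
# DRAGONFLY_*-prefixed variables.
os.environ["DRAGONFLY_GITHUB_TOKEN"] = "token"
print(Mainframe().github_token)  # -> "token"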