Commit: Merge branch 'main' of https://github.com/HHS/simpler-grants-gov into 2673/users-token-sub-endpoint
Showing 19 changed files with 349 additions and 40 deletions.
@@ -0,0 +1,32 @@
name: Deploy Metabase
run-name: Deploy ${{ github.ref_name }} to Metabase ${{ inputs.environment || (github.event_name == 'release' && 'prod') || 'nonprod'}}

on:
  workflow_dispatch:
    inputs:
      environment:
        description: "target environment"
        required: true
        default: "dev"
        type: choice
        options:
          - dev
          - staging
          - prod
      image-tag:
        description: "Metabase enterprise image tag to deploy"
        required: true
        type: string

jobs:
  deploy:
    name: Deploy
    uses: ./.github/workflows/deploy-metabase.yml
    strategy:
      max-parallel: 1
      fail-fast: false
      matrix:
        envs: ${{ github.event_name == 'release' && fromJSON('["prod"]') || github.ref_name == 'main' && fromJSON('["dev", "staging"]') || fromJSON('["dev"]') }}
    with:
      version: ${{ inputs.image-tag }}
      environment: ${{ matrix.envs }}
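
The `envs` matrix expression leans on the fact that GitHub Actions' `&&` and `||` operators short-circuit and return the last evaluated operand rather than a boolean, so a single expression selects the deployment targets for each trigger; with `max-parallel: 1`, those environments are then deployed one at a time. A minimal Python sketch of how the expression resolves (the function name and example ref values are illustrative, not part of the repository):

def target_environments(event_name: str, ref_name: str) -> list[str]:
    # Mirrors the `envs` matrix expression above; illustrative only.
    if event_name == "release":
        return ["prod"]  # releases deploy straight to prod
    if ref_name == "main":
        return ["dev", "staging"]  # main fans out to both non-prod environments
    return ["dev"]  # any other ref only deploys to dev


assert target_environments("release", "v1.2.3") == ["prod"]
assert target_environments("workflow_dispatch", "main") == ["dev", "staging"]
assert target_environments("workflow_dispatch", "feature/metabase-upgrade") == ["dev"]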
@@ -0,0 +1,39 @@
name: Chained Deploy
run-name: Chained Deploy Layer for ${{ github.ref_name }} to Metabase ${{ inputs.environment || (github.event_name == 'release' && 'prod') || 'nonprod' }}

on:
  workflow_call:
    inputs:
      environment:
        description: "the name of the application environment (e.g. dev, staging, prod)"
        required: true
        type: string
      version:
        description: "git reference to deploy (e.g., a branch, tag, or commit SHA)"
        required: true
        type: string

concurrency: cd-${{ inputs.environment }}

jobs:
  deploy:
    name: Deploy
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write
    steps:
      - uses: actions/checkout@v4
      - uses: hashicorp/setup-terraform@v3
        with:
          terraform_version: 1.9.7
          terraform_wrapper: false

      - name: Configure AWS credentials
        uses: ./.github/actions/configure-aws-credentials
        with:
          app_name: analytics
          environment: ${{ inputs.environment }}

      - name: Deploy metabase
        run: make metabase-deploy APP_NAME=metabase ENVIRONMENT=${{ inputs.environment }} IMAGE_TAG=${{ inputs.version }}
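
The `concurrency: cd-${{ inputs.environment }}` group serializes deploys per environment, and the `id-token: write` permission is presumably what the repository's AWS-credentials action needs for OIDC role assumption. The two `workflow_call` inputs are interpolated directly into the deploy step's shell command; a small sketch of the resulting invocation (the tag value below is a made-up example):

def metabase_deploy_command(environment: str, version: str) -> str:
    # Reconstructs the shell command run by the "Deploy metabase" step;
    # the image tag used in the example call is hypothetical.
    return (
        "make metabase-deploy "
        f"APP_NAME=metabase ENVIRONMENT={environment} IMAGE_TAG={version}"
    )


print(metabase_deploy_command("staging", "v0.50.6"))
# make metabase-deploy APP_NAME=metabase ENVIRONMENT=staging IMAGE_TAG=v0.50.6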
@@ -0,0 +1,56 @@
from src.api.schemas.extension import Schema, fields
from src.api.schemas.response_schema import AbstractResponseSchema, FileResponseSchema
from src.constants.lookup_constants import ExtractType
from src.pagination.pagination_schema import generate_pagination_schema


class ExtractMetadataFilterV1Schema(Schema):
    extract_type = fields.Enum(
        ExtractType,
        allow_none=True,
        metadata={
            "description": "The type of extract to filter by",
            "example": "opportunities_csv",
        },
    )
    start_date = fields.Date(
        allow_none=True,
        metadata={
            "description": "The start date for filtering extracts",
            "example": "2023-10-01",
        },
    )
    end_date = fields.Date(
        allow_none=True,
        metadata={
            "description": "The end date for filtering extracts",
            "example": "2023-10-07",
        },
    )


class ExtractMetadataRequestSchema(AbstractResponseSchema):
    filters = fields.Nested(ExtractMetadataFilterV1Schema())
    pagination = fields.Nested(
        generate_pagination_schema(
            "ExtractMetadataPaginationV1Schema",
            ["created_at"],
        ),
        required=True,
    )


class ExtractMetadataResponseSchema(FileResponseSchema):
    extract_metadata_id = fields.Integer(
        metadata={"description": "The ID of the extract metadata", "example": 1}
    )
    extract_type = fields.String(
        metadata={"description": "The type of extract", "example": "opportunity_data_extract"}
    )


class ExtractMetadataListResponseSchema(AbstractResponseSchema):
    data = fields.List(
        fields.Nested(ExtractMetadataResponseSchema),
        metadata={"description": "A list of extract metadata records"},
    )
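
A quick usage sketch for these schemas, assuming the import path and payload shape used by the tests further down; it is illustrative only and not part of the commit:

from datetime import date

from src.api.extracts_v1.extract_schema import ExtractMetadataRequestSchema

# Load (validate + deserialize) a request payload; date strings are parsed
# into datetime.date objects, as the tests below also verify.
request = ExtractMetadataRequestSchema().load(
    {
        "filters": {
            "extract_type": "opportunities_csv",
            "start_date": "2023-10-01",
            "end_date": "2023-10-07",
        },
        "pagination": {
            "order_by": "created_at",
            "page_offset": 1,
            "page_size": 25,
            "sort_direction": "ascending",
        },
    }
)
assert request["filters"]["start_date"] == date(2023, 10, 1)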
@@ -0,0 +1,110 @@
from datetime import date

import pytest
from marshmallow import ValidationError

from src.api.extracts_v1.extract_schema import (
    ExtractMetadataListResponseSchema,
    ExtractMetadataRequestSchema,
    ExtractMetadataResponseSchema,
)
from src.db.models.extract_models import ExtractMetadata


@pytest.fixture
def sample_extract_metadata():
    return ExtractMetadata(
        extract_metadata_id=1,
        extract_type="opportunities_csv",
        file_name="test_extract.csv",
        file_path="/test/path/test_extract.csv",
        file_size_bytes=2048,
    )


def test_request_schema_validation():
    schema = ExtractMetadataRequestSchema()

    # Test valid data
    valid_data = {
        "filters": {
            "extract_type": "opportunities_csv",
            "start_date": "2023-10-01",
            "end_date": "2023-10-07",
        },
        "pagination": {
            "order_by": "created_at",
            "page_offset": 1,
            "page_size": 25,
            "sort_direction": "ascending",
        },
    }
    result = schema.load(valid_data)
    assert result["filters"]["extract_type"] == "opportunities_csv"
    assert result["filters"]["start_date"] == date(2023, 10, 1)
    assert result["filters"]["end_date"] == date(2023, 10, 7)

    # Test invalid extract_type
    invalid_data = {"extract_type": "invalid_type", "start_date": "2023-10-01"}
    with pytest.raises(ValidationError):
        schema.load(invalid_data)


def test_response_schema_single(sample_extract_metadata):
    schema = ExtractMetadataResponseSchema()

    sample_extract_metadata.download_path = "http://www.example.com"
    extract_metadata = schema.dump(sample_extract_metadata)

    assert extract_metadata["download_path"] == "http://www.example.com"

    assert extract_metadata["extract_metadata_id"] == 1
    assert extract_metadata["extract_type"] == "opportunities_csv"
    assert extract_metadata["download_path"] == "http://www.example.com"
    assert extract_metadata["file_size_bytes"] == 2048


def test_response_schema_list(sample_extract_metadata):
    schema = ExtractMetadataListResponseSchema()

    # Create a list of two metadata records
    metadata_list = {
        "data": [
            sample_extract_metadata,
            ExtractMetadata(
                extract_metadata_id=2,
                extract_type="opportunities_xml",
                file_name="test_extract2.xml",
                file_path="/test/path/test_extract2.xml",
                file_size_bytes=1024,
            ),
        ]
    }

    result = schema.dump(metadata_list)

    assert len(result["data"]) == 2
    assert result["data"][0]["extract_metadata_id"] == 1
    assert result["data"][0]["extract_type"] == "opportunities_csv"
    assert result["data"][1]["extract_metadata_id"] == 2
    assert result["data"][1]["extract_type"] == "opportunities_xml"


def test_request_schema_null_values():
    schema = ExtractMetadataRequestSchema()

    # Test with some null values
    data = {
        "filters": {"extract_type": None, "start_date": "2023-10-01", "end_date": None},
        "pagination": {
            "order_by": "created_at",
            "page_offset": 1,
            "page_size": 25,
            "sort_direction": "ascending",
        },
    }

    result = schema.load(data)
    assert result["filters"]["extract_type"] is None
    assert result["filters"]["start_date"] == date(2023, 10, 1)
    assert result["filters"]["end_date"] is None