diff --git a/.github/workflows/integration_tests.yml b/.github/workflows/integration_tests.yml index 1a414d35..6dc4fe26 100644 --- a/.github/workflows/integration_tests.yml +++ b/.github/workflows/integration_tests.yml @@ -10,9 +10,9 @@ on: MCP_VENUE_DEV_AIRFLOW_ENDPOINT: description: "Base URL for the Airflow endpoint in MCP Venue Dev (i.e. http://abc.def.ghi:port-number)" type: string - # MCP_VENUE_TEST_AIRFLOW_ENDPOINT: - # description: "Base URL for the Airflow endpoint in MCP Venue Test (i.e. http://abc.def.ghi:port-number)" - # type: string + MCP_VENUE_TEST_AIRFLOW_ENDPOINT: + description: "Base URL for the Airflow endpoint in MCP Venue Test (i.e. http://abc.def.ghi:port-number)" + type: string jobs: integration-tests: runs-on: ubuntu-latest @@ -29,6 +29,7 @@ jobs: python -m pip install --upgrade pip pip install -e ".[test]" + - name: MCP Venue Dev - Integration tests id: mcp_venue_dev_integration_tests continue-on-error: true @@ -37,23 +38,27 @@ jobs: run: | pytest -vv --gherkin-terminal-reporter \ unity-test/system/integration \ + --venue="dev" \ --airflow-endpoint=${{ github.event.inputs.MCP_VENUE_DEV_AIRFLOW_ENDPOINT || vars.MCP_VENUE_DEV_AIRFLOW_ENDPOINT }} - # - name: MCP Venue Test - Integration tests - # id: mcp_venue_test_integration_tests - # continue-on-error: true - # run: | - # pytest -vv --gherkin-terminal-reporter \ - # unity-test/system/integration \ - # --airflow-endpoint=${{ github.event.inputs.MCP_VENUE_TEST_AIRFLOW_ENDPOINT || vars.MCP_VENUE_TEST_AIRFLOW_ENDPOINT }} + - name: MCP Venue Test - Integration tests + id: mcp_venue_test_integration_tests + continue-on-error: true + env: + AIRFLOW_WEBSERVER_PASSWORD: ${{ secrets.MCP_VENUE_TEST_AIRFLOW_WEBSERVER_PASSWORD }} + run: | + pytest -vv --gherkin-terminal-reporter \ + unity-test/system/integration \ + --venue="test" \ + --airflow-endpoint=${{ github.event.inputs.MCP_VENUE_TEST_AIRFLOW_ENDPOINT || vars.MCP_VENUE_TEST_AIRFLOW_ENDPOINT }} - name: Check Integration Tests Results if: always() run: | dev_status=${{ steps.mcp_venue_dev_integration_tests.outcome }} - # test_status=${{ steps.mcp_venue_test_integration_tests.outcome }} - echo "Dev Integration Tests: $dev_status" - # echo "Test Integration Tests: $test_status" + test_status=${{ steps.mcp_venue_test_integration_tests.outcome }} + echo "Dev Venue Integration Tests status: $dev_status" + echo "Test Venue Integration Tests status: $test_status" if [ "$dev_status" != "success" ]; then echo "MCP Venue Dev Integration Tests failed." @@ -61,7 +66,7 @@ jobs: fi # Uncomment this block when MCP Venue Test Integration tests are re-enabled - # if [ "$test_status" != "success" ]; then - # echo "MCP Venue Test Integration Tests failed." - # exit 1 - # fi + if [ "$test_status" != "success" ]; then + echo "MCP Venue Test Integration Tests failed." 
+ exit 1 + fi diff --git a/.github/workflows/smoke_tests.yml b/.github/workflows/smoke_tests.yml index d7326d93..a9888e37 100644 --- a/.github/workflows/smoke_tests.yml +++ b/.github/workflows/smoke_tests.yml @@ -57,17 +57,16 @@ jobs: --airflow-endpoint=${{ github.event.inputs.MCP_VENUE_DEV_AIRFLOW_ENDPOINT || vars.MCP_VENUE_DEV_AIRFLOW_ENDPOINT }} \ --ogc-processes-endpoint=${{ github.event.inputs.MCP_VENUE_DEV_OGC_PROCESSES_ENDPOINT || vars.MCP_VENUE_DEV_OGC_PROCESSES_ENDPOINT }} \ -# Temporary: comment out checks on MCP venue test until the SPS is redeployed -# - name: MCP Venue Test - Smoke tests -# id: mcp_venue_test_smoke_tests -# env: -# AIRFLOW_WEBSERVER_PASSWORD: ${{ secrets.MCP_VENUE_TEST_AIRFLOW_WEBSERVER_PASSWORD }} -# continue-on-error: true -# run: | -# pytest -vv --gherkin-terminal-reporter \ -# unity-test/system/smoke \ -# --airflow-endpoint=${{ github.event.inputs.MCP_VENUE_TEST_AIRFLOW_ENDPOINT || vars.MCP_VENUE_TEST_AIRFLOW_ENDPOINT }} \ -# --ogc-processes-endpoint=${{ github.event.inputs.MCP_VENUE_TEST_OGC_PROCESSES_ENDPOINT || vars.MCP_VENUE_TEST_OGC_PROCESSES_ENDPOINT }} + - name: MCP Venue Test - Smoke tests + id: mcp_venue_test_smoke_tests + env: + AIRFLOW_WEBSERVER_PASSWORD: ${{ secrets.MCP_VENUE_TEST_AIRFLOW_WEBSERVER_PASSWORD }} + continue-on-error: true + run: | + pytest -vv --gherkin-terminal-reporter \ + unity-test/system/smoke \ + --airflow-endpoint=${{ github.event.inputs.MCP_VENUE_TEST_AIRFLOW_ENDPOINT || vars.MCP_VENUE_TEST_AIRFLOW_ENDPOINT }} \ + --ogc-processes-endpoint=${{ github.event.inputs.MCP_VENUE_TEST_OGC_PROCESSES_ENDPOINT || vars.MCP_VENUE_TEST_OGC_PROCESSES_ENDPOINT }} - name: MCP Venue Ops - Smoke tests id: mcp_venue_ops_smoke_tests @@ -76,7 +75,7 @@ continue-on-error: true run: | pytest -vv --gherkin-terminal-reporter \ - unity-test/system/smoke/step_defs/test_airflow_api_health.py \ + unity-test/system/smoke/ \ --airflow-endpoint=${{ github.event.inputs.MCP_VENUE_OPS_AIRFLOW_ENDPOINT || vars.MCP_VENUE_OPS_AIRFLOW_ENDPOINT }} \ --ogc-processes-endpoint=${{ github.event.inputs.MCP_VENUE_OPS_OGC_PROCESSES_ENDPOINT || vars.MCP_VENUE_OPS_OGC_PROCESSES_ENDPOINT }} @@ -96,24 +95,30 @@ if: always() run: | dev_status=${{ steps.mcp_venue_dev_smoke_tests.outcome }} - ops_status=${{ steps.mcp_venue_ops_smoke_tests.outcome }} + test_status=${{ steps.mcp_venue_test_smoke_tests.outcome }} sbg_dev_status=${{ steps.mcp_sbg_dev_smoke_tests.outcome }} + ops_status=${{ steps.mcp_venue_ops_smoke_tests.outcome }} echo "Dev Smoke Tests: $dev_status" - echo "Ops Smoke Tests: $ops_status" + echo "Test Smoke Tests: $test_status" echo "SBG Dev Smoke Tests: $sbg_dev_status" + echo "Ops Smoke Tests: $ops_status" - # FIXME: must re-enable [ "$test_status" != "success" ] - if [ "$dev_status" != "success" ] || [ "$ops_status" != "success" ] || [ "$sbg_dev_status" != "success" ]; then + if [ "$dev_status" != "success" ] || [ "$test_status" != "success" ] \ + || [ "$sbg_dev_status" != "success" ] \ + || [ "$ops_status" != "success" ]; then echo "One or more smoke tests failed." if [ "$dev_status" != "success" ]; then echo "MCP Venue Dev Smoke Tests failed." fi - if [ "$ops_status" != "success" ]; then - echo "MCP Venue Ops Smoke Tests failed." + if [ "$test_status" != "success" ]; then + echo "MCP Venue Test Smoke Tests failed." fi if [ "$sbg_dev_status" != "success" ]; then echo "MCP Venue SBG Dev Smoke Tests failed." fi + if [ "$ops_status" != "success" ]; then + echo "MCP Venue Ops Smoke Tests failed." + fi exit 1 else echo "All smoke tests passed."
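The integration and smoke test workflows above pass `--venue`, `--airflow-endpoint`, and `--ogc-processes-endpoint` straight to pytest, so the test suite must register these as command-line options. Below is a minimal sketch of how such options could be declared in the suite's conftest.py; the option names match the workflow invocations, but the fixture names and defaults are illustrative assumptions, not the repository's actual implementation.

# Hypothetical conftest.py sketch: registers the CLI options used by the
# integration and smoke test workflows above. Fixture names and defaults
# are assumptions for illustration only.
import pytest


def pytest_addoption(parser):
    parser.addoption("--venue", action="store", default="dev",
                     help="Target venue (dev, test, ops, sbg-dev)")
    parser.addoption("--airflow-endpoint", action="store", default=None,
                     help="Base URL of the Airflow webserver")
    parser.addoption("--ogc-processes-endpoint", action="store", default=None,
                     help="Base URL of the OGC API - Processes server")


@pytest.fixture(scope="session")
def venue(request):
    return request.config.getoption("--venue")


@pytest.fixture(scope="session")
def airflow_api_url(request):
    return request.config.getoption("--airflow-endpoint")


@pytest.fixture(scope="session")
def ogc_processes_api_url(request):
    return request.config.getoption("--ogc-processes-endpoint")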
diff --git a/.github/workflows/static_analysis.yml b/.github/workflows/static_analysis.yml index db9d1d57..aa809510 100644 --- a/.github/workflows/static_analysis.yml +++ b/.github/workflows/static_analysis.yml @@ -1,6 +1,8 @@ name: Static Analysis -on: [pull_request] +on: + pull_request: + workflow_dispatch: jobs: pre-commit: diff --git a/.gitignore b/.gitignore index 670de40d..2d405b66 100644 --- a/.gitignore +++ b/.gitignore @@ -212,9 +212,10 @@ $RECYCLE.BIN/ # Local .terraform directories **/.terraform/* -# .tfstate files +# Terraform files *.tfstate *.tfstate.* +**.hcl # Crash log files crash.log @@ -247,3 +248,5 @@ terraform.rc /lambda/deployment_packages/* !/lambda/deployment_packages/.gitkeep + +**/*.cfg diff --git a/.pre-commit-config-ci.yaml b/.pre-commit-config-ci.yaml index 5c57ede5..e8918c28 100644 --- a/.pre-commit-config-ci.yaml +++ b/.pre-commit-config-ci.yaml @@ -8,8 +8,7 @@ repos: - id: check-yaml - id: check-xml - id: check-added-large-files - args: - - --maxkb=50000 + args: [--maxkb=50000] - id: check-json # Checks json files for parsable syntax. - id: pretty-format-json # Sets a standard for formatting json files. args: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7404fca6..43127352 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,8 +8,7 @@ repos: - id: check-yaml - id: check-xml - id: check-added-large-files - args: - - --maxkb=50000 + args: ["--maxkb=50000"] - id: check-json # Checks json files for parsable syntax. - id: pretty-format-json # Sets a standard for formatting json files. args: diff --git a/CHANGELOG.md b/CHANGELOG.md index bf6a60fc..b16ac6af 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,59 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+# [Unity Release 24.3] - 2024-09-22 + +## Tags + +- SPS Version 2.2.0 +- OGC API Version 2.0.0 +- OGC Python Client Version 2.0.0 + +## Repositories + +- unity-sps: +- unity-sps-ogc-processes-api: +- unity-sps-ogc-processes-api-client-python: + +## Epics + +- EPIC: `Security` + - [[Bug]: Upgrade EKS 1.27 AMIs](https://github.com/unity-sds/unity-sps/issues/159) +- EPIC: `Scaling` + - [[New Feature]: Increase ephemeral disk space for Airflow workers](https://github.com/unity-sds/unity-sps/issues/152) + - [[New Feature]: Enable users to select the EC2 type to execute a workload](https://github.com/unity-sds/unity-sps/issues/153) + - [[New Feature]: Set the DAG run status to "failed" if the main worker task failed](https://github.com/unity-sds/unity-sps/issues/189) + - [[New Feature]: Demonstrate use of ECR within an Airflow DAG](https://github.com/unity-sds/unity-sps/issues/186) +- EPIC: `Airflow/WPS-T Integration` + - [[New Feature]: Create test to deploy, execute and undeploy the CWL DAG](https://github.com/unity-sds/unity-sps/issues/131) + - [[New Feature]: Enable execution of OGC data processing requests with arbitrary parameter values](https://github.com/unity-sds/unity-sps/issues/129) +- EPIC: `Production Venue Deployments` + - [[New Feature]: Airflow HTTPD Proxy development and configuration](https://github.com/unity-sds/unity-sps/issues/125) + - [[New Feature]: Expose SPS health check endpoints](https://github.com/unity-sds/unity-sps/issues/127) +- EPIC: `SPS Infrastructure` + - [[New Feature]: Update documentation for SPS deployment](https://github.com/unity-sds/unity-sps/issues/116) + - [[New Feature]: Review the SPS GitBook documentation](https://github.com/unity-sds/unity-sps/issues/118) + - [[New Feature]: Store SPS Terraform state on S3](https://github.com/unity-sds/unity-sps/issues/132) + - [[New Feature]: Parametrize the SPS Integration Tests](https://github.com/unity-sds/unity-sps/issues/155) + - [[New Feature] Upgrade SPS to latest version of Airflow 2.10.0](https://github.com/unity-sds/unity-sps/issues/195) + +## Docker Containers + +- ghcr.io/unity-sds/unity-sps/sps-airflow:2.2.0 +- ghcr.io/unity-sds/unity-sps/sps-docker-cwl:2.2.0 +- ghcr.io/unity-sds/unity-sps-ogc-processes-api/unity-sps-ogc-processes-api:2.0.0 + +## Documentation + +- For Administrators: + - [SPS Deployment with Terraform](https://app.gitbook.com/o/xZRqGQeQXJ0RP4VMj7Lq/s/UMIRhLdbRQTvMWop8Il9/developer-docs/science-processing/docs/admin-guide/sps-deployment-with-terraform) + - [Interacting with an Existing SPS Deployment](https://app.gitbook.com/o/xZRqGQeQXJ0RP4VMj7Lq/s/UMIRhLdbRQTvMWop8Il9/developer-docs/science-processing/docs/admin-guide/interacting-with-an-existing-sps-deployment) + - [SPS Airflow Custom Docker Image Build Instructions](https://app.gitbook.com/o/xZRqGQeQXJ0RP4VMj7Lq/s/UMIRhLdbRQTvMWop8Il9/developer-docs/science-processing/docs/admin-guide/sps-airflow-custom-docker-image-build-instructions) + - [SPS Post Deployment Operations](https://app.gitbook.com/o/xZRqGQeQXJ0RP4VMj7Lq/s/UMIRhLdbRQTvMWop8Il9/developer-docs/science-processing/docs/admin-guide/sps-post-deployment-operations) +- For Developers: + - [Tutorial: Deploy, Execute, and Undeploy a Process using the OGC API - Processes](https://app.gitbook.com/o/xZRqGQeQXJ0RP4VMj7Lq/s/UMIRhLdbRQTvMWop8Il9/developer-docs/science-processing/docs/developers-guide/tutorial-deploy-execute-and-undeploy-a-process-using-the-ogc-api-processes) +- For Users: + - [Tutorial: Register and Execute a CWL 
Workflow](https://app.gitbook.com/o/xZRqGQeQXJ0RP4VMj7Lq/s/UMIRhLdbRQTvMWop8Il9/developer-docs/science-processing/docs/users-guide/tutorial-register-and-execute-a-cwl-workflow) # [Unity Release 24.2] - 2024-07-01 @@ -38,12 +91,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - [[Task] Add TESTING.md file to SPS repo](https://github.com/unity-sds/unity-sps/issues/99) - EPIC: `SPS Infrastructure` - [[New Feature] Store SPS Terraform state on S3](https://github.com/unity-sds/unity-sps/issues/132) +- EPIC: `SPS Security` + - [[Bug]: Upgrade EKS 1.27 AMIs](https://github.com/unity-sds/unity-sps/issues/159) + - [[Bug]: Upgrade to EKS 1.29 AMIs](https://github.com/unity-sds/unity-sps/issues/206) ## Docker Containers - ghcr.io/unity-sds/unity-sps/sps-airflow:2.1.0 - ghcr.io/unity-sds/unity-sps/sps-docker-cwl:2.1.0 -- ghcr.io/unity-sds/unity-sps-ogc-processes-api/unity-sps-ogc-processes-api:2.1.0 +- ghcr.io/unity-sds/unity-sps-ogc-processes-api/unity-sps-ogc-processes-api:1.0.0 ## Documentation diff --git a/lambda/deployment_packages/.gitkeep b/CONVENTIONS.md similarity index 100% rename from lambda/deployment_packages/.gitkeep rename to CONVENTIONS.md diff --git a/unity-test/TESTING.md b/TESTING.md similarity index 99% rename from unity-test/TESTING.md rename to TESTING.md index 76034a8e..c155a553 100644 --- a/unity-test/TESTING.md +++ b/TESTING.md @@ -181,7 +181,7 @@ The below list of test categories are included in our testing setup. Further det ### Security Tests -- Location: `/unity` +- Location: `/.github/workflows` - Purpose: Identify potential security vulnerabilities. - Running Tests: - Manually: N/A diff --git a/airflow/dags/busybox.py b/airflow/dags/busybox.py index 166f2740..4adc75be 100644 --- a/airflow/dags/busybox.py +++ b/airflow/dags/busybox.py @@ -16,7 +16,7 @@ POD_TEMPLATE_FILE = "/opt/airflow/dags/docker_cwl_pod.yaml" # The Kubernetes namespace within which the Pod is run (it must already exist) -POD_NAMESPACE = "airflow" +POD_NAMESPACE = "sps" # The path of the working directory where the CWL workflow is executed # (aka the starting directory for cwl-runner). diff --git a/airflow/dags/cwl_dag.py b/airflow/dags/cwl_dag.py index dfb8fbef..016d4c10 100644 --- a/airflow/dags/cwl_dag.py +++ b/airflow/dags/cwl_dag.py @@ -16,17 +16,20 @@ from airflow.models.baseoperator import chain from airflow.models.param import Param from airflow.operators.python import PythonOperator, get_current_context -from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator from airflow.utils.trigger_rule import TriggerRule from kubernetes.client import models as k8s -from unity_sps_utils import get_affinity +from unity_sps_utils import SpsKubernetesPodOperator, get_affinity from airflow import DAG # The Kubernetes namespace within which the Pod is run (it must already exist) -POD_NAMESPACE = "airflow" +POD_NAMESPACE = "sps" POD_LABEL = "cwl_task" -SPS_DOCKER_CWL_IMAGE = "ghcr.io/unity-sds/unity-sps/sps-docker-cwl:2.1.0" +# SPS_DOCKER_CWL_IMAGE = "ghcr.io/unity-sds/unity-sps/sps-docker-cwl:2.1.0" +SPS_DOCKER_CWL_IMAGE = "ghcr.io/unity-sds/unity-sps/sps-docker-cwl:2.2.0" + +NODE_POOL_DEFAULT = "airflow-kubernetes-pod-operator" +NODE_POOL_HIGH_WORKLOAD = "airflow-kubernetes-pod-operator-high-workload" # The path of the working directory where the CWL workflow is executed # (aka the starting directory for cwl-runner). 
@@ -39,6 +42,7 @@ ) DEFAULT_CWL_ARGUMENTS = json.dumps({"message": "Hello Unity"}) + # Alternative arguments to execute SBG Pre-Process # DEFAULT_CWL_WORKFLOW = "https://raw.githubusercontent.com/unity-sds/sbg-workflows/main/preprocess/sbg-preprocess-workflow.cwl" # DEFAULT_CWL_ARGUMENTS = "https://raw.githubusercontent.com/unity-sds/sbg-workflows/main/preprocess/sbg-preprocess-workflow.dev.yml" @@ -53,7 +57,9 @@ requests={ # "cpu": "2660m", # 2.67 vCPUs, specified in milliCPUs # "memory": "22Gi", # Rounded to 22 GiB for easier specification - "ephemeral-storage": "10Gi" + "memory": "{{ params.request_memory }}", + "cpu": "{{ params.request_cpu }} ", + "ephemeral-storage": "{{ params.request_storage }} ", }, # limits={ # # "cpu": "2660m", # Optional: set the same as requests if you want a fixed allocation @@ -69,12 +75,10 @@ "start_date": datetime.utcfromtimestamp(0), } -# common parameters -INPUT_PROCESSING_LABELS = ["label1", "label2"] - dag = DAG( dag_id="cwl_dag", description="CWL DAG", + dag_display_name="CWL DAG", tags=["CWL"], is_paused_upon_creation=False, catchup=False, @@ -90,15 +94,35 @@ DEFAULT_CWL_ARGUMENTS, type="string", title="CWL workflow parameters", - description="The job parameters encoded as a JSON string, or the URL of a JSON or YAML file", + description=("The job parameters encoded as a JSON string, " "or the URL of a JSON or YAML file"), + ), + "request_memory": Param( + "4Gi", + type="string", + enum=["4Gi", "8Gi", "16Gi", "32Gi", "64Gi", "128Gi", "256Gi"], + title="Docker container memory", + ), + "request_cpu": Param( + "4", + type="string", + enum=["2", "4", "8", "16", "32"], + title="Docker container CPU", ), + "request_storage": Param( + "10Gi", + type="string", + enum=["10Gi", "50Gi", "100Gi", "150Gi", "200Gi", "250Gi"], + title="Docker container storage", + ), + "use_ecr": Param(False, type="boolean", title="Log into AWS Elastic Container Registry (ECR)"), }, ) def setup(ti=None, **context): """ - Task that creates the working directory on the shared volume. + Task that creates the working directory on the shared volume + and parses the input parameter values.
""" context = get_current_context() dag_run_id = context["dag_run"].run_id @@ -107,10 +131,31 @@ def setup(ti=None, **context): os.makedirs(local_dir, exist_ok=True) logging.info(f"Created directory: {local_dir}") + # select the node pool based on what resources were requested + node_pool = NODE_POOL_DEFAULT + storage = context["params"]["request_storage"] # 100Gi + storage = int(storage[0:-2]) # 100 + memory = context["params"]["request_memory"] # 32Gi + memory = int(memory[0:-2]) # 32 + cpu = int(context["params"]["request_cpu"]) # 8 + + logging.info(f"Requesting storage={storage}Gi memory={memory}Gi CPU={cpu}") + if (storage > 30) or (memory > 32) or (cpu > 8): + node_pool = NODE_POOL_HIGH_WORKLOAD + logging.info(f"Selecting node pool={node_pool}") + ti.xcom_push(key="node_pool", value=node_pool) + + # select "use_ecr" argument and determine if ECR login is required + logging.info("Use ECR: %s", context["params"]["use_ecr"]) + if context["params"]["use_ecr"]: + ecr_login = os.environ["AIRFLOW_VAR_ECR_URI"] + ti.xcom_push(key="ecr_login", value=ecr_login) + logging.info("ECR login: %s", ecr_login) + setup_task = PythonOperator(task_id="Setup", python_callable=setup, dag=dag) -cwl_task = KubernetesPodOperator( +cwl_task = SpsKubernetesPodOperator( retries=0, task_id="cwl_task", namespace=POD_NAMESPACE, @@ -120,7 +165,14 @@ def setup(ti=None, **context): in_cluster=True, get_logs=True, startup_timeout_seconds=1800, - arguments=["{{ params.cwl_workflow }}", "{{ params.cwl_args }}"], + arguments=[ + "-w", + "{{ params.cwl_workflow }}", + "-j", + "{{ params.cwl_args }}", + "-e", + "{{ ti.xcom_pull(task_ids='Setup', key='ecr_login') }}", + ], container_security_context={"privileged": True}, container_resources=CONTAINER_RESOURCES, container_logs=True, @@ -134,12 +186,13 @@ def setup(ti=None, **context): ) ], dag=dag, - node_selector={"karpenter.sh/nodepool": "airflow-kubernetes-pod-operator"}, + node_selector={"karpenter.sh/nodepool": "{{ti.xcom_pull(task_ids='Setup', key='node_pool')}}"}, labels={"app": POD_LABEL}, annotations={"karpenter.sh/do-not-disrupt": "true"}, + # note: 'affinity' cannot yet be templated affinity=get_affinity( capacity_type=["spot"], - instance_type=["r7i.xlarge"], + # instance_type=["t3.2xlarge"], anti_affinity_label=POD_LABEL, ), on_finish_action="keep_pod", @@ -165,4 +218,4 @@ def cleanup(**context): task_id="Cleanup", python_callable=cleanup, dag=dag, trigger_rule=TriggerRule.ALL_DONE ) -chain(setup_task, cwl_task, cleanup_task) +chain(setup_task.as_setup(), cwl_task, cleanup_task.as_teardown(setups=setup_task)) diff --git a/airflow/dags/docker_cwl_pod.yaml b/airflow/dags/docker_cwl_pod.yaml index ead0a727..71458072 100644 --- a/airflow/dags/docker_cwl_pod.yaml +++ b/airflow/dags/docker_cwl_pod.yaml @@ -21,7 +21,7 @@ spec: containers: - name: cwl-docker - image: ghcr.io/unity-sds/unity-sps/sps-docker-cwl:2.1.0 + image: ghcr.io/unity-sds/unity-sps/sps-docker-cwl:2.2.0 imagePullPolicy: Always command: ["/usr/share/cwl/docker_cwl_entrypoint.sh"] securityContext: diff --git a/airflow/dags/env_test.py b/airflow/dags/env_test.py index 44409675..78ae8bbb 100644 --- a/airflow/dags/env_test.py +++ b/airflow/dags/env_test.py @@ -32,11 +32,9 @@ check_vars = BashOperator( task_id="check", - bash_command="echo {} {} {} {} {}".format( + bash_command="echo {} {} {}".format( Variable.get("unity_project"), Variable.get("unity_venue"), - Variable.get("unity_deployment_name"), - Variable.get("unity_counter"), Variable.get("unity_cluster_name"), ), ) diff --git 
a/airflow/dags/karpenter_test.py b/airflow/dags/karpenter_test.py index 82d88a1e..3553bc08 100644 --- a/airflow/dags/karpenter_test.py +++ b/airflow/dags/karpenter_test.py @@ -5,7 +5,7 @@ from airflow import DAG -POD_NAMESPACE = "airflow" +POD_NAMESPACE = "sps" POD_LABEL = "karpenter_test_task" default_args = { diff --git a/airflow/dags/sbg_L1_to_L2_e2e_cwl_dag.py b/airflow/dags/sbg_L1_to_L2_e2e_cwl_dag.py index d0f1901e..789a9722 100644 --- a/airflow/dags/sbg_L1_to_L2_e2e_cwl_dag.py +++ b/airflow/dags/sbg_L1_to_L2_e2e_cwl_dag.py @@ -16,7 +16,7 @@ POD_TEMPLATE_FILE = "/opt/airflow/dags/docker_cwl_pod.yaml" # The Kubernetes namespace within which the Pod is run (it must already exist) -POD_NAMESPACE = "airflow" +POD_NAMESPACE = "sps" # The path of the working directory where the CWL workflow is executed # (aka the starting directory for cwl-runner). diff --git a/airflow/dags/sbg_L1_to_L2_e2e_cwl_step_by_step_dag.py b/airflow/dags/sbg_L1_to_L2_e2e_cwl_step_by_step_dag.py index 4949917a..962953aa 100644 --- a/airflow/dags/sbg_L1_to_L2_e2e_cwl_step_by_step_dag.py +++ b/airflow/dags/sbg_L1_to_L2_e2e_cwl_step_by_step_dag.py @@ -21,9 +21,9 @@ from airflow import DAG # The Kubernetes namespace within which the Pod is run (it must already exist) -POD_NAMESPACE = "airflow" +POD_NAMESPACE = "sps" POD_LABEL = "sbg_task" -SPS_DOCKER_CWL_IMAGE = "ghcr.io/unity-sds/unity-sps/sps-docker-cwl:2.1.0" +SPS_DOCKER_CWL_IMAGE = "ghcr.io/unity-sds/unity-sps/sps-docker-cwl:2.2.0" # The path of the working directory where the CWL workflow is executed # (aka the starting directory for cwl-runner). @@ -573,9 +573,9 @@ def cleanup(**context): ) chain( - setup_task, + setup_task.as_setup(), preprocess_task, [isofit_task, reflect_correct_task], [resample_task, frcover_task], - cleanup_task, + cleanup_task.as_teardown(setups=setup_task), ) diff --git a/airflow/dags/sbg_preprocess_cwl_dag.py b/airflow/dags/sbg_preprocess_cwl_dag.py index 7002af78..d84b4fd8 100644 --- a/airflow/dags/sbg_preprocess_cwl_dag.py +++ b/airflow/dags/sbg_preprocess_cwl_dag.py @@ -15,9 +15,9 @@ from airflow import DAG # The Kubernetes namespace within which the Pod is run (it must already exist) -POD_NAMESPACE = "airflow" +POD_NAMESPACE = "sps" POD_LABEL = "sbg_preprocess_task" -SPS_DOCKER_CWL_IMAGE = "ghcr.io/unity-sds/unity-sps/sps-docker-cwl:2.1.0" +SPS_DOCKER_CWL_IMAGE = "ghcr.io/unity-sds/unity-sps/sps-docker-cwl:2.2.0" # The path of the working directory where the CWL workflow is executed # (aka the starting directory for cwl-runner). 
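For reference, the node-pool routing that cwl_dag.py's new setup task performs (shown earlier in this patch) boils down to a simple rule: any request above 30Gi of ephemeral storage, 32Gi of memory, or 8 CPUs is routed to the high-workload node pool, otherwise the default pool is used. A standalone Python sketch of that rule follows; the helper function itself is hypothetical and only mirrors the thresholds in the DAG code above.

# Illustrative sketch of the node-pool selection used in cwl_dag.py's setup task.
# Thresholds mirror the DAG code above; this helper is not part of the repository.
NODE_POOL_DEFAULT = "airflow-kubernetes-pod-operator"
NODE_POOL_HIGH_WORKLOAD = "airflow-kubernetes-pod-operator-high-workload"


def select_node_pool(request_storage: str, request_memory: str, request_cpu: str) -> str:
    storage = int(request_storage[0:-2])  # e.g. "100Gi" -> 100
    memory = int(request_memory[0:-2])    # e.g. "32Gi" -> 32
    cpu = int(request_cpu)                # e.g. "8" -> 8
    if storage > 30 or memory > 32 or cpu > 8:
        return NODE_POOL_HIGH_WORKLOAD
    return NODE_POOL_DEFAULT


assert select_node_pool("10Gi", "4Gi", "4") == NODE_POOL_DEFAULT
assert select_node_pool("100Gi", "32Gi", "8") == NODE_POOL_HIGH_WORKLOAD

In the DAG itself the selected value is pushed to XCom by the Setup task and consumed by the templated node_selector on the KubernetesPodOperator, as shown in the cwl_dag.py hunks above.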
@@ -140,5 +140,4 @@ def cleanup(**context): task_id="Cleanup", python_callable=cleanup, dag=dag, trigger_rule=TriggerRule.ALL_DONE ) - -setup_task >> cwl_task >> cleanup_task +setup_task.as_setup() >> cwl_task >> cleanup_task.as_teardown(setups=setup_task) diff --git a/airflow/docker/custom_airflow/Dockerfile b/airflow/docker/custom_airflow/Dockerfile index e11dc637..ceb0199d 100644 --- a/airflow/docker/custom_airflow/Dockerfile +++ b/airflow/docker/custom_airflow/Dockerfile @@ -1,4 +1,4 @@ -FROM apache/airflow:2.9.1-python3.11 +FROM apache/airflow:2.10.0-python3.11 RUN pip install cwltool==3.1.20240112164112 RUN pip install boto3==1.34.89 diff --git a/airflow/docker/cwl/docker_cwl_entrypoint.sh b/airflow/docker/cwl/docker_cwl_entrypoint.sh index b2a4e59a..dbaabaf3 100755 --- a/airflow/docker/cwl/docker_cwl_entrypoint.sh +++ b/airflow/docker/cwl/docker_cwl_entrypoint.sh @@ -1,22 +1,30 @@ #!/bin/sh # Script to execute a CWL workflow that includes Docker containers # The Docker engine is started before the CWL execution, and stopped afterwards. -# $1: the CWL workflow URL +# -w: the CWL workflow URL # (example: https://github.com/unity-sds/sbg-workflows/blob/main/L1-to-L2-e2e.cwl) -# $2: a) the CWL job parameters as a JSON formatted string +# -j: a) the CWL job parameters as a JSON formatted string # (example: { "name": "John Doe" }) # OR b) The URL of a YAML or JSON file containing the job parameters # (example: https://github.com/unity-sds/sbg-workflows/blob/main/L1-to-L2-e2e.dev.yml) -# $3: optional path to an output JSON file that needs to be shared as Airflow "xcom" data +# -e: the ECR login URL where the AWS account ID and region are specific to the Airflow installation +# (example: .dkr.ecr..amazonaws.com) [optional] +# -o: path to an output JSON file that needs to be shared as Airflow "xcom" data [optional] # Must be the same as the path of the Persistent Volume mounted by the Airflow KubernetesPodOperator # that executes this script WORKING_DIR="/scratch" set -ex -cwl_workflow=$1 -job_args=$2 -json_output=$3 +while getopts w:j:e:o: flag +do + case "${flag}" in + w) cwl_workflow=${OPTARG};; + j) job_args=${OPTARG};; + e) ecr_login=${OPTARG};; + o) json_output=${OPTARG};; + esac +done # create working directory if it doesn't exist mkdir -p "$WORKING_DIR" @@ -48,12 +56,23 @@ do sleep 1 done +# Activate Python virtual environments for executables +. /usr/share/cwl/venv/bin/activate + +# Log into AWS ECR repository +if [ "$ecr_login" != "None" ]; then +IFS=. read account_id dkr ecr aws_region amazonaws com < k8s.V1Affinity: + """ + Function that builds a Kubernetes Pod affinity constraint for allocating + Pods onto Kubernetes Nodes (either already available or to be provisioned). + + Parameters + ---------- + capacity_type: list of "spot" and "on-demand" elements.
Defaults to "spot" + instance_type: optional list of specific EC2 types + anti_affinity_label: optional label to guarantee that each Pod will be allocated to a separate Node + + Returns + ------- + k8s.V1Affinity: object containing the Pod placement constraints + + """ + + if capacity_type is None: + capacity_type = ["spot"] + node_constraints = [{"key": "karpenter.sh/capacity-type", "operator": "In", "values": capacity_type}] + if instance_type is not None: + node_constraints.append( + { + "key": "node.kubernetes.io/instance-type", + "operator": "In", + "values": instance_type, + } + ) + + pod_anti_affinity = None + if anti_affinity_label: + pod_anti_affinity = k8s.V1PodAntiAffinity( + required_during_scheduling_ignored_during_execution=[ + k8s.V1PodAffinityTerm( + k8s.V1LabelSelector( + match_expressions=[{"key": "pod", "operator": "In", "values": [anti_affinity_label]}] + ), + topology_key="kubernetes.io/hostname", + ), ] - }, - } + ) + + affinity = k8s.V1Affinity( + node_affinity=k8s.V1NodeAffinity( + required_during_scheduling_ignored_during_execution=k8s.V1NodeSelector( + node_selector_terms=[ + k8s.V1NodeSelectorTerm( + match_expressions=node_constraints, + ) + ] + ), + ), + pod_anti_affinity=pod_anti_affinity, + ) + return affinity diff --git a/lambda/src/airflow-dag-trigger/airflow_dag_trigger.py b/lambda/src/airflow-dag-trigger/airflow_dag_trigger.py deleted file mode 100644 index 53e6e4e6..00000000 --- a/lambda/src/airflow-dag-trigger/airflow_dag_trigger.py +++ /dev/null @@ -1,47 +0,0 @@ -import logging -import os -import uuid -from datetime import datetime -from typing import List -from urllib.parse import unquote_plus - -import requests -from aws_lambda_powertools.utilities.parser import envelopes, event_parser -from aws_lambda_powertools.utilities.parser.models import S3Model -from aws_lambda_powertools.utilities.typing import LambdaContext - -logger = logging.getLogger() -logger.setLevel(logging.INFO) - -AIRFLOW_BASE_API_ENDPOINT = os.environ.get("AIRFLOW_BASE_API_ENDPOINT") -AIRFLOW_USERNAME = os.environ.get("AIRFLOW_USERNAME") -AIRFLOW_PASSWORD = os.environ.get("AIRFLOW_PASSWORD") - - -def trigger_airflow_dag(dag_id: str): - url = f"{AIRFLOW_BASE_API_ENDPOINT}/dags/{dag_id}/dagRuns" - dag_run_id = str(uuid.uuid4()) - logical_date = datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ") - headers = {"Content-Type": "application/json", "Accept": "application/json"} - auth = (AIRFLOW_USERNAME, AIRFLOW_PASSWORD) - payload = {"dag_run_id": dag_run_id, "logical_date": logical_date, "conf": {}, "note": ""} - response = requests.post(url, auth=auth, headers=headers, json=payload) - if response.status_code == 200 or response.status_code == 201: - logger.info(f"Successfully triggered Airflow DAG {dag_id}: {response.json()}") - else: - logger.error(f"Failed to trigger Airflow DAG {dag_id}: {response.text}") - - -@event_parser(model=S3Model, envelope=envelopes.SnsSqsEnvelope) -def lambda_handler(event: List[S3Model], context: LambdaContext) -> dict: - try: - object_key = unquote_plus(event[0].Records[0].s3.object.key) - bucket_name = unquote_plus(event[0].Records[0].s3.bucket.name) - logger.info(f"Source bucket: {bucket_name}, Source key: {object_key}") - dag_id = "sbg-l1-to-l2-e2e-cwl-step-by-step-dag" - trigger_airflow_dag(dag_id) - except Exception as e: - logger.error(f"An unexpected error occurred: {e}") - return {"statusCode": 500, "body": "An unexpected error occurred: " + str(e)} - - return {"statusCode": 200, "body": "Success!"} diff --git a/ogc-application-packages/cwl_dag.json 
b/ogc-application-packages/cwl_dag.json new file mode 100644 index 00000000..0fbf9ebb --- /dev/null +++ b/ogc-application-packages/cwl_dag.json @@ -0,0 +1,74 @@ +{ + "executionUnit": { + "image": "ghcr.io/unity-sds/unity-sps/sps-docker-cwl:2.1.0", + "type": "docker" + }, + "processDescription": { + "description": "This process executes any CWL workflow.", + "id": "cwl_dag", + "inputs": { + "cwl_args": { + "description": "The URL of the CWL workflow's YAML parameters file", + "maxOccurs": 1, + "minOccurs": 1, + "schema": { + "format": "uri", + "type": "string" + }, + "title": "CWL Workflow Parameters URL" + }, + "cwl_workflow": { + "description": "The URL of the CWL workflow", + "maxOccurs": 1, + "minOccurs": 1, + "schema": { + "format": "uri", + "type": "string" + }, + "title": "CWL Workflow URL" + }, + "request_cpu": { + "description": "The number of CPU cores requested for the job", + "maxOccurs": 1, + "minOccurs": 1, + "schema": { + "type": "string" + }, + "title": "Requested CPU" + }, + "request_memory": { + "default": "8Gi", + "description": "The amount of memory requested for the job", + "maxOccurs": 1, + "minOccurs": 1, + "schema": { + "type": "string" + }, + "title": "Requested Memory" + }, + "request_storage": { + "description": "The amount of storage requested for the job", + "maxOccurs": 1, + "minOccurs": 1, + "schema": { + "type": "string" + }, + "title": "Requested Storage" + } + }, + "jobControlOptions": [ + "async-execute" + ], + "outputs": { + "result": { + "description": "The result of the SBG Preprocess Workflow execution", + "schema": { + "$ref": "some-ref" + }, + "title": "Process Result" + } + }, + "title": "Generic CWL Process", + "version": "1.0.0" + } +} diff --git a/ogc-application-packages/karpenter_test.json b/ogc-application-packages/karpenter_test.json new file mode 100644 index 00000000..253b7327 --- /dev/null +++ b/ogc-application-packages/karpenter_test.json @@ -0,0 +1,36 @@ +{ + "executionUnit": { + "image": "busybox", + "type": "docker" + }, + "processDescription": { + "description": "This process tests Karpenter node provisioning with different instance types.", + "id": "karpenter_test", + "inputs": { + "placeholder": { + "default": 1, + "description": "A placeholder parameter", + "maxOccurs": 1, + "minOccurs": 1, + "schema": { + "type": "integer" + }, + "title": "Placeholder" + } + }, + "jobControlOptions": [ + "async-execute" + ], + "outputs": { + "result": { + "description": "The result of the Karpenter test execution", + "schema": { + "$ref": "some-ref" + }, + "title": "Process Result" + } + }, + "title": "Karpenter Test Process", + "version": "1.0.0" + } +} diff --git a/ogc-application-packages/sbg_preprocess_cwl_dag.json b/ogc-application-packages/sbg_preprocess_cwl_dag.json new file mode 100644 index 00000000..95eeea42 --- /dev/null +++ b/ogc-application-packages/sbg_preprocess_cwl_dag.json @@ -0,0 +1,48 @@ +{ + "executionUnit": { + "image": "ghcr.io/unity-sds/unity-sps/sps-docker-cwl:2.2.0", + "type": "docker" + }, + "processDescription": { + "description": "This process executes the SBG Preprocess Workflow using CWL.", + "id": "sbg_preprocess_cwl_dag", + "inputs": { + "cwl_args": { + "default": "https://raw.githubusercontent.com/unity-sds/sbg-workflows/main/preprocess/sbg-preprocess-workflow.dev.yml", + "description": "The SBG Pre-process YAML parameters URL", + "maxOccurs": 1, + "minOccurs": 1, + "schema": { + "format": "uri", + "type": "string" + }, + "title": "CWL Workflow Parameters" + }, + "cwl_workflow": { + "default": 
"https://raw.githubusercontent.com/unity-sds/sbg-workflows/main/preprocess/sbg-preprocess-workflow.cwl", + "description": "The SBG Pre-process CWL workflow URL", + "maxOccurs": 1, + "minOccurs": 1, + "schema": { + "format": "uri", + "type": "string" + }, + "title": "CWL Workflow" + } + }, + "jobControlOptions": [ + "async-execute" + ], + "outputs": { + "result": { + "description": "The result of the SBG Preprocess Workflow execution", + "schema": { + "$ref": "some-ref" + }, + "title": "Process Result" + } + }, + "title": "SBG Preprocess CWL Workflow", + "version": "1.0.0" + } +} diff --git a/pyproject.toml b/pyproject.toml index 55a61425..80b29c9b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,9 +4,12 @@ build-backend = "setuptools.build_meta" [project] name = "unity-sps" -version = "2.1.0" +version = "2.2.0" authors = [ { name = "Drew Meyers", email = "drew.meyers@jpl.nasa.gov" }, + { name = "Luca Cinquini", email = "luca.cinquini@jpl.nasa.gov" }, + { name = "Nikki Tebaldi", email = "nicole.tebaldi@jpl.nasa.gov" }, + { name = "Brad Lunsford", email = "bradley.t.lunsford@jpl.nasa.gov" } ] description = "The science processing service area of Unity." classifiers = [ @@ -37,11 +40,12 @@ test = [ "pytest-bdd==7.1.1", "pytest-mock==3.12.0", "requests==2.31.0", - "apache-airflow==2.9.2", - "kubernetes==30.1.0", - "boto3==1.34.143", + "apache-airflow==2.10.0", + "kubernetes==29.0.0", + "boto3==1.34.46", "backoff==2.2.1", - "apache-airflow-providers-cncf-kubernetes==8.3.2" + "apache-airflow-providers-cncf-kubernetes==8.4.1", + "unity_sps_ogc_processes_api_python_client @ git+https://github.com/unity-sds/unity-sps-ogc-processes-api-client-python.git@2.0.0" ] experiment = [] lambda-airflow-dag-trigger = [ diff --git a/terraform-unity/.terraform.lock.hcl b/terraform-unity/.terraform.lock.hcl index 35ad8b71..8ef7a4f8 100644 --- a/terraform-unity/.terraform.lock.hcl +++ b/terraform-unity/.terraform.lock.hcl @@ -2,25 +2,25 @@ # Manual edits may be lost in future updates. 
provider "registry.terraform.io/hashicorp/aws" { - version = "5.47.0" - constraints = "5.47.0" + version = "5.50.0" + constraints = ">= 5.50.0, 5.50.0" hashes = [ - "h1:T0tupfn2Ubj18Y7xmO0pFMvti1Qns2K6EGXenR6Hg30=", - "zh:06037a14e47e8f82d0b3b326cd188566272b808b7970a9249a11db26d475b83d", - "zh:116b7dd58ca964a1056249d2b6550f399b0a6bc9a7920b7ee134242114432c9f", - "zh:1aa089c81459071c1d65ba7454f1122159e1fa1b5384e6e9ef85c8264f8a9ecb", - "zh:2c1471acba40c4944aa88dda761093c0c969db6408bdc1a4fb62417788cd6bb6", - "zh:3b950bea06ea4bf1ec359a97a4f1745b7efca7fc2da368843666020dd0ebc5d4", - "zh:7191c5c2fce834d584153dcd5269ed3042437f224d341ad85df06b2247bd09b2", - "zh:76d841b3f247f9bb3899dec3b4d871613a4ae8a83a581a827655d34b1bbee0ee", - "zh:7c656ce252fafc2c915dad43a0a7da17dba975207d75841a02f3f2b92d51ec25", - "zh:8ec97118cbdef64139c52b719e4e22443e67a1f37ea1597cd45b2e9b97332a35", + "h1:LevuTzPS4S7t+Vh6Kpz77pBNDAwChaos91/6+CVnD4w=", + "zh:19be42f5a545d6712dee4bdb704b018d23bacf5d902ac3cb061eb1750dfe6a20", + "zh:1d880bdba95ce96efde37e5bcf457a57df2c1effa9b47bc67fa29c1a264ae53b", + "zh:1e9c78e324d7492be5e7744436ed71d66fe4eca3fb6af07a28efd0d1e3bf7640", + "zh:27ac672aa61b3795931561fdbe4a306ad1132af517d7711c14569429b2cc694f", + "zh:3b978423dead02f9a98d25de118adf264a2331acdc4550ea93bed01feabc12e7", + "zh:490d7eb4b922ba1b57e0ab8dec1a08df6517485febcab1e091fd6011281c3472", + "zh:64e7c84e18dac1af5778d6f516e01a46f9c91d710867c39fbc7efa3cd972dc62", + "zh:73867ac2956dcdd377121b3aa8fe2e1085e77fae9b61d018f56a863277ea4b6e", + "zh:7ed899d0d5c49f009b445d7816e4bf702d9c48205c24cf884cd2ae0247160455", "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", - "zh:a369deca7938236a7da59f7ad1fe18137f736764c9015ed10e88edb6e8505980", - "zh:a743882fb099401eae0c86d9388a6faadbbc27b2ac9477aeef643e5de4eec3f9", - "zh:d5f960f58aff06fc58e244fea6e665800384cacb8cd64a556f8e145b98650372", - "zh:e31ffcfd560132ffbff2f574928ba392e663202a750750ed39a8950031b75623", - "zh:ebd9061b92a772144564f35a63d5a08cb45e14a9d39294fda185f2e0de9c8e28", + "zh:9b93784b3fb13d08cf95a4131c49b56bf7e1cd35daad6156b3658a89ce6fb58f", + "zh:b29d77eb75de474e46eb47e539c48916628d85599bcf14e5cc500b14a4578e75", + "zh:bbd9cec8ca705452e4a3d21d56474eacb8cc7b1b74b7f310fdea4bdcffebab32", + "zh:c352eb3169efa0e27a29b99a2630e8298710a084453c519caa39e5972ff6d1fc", + "zh:e32f4744b43be1708b309a734e0ac10b5c0f9f92e5849298cf1a90f2b906f6f3", ] } @@ -64,6 +64,26 @@ provider "registry.terraform.io/hashicorp/kubernetes" { ] } +provider "registry.terraform.io/hashicorp/local" { + version = "2.5.1" + constraints = ">= 2.5.1" + hashes = [ + "h1:/GAVA/xheGQcbOZEq0qxANOg+KVLCA7Wv8qluxhTjhU=", + "zh:0af29ce2b7b5712319bf6424cb58d13b852bf9a777011a545fac99c7fdcdf561", + "zh:126063ea0d79dad1f68fa4e4d556793c0108ce278034f101d1dbbb2463924561", + "zh:196bfb49086f22fd4db46033e01655b0e5e036a5582d250412cc690fa7995de5", + "zh:37c92ec084d059d37d6cffdb683ccf68e3a5f8d2eb69dd73c8e43ad003ef8d24", + "zh:4269f01a98513651ad66763c16b268f4c2da76cc892ccfd54b401fff6cc11667", + "zh:51904350b9c728f963eef0c28f1d43e73d010333133eb7f30999a8fb6a0cc3d8", + "zh:73a66611359b83d0c3fcba2984610273f7954002febb8a57242bbb86d967b635", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:7ae387993a92bcc379063229b3cce8af7eaf082dd9306598fcd42352994d2de0", + "zh:9e0f365f807b088646db6e4a8d4b188129d9ebdbcf2568c8ab33bddd1b82c867", + "zh:b5263acbd8ae51c9cbffa79743fbcadcb7908057c87eb22fd9048268056efbc4", + "zh:dfcd88ac5f13c0d04e24be00b686d069b4879cc4add1b7b1a8ae545783d97520", + ] +} + provider 
"registry.terraform.io/hashicorp/null" { version = "3.2.2" hashes = [ diff --git a/terraform-unity/README.md b/terraform-unity/README.md index ade96e49..3dce1dad 100644 --- a/terraform-unity/README.md +++ b/terraform-unity/README.md @@ -151,42 +151,54 @@ terraform apply -no-color 2>&1 | tee apply_output.txt | Name | Version | |------|---------| | [terraform](#requirement\_terraform) | ~> 1.8.2 | -| [aws](#requirement\_aws) | 5.47.0 | +| [aws](#requirement\_aws) | 5.50.0 | | [helm](#requirement\_helm) | 2.13.1 | | [kubernetes](#requirement\_kubernetes) | 2.29.0 | | [null](#requirement\_null) | 3.2.2 | -| [random](#requirement\_random) | 3.6.1 | | [time](#requirement\_time) | 0.11.1 | ## Providers -No providers. +| Name | Version | +|------|---------| +| [aws](#provider\_aws) | 5.50.0 | +| [kubernetes](#provider\_kubernetes) | 2.29.0 | ## Modules | Name | Source | Version | |------|--------|---------| | [unity-sps-airflow](#module\_unity-sps-airflow) | ./modules/terraform-unity-sps-airflow | n/a | +| [unity-sps-database](#module\_unity-sps-database) | ./modules/terraform-unity-sps-database | n/a | +| [unity-sps-efs](#module\_unity-sps-efs) | ./modules/terraform-unity-sps-efs | n/a | +| [unity-sps-initiators](#module\_unity-sps-initiators) | ./modules/terraform-unity-sps-initiators | n/a | +| [unity-sps-karpenter-node-config](#module\_unity-sps-karpenter-node-config) | ./modules/terraform-unity-sps-karpenter-node-config | n/a | +| [unity-sps-ogc-processes-api](#module\_unity-sps-ogc-processes-api) | ./modules/terraform-unity-sps-ogc-processes-api | n/a | ## Resources -No resources. +| Name | Type | +|------|------| +| [kubernetes_namespace.service_area](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/namespace) | resource | +| [aws_eks_cluster.cluster](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/eks_cluster) | data source | +| [aws_eks_cluster_auth.cluster](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/eks_cluster_auth) | data source | ## Inputs | Name | Description | Type | Default | Required | |------|-------------|------|---------|:--------:| +| [airflow\_docker\_images](#input\_airflow\_docker\_images) | Docker images for the associated Airflow services. |
object({
airflow = object({
name = string
tag = string
})
})
|
{
"airflow": {
"name": "ghcr.io/unity-sds/unity-sps/sps-airflow",
"tag": "2.2.0"
}
}
| no | | [airflow\_webserver\_password](#input\_airflow\_webserver\_password) | The password for the Airflow webserver and UI. | `string` | n/a | yes | -| [counter](#input\_counter) | Identifier used to uniquely distinguish resources. This is used in the naming convention of the resource. If left empty, a random hexadecimal value will be generated and used instead. | `string` | `""` | no | +| [airflow\_webserver\_username](#input\_airflow\_webserver\_username) | The username for the Airflow webserver and UI. | `string` | `"admin"` | no | | [dag\_catalog\_repo](#input\_dag\_catalog\_repo) | Git repository that stores the catalog of Airflow DAGs. |
object({
url = string
ref = string
dags_directory_path = string
})
|
{
"dags_directory_path": "airflow/dags",
"ref": "develop",
"url": "https://github.com/unity-sds/unity-sps.git"
}
| no | -| [deployment\_name](#input\_deployment\_name) | The name of the deployment. | `string` | n/a | yes | -| [docker\_images](#input\_docker\_images) | Docker images for the associated services. |
object({
airflow = object({
name = string
tag = string
}),
ogc_processes_api = object({
name = string
tag = string
})
git_sync = object({
name = string
tag = string
})
redis = object({
name = string
tag = string
})
})
|
{
"airflow": {
"name": "ghcr.io/unity-sds/unity-sps/sps-airflow",
"tag": "2.1.0"
},
"git_sync": {
"name": "registry.k8s.io/git-sync/git-sync",
"tag": "v4.2.3"
},
"ogc_processes_api": {
"name": "ghcr.io/unity-sds/unity-sps-ogc-processes-api/unity-sps-ogc-processes-api",
"tag": "1.0.0"
},
"redis": {
"name": "redis",
"tag": "7.2.4"
}
}
| no | | [helm\_charts](#input\_helm\_charts) | Helm charts for the associated services. |
map(object({
repository = string
chart = string
version = string
}))
|
{
"airflow": {
"chart": "airflow",
"repository": "https://airflow.apache.org",
"version": "1.13.1"
},
"keda": {
"chart": "keda",
"repository": "https://kedacore.github.io/charts",
"version": "v2.14.2"
}
}
| no | -| [karpenter\_node\_pools](#input\_karpenter\_node\_pools) | Configuration for Karpenter node pools |
map(object({
requirements : list(object({
key : string
operator : string
values : list(string)
}))
limits : object({
cpu : string
memory : string
})
disruption : object({
consolidationPolicy : string
consolidateAfter : string
})
}))
|
{
"airflow-celery-workers": {
"disruption": {
"consolidateAfter": "1m",
"consolidationPolicy": "WhenEmpty"
},
"limits": {
"cpu": "80",
"memory": "320Gi"
},
"requirements": [
{
"key": "karpenter.k8s.aws/instance-family",
"operator": "In",
"values": [
"m7i",
"m6i",
"m5",
"t3",
"c7i",
"c6i",
"c5",
"r7i",
"r6i",
"r5"
]
},
{
"key": "karpenter.k8s.aws/instance-cpu",
"operator": "Gt",
"values": [
"1"
]
},
{
"key": "karpenter.k8s.aws/instance-cpu",
"operator": "Lt",
"values": [
"9"
]
},
{
"key": "karpenter.k8s.aws/instance-memory",
"operator": "Gt",
"values": [
"8191"
]
},
{
"key": "karpenter.k8s.aws/instance-memory",
"operator": "Lt",
"values": [
"32769"
]
},
{
"key": "karpenter.k8s.aws/instance-hypervisor",
"operator": "In",
"values": [
"nitro"
]
}
]
},
"airflow-core-components": {
"disruption": {
"consolidateAfter": "1m",
"consolidationPolicy": "WhenEmpty"
},
"limits": {
"cpu": "40",
"memory": "160Gi"
},
"requirements": [
{
"key": "karpenter.k8s.aws/instance-family",
"operator": "In",
"values": [
"m7i",
"m6i",
"m5",
"t3",
"c7i",
"c6i",
"c5",
"r7i",
"r6i",
"r5"
]
},
{
"key": "karpenter.k8s.aws/instance-cpu",
"operator": "Gt",
"values": [
"1"
]
},
{
"key": "karpenter.k8s.aws/instance-cpu",
"operator": "Lt",
"values": [
"17"
]
},
{
"key": "karpenter.k8s.aws/instance-memory",
"operator": "Gt",
"values": [
"8191"
]
},
{
"key": "karpenter.k8s.aws/instance-memory",
"operator": "Lt",
"values": [
"32769"
]
},
{
"key": "karpenter.k8s.aws/instance-hypervisor",
"operator": "In",
"values": [
"nitro"
]
}
]
},
"airflow-kubernetes-pod-operator": {
"disruption": {
"consolidateAfter": "1m",
"consolidationPolicy": "WhenEmpty"
},
"limits": {
"cpu": "100",
"memory": "400Gi"
},
"requirements": [
{
"key": "karpenter.k8s.aws/instance-family",
"operator": "In",
"values": [
"m7i",
"m6i",
"m5",
"t3",
"c7i",
"c6i",
"c5",
"r7i",
"r6i",
"r5"
]
},
{
"key": "karpenter.k8s.aws/instance-cpu",
"operator": "Gt",
"values": [
"1"
]
},
{
"key": "karpenter.k8s.aws/instance-cpu",
"operator": "Lt",
"values": [
"17"
]
},
{
"key": "karpenter.k8s.aws/instance-memory",
"operator": "Gt",
"values": [
"8191"
]
},
{
"key": "karpenter.k8s.aws/instance-memory",
"operator": "Lt",
"values": [
"32769"
]
},
{
"key": "karpenter.k8s.aws/instance-hypervisor",
"operator": "In",
"values": [
"nitro"
]
}
]
}
}
| no | +| [karpenter\_node\_classes](#input\_karpenter\_node\_classes) | Configuration for karpenter\_node\_classes |
map(object({
volume_size = string
}))
|
{
"airflow-kubernetes-pod-operator-high-workload": {
"volume_size": "300Gi"
},
"default": {
"volume_size": "30Gi"
}
}
| no | +| [karpenter\_node\_pools](#input\_karpenter\_node\_pools) | Configuration for Karpenter node pools |
map(object({
requirements : list(object({
key : string
operator : string
values : list(string)
}))
nodeClassRef : string
limits : object({
cpu : string
memory : string
})
disruption : object({
consolidationPolicy : string
consolidateAfter : string
})
}))
|
{
"airflow-celery-workers": {
"disruption": {
"consolidateAfter": "1m",
"consolidationPolicy": "WhenEmpty"
},
"limits": {
"cpu": "80",
"memory": "320Gi"
},
"nodeClassRef": "default",
"requirements": [
{
"key": "karpenter.k8s.aws/instance-family",
"operator": "In",
"values": [
"m7i",
"m6i",
"m5",
"t3",
"c7i",
"c6i",
"c5",
"r7i",
"r6i",
"r5"
]
},
{
"key": "karpenter.k8s.aws/instance-cpu",
"operator": "Gt",
"values": [
"1"
]
},
{
"key": "karpenter.k8s.aws/instance-cpu",
"operator": "Lt",
"values": [
"9"
]
},
{
"key": "karpenter.k8s.aws/instance-memory",
"operator": "Gt",
"values": [
"8191"
]
},
{
"key": "karpenter.k8s.aws/instance-memory",
"operator": "Lt",
"values": [
"32769"
]
},
{
"key": "karpenter.k8s.aws/instance-hypervisor",
"operator": "In",
"values": [
"nitro"
]
}
]
},
"airflow-core-components": {
"disruption": {
"consolidateAfter": "1m",
"consolidationPolicy": "WhenEmpty"
},
"limits": {
"cpu": "40",
"memory": "160Gi"
},
"nodeClassRef": "default",
"requirements": [
{
"key": "karpenter.k8s.aws/instance-family",
"operator": "In",
"values": [
"m7i",
"m6i",
"m5",
"t3",
"c7i",
"c6i",
"c5",
"r7i",
"r6i",
"r5"
]
},
{
"key": "karpenter.k8s.aws/instance-cpu",
"operator": "Gt",
"values": [
"1"
]
},
{
"key": "karpenter.k8s.aws/instance-cpu",
"operator": "Lt",
"values": [
"17"
]
},
{
"key": "karpenter.k8s.aws/instance-memory",
"operator": "Gt",
"values": [
"8191"
]
},
{
"key": "karpenter.k8s.aws/instance-memory",
"operator": "Lt",
"values": [
"32769"
]
},
{
"key": "karpenter.k8s.aws/instance-hypervisor",
"operator": "In",
"values": [
"nitro"
]
}
]
},
"airflow-kubernetes-pod-operator": {
"disruption": {
"consolidateAfter": "1m",
"consolidationPolicy": "WhenEmpty"
},
"limits": {
"cpu": "100",
"memory": "400Gi"
},
"nodeClassRef": "default",
"requirements": [
{
"key": "karpenter.k8s.aws/instance-family",
"operator": "In",
"values": [
"m7i",
"m6i",
"m5",
"t3",
"c7i",
"c6i",
"c5",
"r7i",
"r6i",
"r5"
]
},
{
"key": "karpenter.k8s.aws/instance-cpu",
"operator": "Gt",
"values": [
"1"
]
},
{
"key": "karpenter.k8s.aws/instance-cpu",
"operator": "Lt",
"values": [
"17"
]
},
{
"key": "karpenter.k8s.aws/instance-memory",
"operator": "Gt",
"values": [
"8191"
]
},
{
"key": "karpenter.k8s.aws/instance-memory",
"operator": "Lt",
"values": [
"32769"
]
},
{
"key": "karpenter.k8s.aws/instance-hypervisor",
"operator": "In",
"values": [
"nitro"
]
}
]
},
"airflow-kubernetes-pod-operator-high-workload": {
"disruption": {
"consolidateAfter": "1m",
"consolidationPolicy": "WhenEmpty"
},
"limits": {
"cpu": "528",
"memory": "1056Gi"
},
"nodeClassRef": "airflow-kubernetes-pod-operator-high-workload",
"requirements": [
{
"key": "karpenter.k8s.aws/instance-family",
"operator": "In",
"values": [
"m7i",
"m6i",
"m5",
"t3",
"c7i",
"c6i",
"c5",
"r7i",
"r6i",
"r5"
]
},
{
"key": "karpenter.k8s.aws/instance-cpu",
"operator": "Gt",
"values": [
"1"
]
},
{
"key": "karpenter.k8s.aws/instance-cpu",
"operator": "Lt",
"values": [
"49"
]
},
{
"key": "karpenter.k8s.aws/instance-memory",
"operator": "Gt",
"values": [
"8191"
]
},
{
"key": "karpenter.k8s.aws/instance-memory",
"operator": "Lt",
"values": [
"98305"
]
},
{
"key": "karpenter.k8s.aws/instance-hypervisor",
"operator": "In",
"values": [
"nitro"
]
}
]
}
}
| no | | [kubeconfig\_filepath](#input\_kubeconfig\_filepath) | The path to the kubeconfig file for the Kubernetes cluster. | `string` | n/a | yes | | [mcp\_ami\_owner\_id](#input\_mcp\_ami\_owner\_id) | The owner ID of the MCP AMIs | `string` | `"794625662971"` | no | +| [ogc\_processes\_docker\_images](#input\_ogc\_processes\_docker\_images) | Docker images for the associated OGC Processes API services. |
object({
ogc_processes_api = object({
name = string
tag = string
})
git_sync = object({
name = string
tag = string
})
redis = object({
name = string
tag = string
})
})
|
{
"git_sync": {
"name": "registry.k8s.io/git-sync/git-sync",
"tag": "v4.2.4"
},
"ogc_processes_api": {
"name": "ghcr.io/unity-sds/unity-sps-ogc-processes-api/unity-sps-ogc-processes-api",
"tag": "2.0.0"
},
"redis": {
"name": "redis",
"tag": "7.4.0"
}
}
| no | | [project](#input\_project) | The project or mission deploying Unity SPS. | `string` | `"unity"` | no | -| [release](#input\_release) | The software release version. | `string` | `"24.2"` | no | +| [release](#input\_release) | The software release version. | `string` | `"24.3"` | no | | [service\_area](#input\_service\_area) | The service area owner of the resources being deployed. | `string` | `"sps"` | no | | [venue](#input\_venue) | The MCP venue in which the resources will be deployed. | `string` | n/a | yes | @@ -194,5 +206,5 @@ No resources. | Name | Description | |------|-------------| -| [resources](#output\_resources) | SSM parameter IDs for pipeline resources. | +| [resources](#output\_resources) | SSM parameter IDs for SPS resources. | diff --git a/terraform-unity/data.tf b/terraform-unity/data.tf new file mode 100644 index 00000000..e08bd146 --- /dev/null +++ b/terraform-unity/data.tf @@ -0,0 +1,7 @@ +data "aws_eks_cluster" "cluster" { + name = format(local.resource_name_prefix, "eks") +} + +data "aws_eks_cluster_auth" "cluster" { + name = format(local.resource_name_prefix, "eks") +} diff --git a/terraform-unity/locals.tf b/terraform-unity/locals.tf new file mode 100644 index 00000000..7050a5e8 --- /dev/null +++ b/terraform-unity/locals.tf @@ -0,0 +1,4 @@ + +locals { + resource_name_prefix = join("-", compact([var.project, var.venue, var.service_area, "%s"])) +} diff --git a/terraform-unity/main.tf b/terraform-unity/main.tf index cd62f530..6b816d47 100644 --- a/terraform-unity/main.tf +++ b/terraform-unity/main.tf @@ -1,28 +1,90 @@ -# S3 backend terraform { backend "s3" { - # full path to Terraform state file: - # s3:/// - bucket = "" - key = "" - region = "us-west-2" - encrypt = true + bucket = "unity-unity-dev-bucket" + workspace_key_prefix = "sps/tfstates" + key = "terraform.tfstate" + region = "us-west-2" + encrypt = true } } +resource "kubernetes_namespace" "service_area" { + metadata { + name = var.service_area + } +} + +module "unity-sps-database" { + source = "./modules/terraform-unity-sps-database" + project = var.project + venue = var.venue + service_area = var.service_area + release = var.release +} + +module "unity-sps-efs" { + source = "./modules/terraform-unity-sps-efs" + project = var.project + venue = var.venue + service_area = var.service_area + release = var.release +} + +module "unity-sps-karpenter-node-config" { + source = "./modules/terraform-unity-sps-karpenter-node-config" + project = var.project + venue = var.venue + service_area = var.service_area + release = var.release + kubeconfig_filepath = var.kubeconfig_filepath + mcp_ami_owner_id = var.mcp_ami_owner_id + karpenter_node_classes = var.karpenter_node_classes + karpenter_node_pools = var.karpenter_node_pools +} + module "unity-sps-airflow" { source = "./modules/terraform-unity-sps-airflow" project = var.project venue = var.venue service_area = var.service_area - deployment_name = var.deployment_name - counter = var.counter release = var.release kubeconfig_filepath = var.kubeconfig_filepath + kubernetes_namespace = kubernetes_namespace.service_area.metadata[0].name + db_instance_identifier = module.unity-sps-database.db_instance_identifier + db_secret_arn = module.unity-sps-database.db_secret_arn + efs_file_system_id = module.unity-sps-efs.file_system_id + airflow_webserver_username = var.airflow_webserver_username airflow_webserver_password = var.airflow_webserver_password - docker_images = var.docker_images + docker_images = var.airflow_docker_images helm_charts = var.helm_charts - 
mcp_ami_owner_id = var.mcp_ami_owner_id - karpenter_node_pools = var.karpenter_node_pools + karpenter_node_pools = module.unity-sps-karpenter-node-config.karpenter_node_pools +} + +module "unity-sps-ogc-processes-api" { + source = "./modules/terraform-unity-sps-ogc-processes-api" + project = var.project + venue = var.venue + service_area = var.service_area + release = var.release + kubernetes_namespace = kubernetes_namespace.service_area.metadata[0].name + db_instance_identifier = module.unity-sps-database.db_instance_identifier + db_secret_arn = module.unity-sps-database.db_secret_arn + airflow_deployed_dags_pvc = module.unity-sps-airflow.airflow_deployed_dags_pvc + airflow_webserver_username = var.airflow_webserver_username + airflow_webserver_password = var.airflow_webserver_password + docker_images = var.ogc_processes_docker_images dag_catalog_repo = var.dag_catalog_repo + karpenter_node_pools = module.unity-sps-karpenter-node-config.karpenter_node_pools +} + +module "unity-sps-initiators" { + source = "./modules/terraform-unity-sps-initiators" + project = var.project + venue = var.venue + service_area = var.service_area + release = var.release + airflow_api_url_ssm_param = module.unity-sps-airflow.airflow_urls["rest_api"].ssm_param_id + airflow_webserver_username = var.airflow_webserver_username + airflow_webserver_password = var.airflow_webserver_password + ogc_processes_api_url_ssm_param = module.unity-sps-ogc-processes-api.ogc_processes_urls["rest_api"].ssm_param_id } diff --git a/terraform-unity/modules/README.md b/terraform-unity/modules/README.md deleted file mode 100644 index cfd4be9f..00000000 --- a/terraform-unity/modules/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# Auto-generated Documentation of Unity SPS Terraform Sub-modules - -Each sub-module contained in this directory contains its own auto-generated documentation: - -- [terraform-unity-sps-hysds-cluster](terraform-unity-sps-hysds-cluster/README.md) diff --git a/terraform-unity/modules/terraform-unity-sps-airflow/.terraform.lock.hcl b/terraform-unity/modules/terraform-unity-sps-airflow/.terraform.lock.hcl index dd7e7590..a7c983fd 100644 --- a/terraform-unity/modules/terraform-unity-sps-airflow/.terraform.lock.hcl +++ b/terraform-unity/modules/terraform-unity-sps-airflow/.terraform.lock.hcl @@ -1,87 +1,65 @@ # This file is maintained automatically by "terraform init". # Manual edits may be lost in future updates. 
-provider "registry.terraform.io/alekc/kubectl" { - version = "2.0.4" - constraints = "2.0.4" - hashes = [ - "h1:mCz0lOwNsFCZEcFf7DBSe6b4hZgn5piiy0mZDwRGUIU=", - "zh:15c227886bac78c8b8827f85595648212574ec81febc39e1055e1a6bf048fe65", - "zh:2211ebeeb0918dbb3587d206e32adca9e1f343a93bbffcd37d8d99bf4d8dea9a", - "zh:2303836cdea12ece8dbe39c2d7d30a9378fd06e9c2ebda66cbe5e01cc096ee2e", - "zh:3687f69e531c70845682b214888a9959b93f2be3c2531801228a4b1965d59921", - "zh:4dd686b4c55e2eedd80464984c9bb736c2df7a96d9dd59a692d91d09173f5f64", - "zh:51e29c13a87e56867b4be0b0c68da874149bf6d4014d7259b62d91162142c1bd", - "zh:5d9d99260f2adfb8867068a3d7644336d57cfa7710062c5221dcbb5a7ec90c7d", - "zh:901c19d73da6688437b19a85e3cd60e8f2090c84699e108b31953bb87f6d3141", - "zh:9547743606a36fa6b6748c5e2e1959b6f185730a1da53a3c351cfa0d8c096687", - "zh:9772a30704e69b54de5a332858a39591f52286121cffcba702346830b1c6e362", - "zh:b44792f99d7c90b9a364dd922f861e459ae1b1edc039f6b3078549021fec4511", - "zh:b5eb871ed2e39b9236dce06170b1fd5dda29f3c1d53f8e08285ccb9a4f574201", - "zh:e8bb4c3d9f680977b560e9dec24662650f790259b2c1311ee07a72157f6492b3", - "zh:f4772cfa0f9c73fdef008bb917cd268620009dc7ff270a4d819125c642b5acce", - ] -} - provider "registry.terraform.io/hashicorp/aws" { - version = "5.43.0" - constraints = ">= 5.40.0, 5.43.0" + version = "5.50.0" + constraints = ">= 5.50.0, 5.50.0" hashes = [ - "h1:3w6NCYy+mbc9odXmM7K5Xag2ggtapraacZqJR3WpJKc=", - "zh:07fb2abb9cf4d2042b41b2b2c642d4c4bd2feccbd856cd7040a7d15158fed478", - "zh:1373339e796d8d8473c267c0ecddb701559fce454c2cdd192cf8b0eadf759b48", - "zh:1644b4e0fd2e0b28d465bb5cf08b1f594a623324d176e879e5052f78cd2ea8cb", - "zh:385943b8d4170c5269b8e13e876636b7edc0ad2576edc7eb5d81cd4286a461d8", - "zh:48cf103f4fa866b67b686e8c085ac15264d6f020b6ad4a90f496b7283d31faa6", - "zh:4a4c4b4236542089d1bdb688c248e0b7c941ce42887da87e487bfb15038dcaf9", - "zh:5d84f3e12100bdd62a8c295b56358b82afc130642dca80d104bd868fdc28ed7c", - "zh:68294a601ce588a8838bcf4e136bb5ed8d2b1ee410f8871d88e35ce4861cf33f", - "zh:7ae1af6e9b95bd6c33dd0922216ac2b59f2f5b22fedbeab1db7a80b2f4358919", - "zh:89c718d41b2eeeaefd1acdbd839f1326a8c866bd49752648b0b32d3dd4a38163", - "zh:96e54ccb0f5ddf60465edf5c9f46e64f7d2f392507b851f102723797b4a15d09", + "h1:LevuTzPS4S7t+Vh6Kpz77pBNDAwChaos91/6+CVnD4w=", + "zh:19be42f5a545d6712dee4bdb704b018d23bacf5d902ac3cb061eb1750dfe6a20", + "zh:1d880bdba95ce96efde37e5bcf457a57df2c1effa9b47bc67fa29c1a264ae53b", + "zh:1e9c78e324d7492be5e7744436ed71d66fe4eca3fb6af07a28efd0d1e3bf7640", + "zh:27ac672aa61b3795931561fdbe4a306ad1132af517d7711c14569429b2cc694f", + "zh:3b978423dead02f9a98d25de118adf264a2331acdc4550ea93bed01feabc12e7", + "zh:490d7eb4b922ba1b57e0ab8dec1a08df6517485febcab1e091fd6011281c3472", + "zh:64e7c84e18dac1af5778d6f516e01a46f9c91d710867c39fbc7efa3cd972dc62", + "zh:73867ac2956dcdd377121b3aa8fe2e1085e77fae9b61d018f56a863277ea4b6e", + "zh:7ed899d0d5c49f009b445d7816e4bf702d9c48205c24cf884cd2ae0247160455", "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", - "zh:b102ce204ebbbf32d68ff47b5224eeb60873bef5b58a7fd7790f6b4020801578", - "zh:cae4cb16d15ac4b15c8de5bc9dddc2032583e12c4f31e23b3a7ef22da60657dc", - "zh:fecbcbd63111c9518de261bcb37482cb06ee149e7298f567d45b2a55674faa75", + "zh:9b93784b3fb13d08cf95a4131c49b56bf7e1cd35daad6156b3658a89ce6fb58f", + "zh:b29d77eb75de474e46eb47e539c48916628d85599bcf14e5cc500b14a4578e75", + "zh:bbd9cec8ca705452e4a3d21d56474eacb8cc7b1b74b7f310fdea4bdcffebab32", + "zh:c352eb3169efa0e27a29b99a2630e8298710a084453c519caa39e5972ff6d1fc", + 
"zh:e32f4744b43be1708b309a734e0ac10b5c0f9f92e5849298cf1a90f2b906f6f3", ] } provider "registry.terraform.io/hashicorp/helm" { - version = "2.12.1" - constraints = "2.12.1" + version = "2.13.1" + constraints = "2.13.1" hashes = [ - "h1:aBfcqM4cbywa7TAxfT1YoFS+Cst9waerlm4XErFmJlk=", - "zh:1d623fb1662703f2feb7860e3c795d849c77640eecbc5a776784d08807b15004", - "zh:253a5bc62ba2c4314875139e3fbd2feaad5ef6b0fb420302a474ab49e8e51a38", - "zh:282358f4ad4f20d0ccaab670b8645228bfad1c03ac0d0df5889f0aea8aeac01a", - "zh:4fd06af3091a382b3f0d8f0a60880f59640d2b6d9d6a31f9a873c6f1bde1ec50", - "zh:6816976b1830f5629ae279569175e88b497abbbac30ee809948a1f923c67a80d", - "zh:7d82c4150cdbf48cfeec867be94c7b9bd7682474d4df0ebb7e24e148f964844f", - "zh:83f062049eea2513118a4c6054fb06c8600bac96196f25aed2cc21898ec86e93", - "zh:a79eec0cf4c08fca79e44033ec6e470f25ff23c3e2c7f9bc707ed7771c1072c0", - "zh:b2b2d904b2821a6e579910320605bc478bbef063579a23fbfdd6fcb5871b81f8", - "zh:e91177ca06a15487fc570cb81ecef6359aa399459ea2aa7c4f7367ba86f6fcad", - "zh:e976bcb82996fc4968f8382bbcb6673efb1f586bf92074058a232028d97825b1", + "h1:crwHSTDCQ6fS8dQYGkoi700MI5UpbA2BDLgMZgL3B+E=", + "zh:1bf0ae1ecfd2a5d5a57f695a33b2328ef197138f27ff372fed820c975eac9783", + "zh:4676295e3a929848b98869d3040f54f17fbed3d133342b6a1f7b72d5797239e0", + "zh:4bf3705e061e28d16a525aad9229fdd842cdc96f7c23d040d3148957ba3149d8", + "zh:69db9550eacd61d85cf456d438f08addfefea4fcbc4f4a8119105093ea3d950a", + "zh:6e11560e3ea61b141f03842771bfad143ff1c56bd0d1bc01069496107cad0ab6", + "zh:733ea41e2eb4bd63cfdae6886ed47d224dabb0cd37959c6e2b213b1914a80121", + "zh:74caefb2dc8e6055259d716c11194cc0709261c592d41466abf2dc0b21d88297", + "zh:89682ab50b5cf1f1c41eabfc76f53a56482ac7b4bf77d9cb087d789524fd3e31", + "zh:a5ff95092f2f123027b89f585612a225c9bce7e65977b4ffaf4de3ae3e7870bc", + "zh:c85fce024cb5a387702ceb42a3a06e32519cd1e61bc9dd820a762da21110ab96", + "zh:d828ef2db612798179322bcb3fe829a43dd47e740cabb67e3654c8561ae661ff", "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", ] } provider "registry.terraform.io/hashicorp/kubernetes" { - version = "2.25.2" - constraints = "2.25.2" + version = "2.29.0" + constraints = "2.29.0" hashes = [ - "h1:T1WAQt40cAk721H0AM/eZ5YuodJaIfS8r3Tu7rKCJJE=", - "zh:044788ac936e0e8ece8f78a2e4e366ecd435ea8235388eaf2cbc8e7975d9d970", - "zh:24f5ff01df91f51f00ee7ff39430adeb63bb2ca4ea0042e68f06d6b65808c02f", - "zh:49984aa0aa1faa8c4f01e8faa039322f1e6fdaeab0b7e32f5c6e96edfde36a38", - "zh:4eeceaff56bac9fc782e7e33f157fa2c7e9a47b2c3c3d12da2642c312ace73f6", - "zh:4f49b6419345960d5af475e0200c243af4c9c140b0ee64799fe1fc9b023c49ea", - "zh:7958414d516867a2263a978792a24843f80023fb233cf051ff4095adc9803d85", - "zh:c633a755fc95e9ff0cd73656f052947afd85883a0987dde5198113aa48474156", - "zh:cbfe958d119795004ce1e8001449d01c056fa2a062b51d07843d98be216337d7", - "zh:cfb85392e18768578d4c943438897083895719be678227fd90efbe3500702a56", - "zh:d705a661ed5da425dd236a48645bec39fe78a67d2e70e8460b720417cbf260ac", - "zh:ddd7a01263da3793df4f3b5af65f166307eed5acf525e51e058cda59009cc856", + "h1:7C1MinWhowW8EnlSYhhAFV3bte8x5YcSF5QxUPdoXDk=", + "zh:3edd5dc319b95fe94e61b82d10c1ce7fb53a2f21b067ddb742f2d7d0d19dd113", + "zh:4b9096e6d0cfa0efd4c89270e3d25fea49db570e2cfbe49c5d1de085a15f2578", + "zh:5397573838bcb8844248c8d6ac93cca7f39a0b707ac3ce7a7b306c50c261c195", + "zh:5d635370720d356b7bcb5756ca28de3275ca32ca1ef0201414caecd3a14759ac", + "zh:71a52280408f3fb0ff1866a9ab8059b0d9bde5481869658798e0773461f22eff", + "zh:748663ef0248d2d95f5dea2974332432a395165657856878c5dc6f000b37cc25", + 
"zh:7fbc1e084bbbb51e31afd3df0c77e833ae59e88cf42b9e2c17b0b1a1e3894723", + "zh:ae89b4be473b446270fa24dc1ef51b0cc4c2a528d9838ec15246d28bac165df3", + "zh:b6433970d680a0cc9898f915224508b5ece86ae4418372fa6bebd2a9d344f226", + "zh:bf871955cf49015e6a0433e814a22a109c1537a775b8b5dc7b37ad05c324904a", + "zh:c16fac91b2197b443a191d98cf37424feed550387ab11bd1427bde819722005e", "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", ] } @@ -107,22 +85,22 @@ provider "registry.terraform.io/hashicorp/null" { } provider "registry.terraform.io/hashicorp/random" { - version = "3.6.0" - constraints = "3.6.0" + version = "3.6.1" + constraints = "3.6.1" hashes = [ - "h1:I8MBeauYA8J8yheLJ8oSMWqB0kovn16dF/wKZ1QTdkk=", - "zh:03360ed3ecd31e8c5dac9c95fe0858be50f3e9a0d0c654b5e504109c2159287d", - "zh:1c67ac51254ba2a2bb53a25e8ae7e4d076103483f55f39b426ec55e47d1fe211", - "zh:24a17bba7f6d679538ff51b3a2f378cedadede97af8a1db7dad4fd8d6d50f829", - "zh:30ffb297ffd1633175d6545d37c2217e2cef9545a6e03946e514c59c0859b77d", - "zh:454ce4b3dbc73e6775f2f6605d45cee6e16c3872a2e66a2c97993d6e5cbd7055", + "h1:a+Goawwh6Qtg4/bRWzfDtIdrEFfPlnVy0y4LdUQY3nI=", + "zh:2a0ec154e39911f19c8214acd6241e469157489fc56b6c739f45fbed5896a176", + "zh:57f4e553224a5e849c99131f5e5294be3a7adcabe2d867d8a4fef8d0976e0e52", + "zh:58f09948c608e601bd9d0a9e47dcb78e2b2c13b4bda4d8f097d09152ea9e91c5", + "zh:5c2a297146ed6fb3fe934c800e78380f700f49ff24dbb5fb5463134948e3a65f", "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", - "zh:91df0a9fab329aff2ff4cf26797592eb7a3a90b4a0c04d64ce186654e0cc6e17", - "zh:aa57384b85622a9f7bfb5d4512ca88e61f22a9cea9f30febaa4c98c68ff0dc21", - "zh:c4a3e329ba786ffb6f2b694e1fd41d413a7010f3a53c20b432325a94fa71e839", - "zh:e2699bc9116447f96c53d55f2a00570f982e6f9935038c3810603572693712d0", - "zh:e747c0fd5d7684e5bfad8aa0ca441903f15ae7a98a737ff6aca24ba223207e2c", - "zh:f1ca75f417ce490368f047b63ec09fd003711ae48487fba90b4aba2ccf71920e", + "zh:7ce41e26f0603e31cdac849085fc99e5cd5b3b73414c6c6d955c0ceb249b593f", + "zh:8c9e8d30c4ef08ee8bcc4294dbf3c2115cd7d9049c6ba21422bd3471d92faf8a", + "zh:93e91be717a7ffbd6410120eb925ebb8658cc8f563de35a8b53804d33c51c8b0", + "zh:982542e921970d727ce10ed64795bf36c4dec77a5db0741d4665230d12250a0d", + "zh:b9d1873f14d6033e216510ef541c891f44d249464f13cc07d3f782d09c7d18de", + "zh:cfe27faa0bc9556391c8803ade135a5856c34a3fe85b9ae3bdd515013c0c87c1", + "zh:e4aabf3184bbb556b89e4b195eab1514c86a2914dd01c23ad9813ec17e863a8a", ] } diff --git a/terraform-unity/modules/terraform-unity-sps-airflow/README.md b/terraform-unity/modules/terraform-unity-sps-airflow/README.md index 95e0b642..2314b4b8 100644 --- a/terraform-unity/modules/terraform-unity-sps-airflow/README.md +++ b/terraform-unity/modules/terraform-unity-sps-airflow/README.md @@ -6,7 +6,7 @@ | Name | Version | |------|---------| | [terraform](#requirement\_terraform) | ~> 1.8.2 | -| [aws](#requirement\_aws) | 5.47.0 | +| [aws](#requirement\_aws) | 5.50.0 | | [helm](#requirement\_helm) | 2.13.1 | | [kubernetes](#requirement\_kubernetes) | 2.29.0 | | [null](#requirement\_null) | 3.2.2 | @@ -17,11 +17,11 @@ | Name | Version | |------|---------| -| [aws](#provider\_aws) | 5.43.0 | -| [helm](#provider\_helm) | 2.12.1 | -| [kubernetes](#provider\_kubernetes) | 2.25.2 | +| [aws](#provider\_aws) | 5.50.0 | +| [helm](#provider\_helm) | 2.13.1 | +| [kubernetes](#provider\_kubernetes) | 2.29.0 | | [null](#provider\_null) | 3.2.2 | -| [random](#provider\_random) | 3.6.0 | +| [random](#provider\_random) | 3.6.1 | | [time](#provider\_time) | 0.11.1 | ## Modules @@ -32,55 
+32,28 @@ No modules. | Name | Type | |------|------| -| [aws_cloudwatch_log_group.airflow_dag_trigger](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/cloudwatch_log_group) | resource | -| [aws_db_instance.sps_db](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/db_instance) | resource | -| [aws_db_subnet_group.sps_db](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/db_subnet_group) | resource | -| [aws_efs_access_point.airflow_deployed_dags](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/efs_access_point) | resource | -| [aws_efs_access_point.airflow_kpo](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/efs_access_point) | resource | -| [aws_efs_file_system.airflow](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/efs_file_system) | resource | -| [aws_efs_mount_target.airflow](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/efs_mount_target) | resource | -| [aws_iam_policy.airflow_worker_policy](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/iam_policy) | resource | -| [aws_iam_policy.lambda_sqs_access](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/iam_policy) | resource | -| [aws_iam_role.airflow_worker_role](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/iam_role) | resource | -| [aws_iam_role.lambda](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/iam_role) | resource | -| [aws_iam_role_policy_attachment.airflow_worker_policy_attachment](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/iam_role_policy_attachment) | resource | -| [aws_iam_role_policy_attachment.lambda_logs](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/iam_role_policy_attachment) | resource | -| [aws_iam_role_policy_attachment.lambda_sqs_access_attach](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/iam_role_policy_attachment) | resource | -| [aws_lambda_event_source_mapping.lambda_airflow_dag_trigger](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/lambda_event_source_mapping) | resource | -| [aws_lambda_function.airflow_dag_trigger](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/lambda_function) | resource | -| [aws_s3_bucket.airflow_logs](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/s3_bucket) | resource | -| [aws_s3_bucket.inbound_staging_location](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/s3_bucket) | resource | -| [aws_s3_bucket.lambdas](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/s3_bucket) | resource | -| [aws_s3_bucket_notification.isl_bucket_notification](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/s3_bucket_notification) | resource | -| [aws_s3_object.lambdas](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/s3_object) | resource | -| [aws_secretsmanager_secret.sps_db](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/secretsmanager_secret) | resource | -| [aws_secretsmanager_secret_version.sps_db](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/secretsmanager_secret_version) | resource | -| 
[aws_security_group.airflow_efs](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/security_group) | resource | -| [aws_security_group.rds_sg](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/security_group) | resource | -| [aws_security_group_rule.airflow_efs](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/security_group_rule) | resource | -| [aws_security_group_rule.eks_egress_to_rds](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/security_group_rule) | resource | -| [aws_security_group_rule.rds_ingress_from_eks](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/security_group_rule) | resource | -| [aws_sns_topic.s3_isl_event_topic](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/sns_topic) | resource | -| [aws_sns_topic_policy.s3_isl_event_topic_policy](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/sns_topic_policy) | resource | -| [aws_sns_topic_subscription.s3_isl_event_subscription](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/sns_topic_subscription) | resource | -| [aws_sqs_queue.s3_isl_event_queue](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/sqs_queue) | resource | -| [aws_sqs_queue_policy.s3_isl_event_queue_policy](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/sqs_queue_policy) | resource | -| [aws_ssm_parameter.airflow_api_url](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/ssm_parameter) | resource | -| [aws_ssm_parameter.airflow_dag_trigger_lambda_package](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/ssm_parameter) | resource | -| [aws_ssm_parameter.airflow_logs](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/ssm_parameter) | resource | -| [aws_ssm_parameter.airflow_ui_url](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/ssm_parameter) | resource | -| [aws_ssm_parameter.isl_bucket](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/ssm_parameter) | resource | -| [aws_ssm_parameter.ogc_processes_api_url](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/ssm_parameter) | resource | -| [aws_ssm_parameter.ogc_processes_ui_url](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/ssm_parameter) | resource | +| [aws_efs_access_point.airflow_deployed_dags](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/efs_access_point) | resource | +| [aws_efs_access_point.airflow_kpo](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/efs_access_point) | resource | +| [aws_efs_mount_target.airflow](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/efs_mount_target) | resource | +| [aws_iam_policy.airflow_worker_policy](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/iam_policy) | resource | +| [aws_iam_role.airflow_worker_role](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/iam_role) | resource | +| [aws_iam_role_policy_attachment.airflow_worker_policy_attachment](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/iam_role_policy_attachment) | resource | +| 
[aws_lambda_invocation.unity_proxy_lambda_invocation](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/lambda_invocation) | resource | +| [aws_s3_bucket.airflow_logs](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/s3_bucket) | resource | +| [aws_security_group.airflow_efs](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/security_group) | resource | +| [aws_security_group.airflow_ingress_sg](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/security_group) | resource | +| [aws_security_group_rule.airflow_efs](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/security_group_rule) | resource | +| [aws_ssm_parameter.airflow_api_health_check_endpoint](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/ssm_parameter) | resource | +| [aws_ssm_parameter.airflow_api_url](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/ssm_parameter) | resource | +| [aws_ssm_parameter.airflow_logs](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/ssm_parameter) | resource | +| [aws_ssm_parameter.airflow_ui_health_check_endpoint](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/ssm_parameter) | resource | +| [aws_ssm_parameter.airflow_ui_url](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/ssm_parameter) | resource | +| [aws_ssm_parameter.unity_proxy_airflow_ui](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/ssm_parameter) | resource | +| [aws_vpc_security_group_ingress_rule.airflow_ingress_sg_jpl_rule](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/vpc_security_group_ingress_rule) | resource | +| [aws_vpc_security_group_ingress_rule.airflow_ingress_sg_proxy_rule](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/vpc_security_group_ingress_rule) | resource | | [helm_release.airflow](https://registry.terraform.io/providers/hashicorp/helm/2.13.1/docs/resources/release) | resource | | [helm_release.keda](https://registry.terraform.io/providers/hashicorp/helm/2.13.1/docs/resources/release) | resource | -| [kubernetes_deployment.ogc_processes_api](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/deployment) | resource | -| [kubernetes_deployment.redis](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/deployment) | resource | | [kubernetes_ingress_v1.airflow_ingress](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/ingress_v1) | resource | -| [kubernetes_ingress_v1.ogc_processes_api_ingress](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/ingress_v1) | resource | -| [kubernetes_manifest.karpenter_node_class](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/manifest) | resource | -| [kubernetes_manifest.karpenter_node_pools](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/manifest) | resource | -| [kubernetes_namespace.airflow](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/namespace) | resource | | [kubernetes_namespace.keda](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/namespace) | resource | | 
[kubernetes_persistent_volume.airflow_deployed_dags](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/persistent_volume) | resource | | [kubernetes_persistent_volume.airflow_kpo](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/persistent_volume) | resource | @@ -90,41 +63,36 @@ No modules. | [kubernetes_role_binding.airflow_pod_creator_binding](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/role_binding) | resource | | [kubernetes_secret.airflow_metadata](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/secret) | resource | | [kubernetes_secret.airflow_webserver](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/secret) | resource | -| [kubernetes_service.ogc_processes_api](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/service) | resource | -| [kubernetes_service.redis](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/service) | resource | | [kubernetes_storage_class.efs](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/storage_class) | resource | -| [null_resource.build_lambda_packages](https://registry.terraform.io/providers/hashicorp/null/3.2.2/docs/resources/resource) | resource | | [null_resource.remove_keda_finalizers](https://registry.terraform.io/providers/hashicorp/null/3.2.2/docs/resources/resource) | resource | -| [null_resource.remove_node_class_finalizers](https://registry.terraform.io/providers/hashicorp/null/3.2.2/docs/resources/resource) | resource | | [random_id.airflow_webserver_secret](https://registry.terraform.io/providers/hashicorp/random/3.6.1/docs/resources/id) | resource | -| [random_id.counter](https://registry.terraform.io/providers/hashicorp/random/3.6.1/docs/resources/id) | resource | -| [random_password.sps_db](https://registry.terraform.io/providers/hashicorp/random/3.6.1/docs/resources/password) | resource | | [time_sleep.wait_for_efs_mount_target_dns_propagation](https://registry.terraform.io/providers/hashicorp/time/0.11.1/docs/resources/sleep) | resource | -| [aws_ami.al2_eks_optimized](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/data-sources/ami) | data source | -| [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/data-sources/caller_identity) | data source | -| [aws_eks_cluster.cluster](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/data-sources/eks_cluster) | data source | -| [aws_eks_cluster_auth.cluster](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/data-sources/eks_cluster_auth) | data source | -| [aws_iam_role.cluster_iam_role](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/data-sources/iam_role) | data source | -| [aws_security_group.default](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/data-sources/security_group) | data source | -| [aws_ssm_parameter.al2_eks_optimized_ami](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/data-sources/ssm_parameter) | data source | -| [aws_ssm_parameter.subnet_ids](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/data-sources/ssm_parameter) | data source | -| [aws_vpc.cluster_vpc](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/data-sources/vpc) | data source | +| 
[aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/caller_identity) | data source | +| [aws_db_instance.db](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/db_instance) | data source | +| [aws_efs_file_system.efs](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/efs_file_system) | data source | +| [aws_eks_cluster.cluster](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/eks_cluster) | data source | +| [aws_lambda_functions.lambda_check_all](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/lambda_functions) | data source | +| [aws_secretsmanager_secret_version.db](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/secretsmanager_secret_version) | data source | +| [aws_security_groups.venue_proxy_sg](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/security_groups) | data source | +| [aws_ssm_parameter.subnet_ids](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/ssm_parameter) | data source | +| [aws_vpc.cluster_vpc](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/vpc) | data source | | [kubernetes_ingress_v1.airflow_ingress](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/data-sources/ingress_v1) | data source | -| [kubernetes_ingress_v1.ogc_processes_api_ingress](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/data-sources/ingress_v1) | data source | +| [kubernetes_namespace.service_area](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/data-sources/namespace) | data source | ## Inputs | Name | Description | Type | Default | Required | |------|-------------|------|---------|:--------:| | [airflow\_webserver\_password](#input\_airflow\_webserver\_password) | The password for the Airflow webserver and UI. | `string` | n/a | yes | -| [counter](#input\_counter) | Identifier used to uniquely distinguish resources. This is used in the naming convention of the resource. If left empty, a random hexadecimal value will be generated and used instead. | `string` | n/a | yes | -| [dag\_catalog\_repo](#input\_dag\_catalog\_repo) | Git repository that stores the catalog of Airflow DAGs. |
object({
url = string
ref = string
dags_directory_path = string
})
| n/a | yes | -| [deployment\_name](#input\_deployment\_name) | The name of the deployment. | `string` | n/a | yes | -| [docker\_images](#input\_docker\_images) | Docker images for the associated services. |
object({
airflow = object({
name = string
tag = string
}),
ogc_processes_api = object({
name = string
tag = string
})
git_sync = object({
name = string
tag = string
})
redis = object({
name = string
tag = string
})
})
| n/a | yes | +| [airflow\_webserver\_username](#input\_airflow\_webserver\_username) | The username for the Airflow webserver and UI. | `string` | n/a | yes | +| [db\_instance\_identifier](#input\_db\_instance\_identifier) | The AWS DB instance identifier | `string` | n/a | yes | +| [db\_secret\_arn](#input\_db\_secret\_arn) | The version of the database secret in AWS Secrets Manager | `string` | n/a | yes | +| [docker\_images](#input\_docker\_images) | Docker images for the associated services. |
object({
airflow = object({
name = string
tag = string
})
})
| n/a | yes | +| [efs\_file\_system\_id](#input\_efs\_file\_system\_id) | The EFS file system ID | `string` | n/a | yes | | [helm\_charts](#input\_helm\_charts) | Helm charts for the associated services. |
map(object({
repository = string
chart = string
version = string
}))
| n/a | yes | -| [karpenter\_node\_pools](#input\_karpenter\_node\_pools) | Configuration for Karpenter node pools |
map(object({
requirements : list(object({
key : string
operator : string
values : list(string)
}))
limits : object({
cpu : string
memory : string
})
disruption : object({
consolidationPolicy : string
consolidateAfter : string
})
}))
| n/a | yes | +| [karpenter\_node\_pools](#input\_karpenter\_node\_pools) | Names of the Karpenter node pools | `list(string)` | n/a | yes | | [kubeconfig\_filepath](#input\_kubeconfig\_filepath) | The path to the kubeconfig file for the Kubernetes cluster. | `string` | n/a | yes | -| [mcp\_ami\_owner\_id](#input\_mcp\_ami\_owner\_id) | The ID of the MCP AMIs | `string` | n/a | yes | +| [kubernetes\_namespace](#input\_kubernetes\_namespace) | The kubernetes namespace for Airflow resources. | `string` | n/a | yes | | [project](#input\_project) | The project or mission deploying Unity SPS | `string` | n/a | yes | | [release](#input\_release) | The software release version. | `string` | n/a | yes | | [service\_area](#input\_service\_area) | The service area owner of the resources being deployed | `string` | n/a | yes | @@ -134,7 +102,7 @@ No modules. | Name | Description | |------|-------------| +| [airflow\_deployed\_dags\_pvc](#output\_airflow\_deployed\_dags\_pvc) | n/a | | [airflow\_urls](#output\_airflow\_urls) | SSM parameter IDs and URLs for the various Airflow endpoints. | -| [ogc\_processes\_urls](#output\_ogc\_processes\_urls) | SSM parameter IDs and URLs for the various OGC Processes endpoints. | | [s3\_buckets](#output\_s3\_buckets) | SSM parameter IDs and bucket names for the various buckets used in the pipeline. | diff --git a/terraform-unity/modules/terraform-unity-sps-airflow/data.tf b/terraform-unity/modules/terraform-unity-sps-airflow/data.tf index e85050b8..5584e16a 100644 --- a/terraform-unity/modules/terraform-unity-sps-airflow/data.tf +++ b/terraform-unity/modules/terraform-unity-sps-airflow/data.tf @@ -4,53 +4,35 @@ data "aws_eks_cluster" "cluster" { name = format(local.resource_name_prefix, "eks") } -data "aws_eks_cluster_auth" "cluster" { - name = format(local.resource_name_prefix, "eks") -} - data "aws_vpc" "cluster_vpc" { id = data.aws_eks_cluster.cluster.vpc_config[0].vpc_id } data "aws_ssm_parameter" "subnet_ids" { - name = "/unity/cs/account/network/subnet_list" -} - -data "aws_ssm_parameter" "al2_eks_optimized_ami" { - name = "/mcp/amis/aml2-eks-${replace(data.aws_eks_cluster.cluster.version, ".", "-")}" -} - -data "aws_iam_role" "cluster_iam_role" { - name = "${format(local.resource_name_prefix, "eks")}-eks-node-role" + name = "/unity/account/network/subnet_list" } -data "aws_security_group" "default" { - vpc_id = data.aws_eks_cluster.cluster.vpc_config[0].vpc_id - filter { - name = "tag:Name" - values = ["${format(local.resource_name_prefix, "eks")}-node"] +data "kubernetes_namespace" "service_area" { + metadata { + name = var.kubernetes_namespace } } data "kubernetes_ingress_v1" "airflow_ingress" { metadata { name = kubernetes_ingress_v1.airflow_ingress.metadata[0].name - namespace = kubernetes_namespace.airflow.metadata[0].name + namespace = data.kubernetes_namespace.service_area.metadata[0].name } } -data "kubernetes_ingress_v1" "ogc_processes_api_ingress" { - metadata { - name = kubernetes_ingress_v1.ogc_processes_api_ingress.metadata[0].name - namespace = kubernetes_namespace.airflow.metadata[0].name - } +data "aws_db_instance" "db" { + db_instance_identifier = var.db_instance_identifier } +data "aws_secretsmanager_secret_version" "db" { + secret_id = var.db_secret_arn +} -data "aws_ami" "al2_eks_optimized" { - filter { - name = "image-id" - values = [data.aws_ssm_parameter.al2_eks_optimized_ami.value] - } - owners = [var.mcp_ami_owner_id] +data "aws_efs_file_system" "efs" { + file_system_id = var.efs_file_system_id } diff --git 
a/terraform-unity/modules/terraform-unity-sps-airflow/locals.tf b/terraform-unity/modules/terraform-unity-sps-airflow/locals.tf index 080cf388..58f31a8c 100644 --- a/terraform-unity/modules/terraform-unity-sps-airflow/locals.tf +++ b/terraform-unity/modules/terraform-unity-sps-airflow/locals.tf @@ -1,7 +1,6 @@ locals { - counter = var.counter != "" ? var.counter : random_id.counter.hex - resource_name_prefix = join("-", compact([var.project, var.venue, var.service_area, "%s", var.deployment_name, local.counter])) + resource_name_prefix = join("-", compact([var.project, var.venue, var.service_area, "%s"])) common_tags = { Name = "" Venue = var.venue @@ -14,8 +13,10 @@ locals { mission = var.project Stack = "" } - oidc_provider_url = replace(data.aws_eks_cluster.cluster.identity[0].oidc[0].issuer, "https://", "") - airflow_webserver_username = "admin" + load_balancer_port = 5000 + oidc_provider_url = replace(data.aws_eks_cluster.cluster.identity[0].oidc[0].issuer, "https://", "") + airflow_metadata_kubernetes_secret = "airflow-metadata-secret" + airflow_webserver_kubernetes_secret = "airflow-webserver-secret" airflow_webserver_navbar_color = { "ops" = "#bf4f4f" "prod" = "#bf4f4f" diff --git a/terraform-unity/modules/terraform-unity-sps-airflow/main.tf b/terraform-unity/modules/terraform-unity-sps-airflow/main.tf index 39009264..6bfa1810 100644 --- a/terraform-unity/modules/terraform-unity-sps-airflow/main.tf +++ b/terraform-unity/modules/terraform-unity-sps-airflow/main.tf @@ -1,7 +1,3 @@ -resource "random_id" "counter" { - byte_length = 2 -} - resource "kubernetes_namespace" "keda" { metadata { name = "keda" @@ -34,25 +30,19 @@ resource "null_resource" "remove_keda_finalizers" { triggers = { always_run = timestamp() kubeconfig_filepath = var.kubeconfig_filepath - airflow_namespace = kubernetes_namespace.airflow.metadata[0].name + airflow_namespace = data.kubernetes_namespace.service_area.metadata[0].name } depends_on = [helm_release.keda, helm_release.airflow] } -resource "kubernetes_namespace" "airflow" { - metadata { - name = "airflow" - } -} - resource "random_id" "airflow_webserver_secret" { byte_length = 16 } resource "kubernetes_secret" "airflow_webserver" { metadata { - name = "airflow-webserver-secret" - namespace = kubernetes_namespace.airflow.metadata[0].name + name = local.airflow_webserver_kubernetes_secret + namespace = data.kubernetes_namespace.service_area.metadata[0].name } data = { "webserver-secret-key" = random_id.airflow_webserver_secret.hex @@ -63,7 +53,7 @@ resource "kubernetes_secret" "airflow_webserver" { resource "kubernetes_role" "airflow_pod_creator" { metadata { name = "airflow-job-launcher-and-reader-role" - namespace = kubernetes_namespace.airflow.metadata[0].name + namespace = data.kubernetes_namespace.service_area.metadata[0].name } # rule { @@ -100,7 +90,7 @@ resource "kubernetes_role" "airflow_pod_creator" { resource "kubernetes_role_binding" "airflow_pod_creator_binding" { metadata { name = "airflow-pod-creator-binding" - namespace = kubernetes_namespace.airflow.metadata[0].name + namespace = data.kubernetes_namespace.service_area.metadata[0].name } role_ref { api_group = "rbac.authorization.k8s.io" @@ -110,114 +100,17 @@ resource "kubernetes_role_binding" "airflow_pod_creator_binding" { subject { kind = "ServiceAccount" name = "airflow-worker" - namespace = kubernetes_namespace.airflow.metadata[0].name + namespace = data.kubernetes_namespace.service_area.metadata[0].name } subject { kind = "ServiceAccount" name = "airflow-webserver" - namespace = 
kubernetes_namespace.airflow.metadata[0].name + namespace = data.kubernetes_namespace.service_area.metadata[0].name } subject { kind = "ServiceAccount" name = "airflow-triggerer" - namespace = kubernetes_namespace.airflow.metadata[0].name - } -} - - -resource "random_password" "sps_db" { - length = 16 - special = true - override_special = "_!%^" -} - -resource "aws_secretsmanager_secret" "sps_db" { - name = format(local.resource_name_prefix, "db") - recovery_window_in_days = 0 - tags = merge(local.common_tags, { - Name = format(local.resource_name_prefix, "db") - Component = "processing" - Stack = "processing" - }) -} - -resource "aws_secretsmanager_secret_version" "sps_db" { - secret_id = aws_secretsmanager_secret.sps_db.id - secret_string = random_password.sps_db.result -} - -resource "aws_db_subnet_group" "sps_db" { - name = format(local.resource_name_prefix, "db") - subnet_ids = jsondecode(data.aws_ssm_parameter.subnet_ids.value)["private"] - tags = merge(local.common_tags, { - Name = format(local.resource_name_prefix, "db") - Component = "processing" - Stack = "processing" - }) -} - -# Security group for RDS -resource "aws_security_group" "rds_sg" { - name = format(local.resource_name_prefix, "RdsEc2") - description = "Security group for RDS instance to allow traffic from EKS nodes" - vpc_id = data.aws_eks_cluster.cluster.vpc_config[0].vpc_id - tags = merge(local.common_tags, { - Name = format(local.resource_name_prefix, "RdsEc2") - Component = "airflow" - Stack = "airflow" - }) -} - -# Ingress rule for RDS security group to allow PostgreSQL traffic from EKS nodes security group -resource "aws_security_group_rule" "rds_ingress_from_eks" { - type = "ingress" - from_port = 5432 - to_port = 5432 - protocol = "tcp" - security_group_id = aws_security_group.rds_sg.id - source_security_group_id = data.aws_security_group.default.id -} - -# Egress rule for EKS nodes security group to allow PostgreSQL traffic to RDS security group -resource "aws_security_group_rule" "eks_egress_to_rds" { - type = "egress" - from_port = 5432 - to_port = 5432 - protocol = "tcp" - security_group_id = data.aws_security_group.default.id - source_security_group_id = aws_security_group.rds_sg.id -} - -resource "aws_db_instance" "sps_db" { - identifier = format(local.resource_name_prefix, "spsdb") - allocated_storage = 100 - storage_type = "gp3" - engine = "postgres" - engine_version = "13.13" - instance_class = "db.m5d.large" - db_name = "sps_db" - username = "sps_db_user" - password = aws_secretsmanager_secret_version.sps_db.secret_string - parameter_group_name = "default.postgres13" - skip_final_snapshot = true - publicly_accessible = false - db_subnet_group_name = aws_db_subnet_group.sps_db.name - vpc_security_group_ids = [aws_security_group.rds_sg.id] - tags = merge(local.common_tags, { - Name = format(local.resource_name_prefix, "db") - Component = "processing" - Stack = "processing" - }) -} - -resource "kubernetes_secret" "airflow_metadata" { - metadata { - name = "airflow-metadata-secret" - namespace = kubernetes_namespace.airflow.metadata[0].name - } - data = { - kedaConnection = "postgresql://${aws_db_instance.sps_db.username}:${urlencode(aws_secretsmanager_secret_version.sps_db.secret_string)}@${aws_db_instance.sps_db.endpoint}/${aws_db_instance.sps_db.db_name}" - connection = "postgresql://${aws_db_instance.sps_db.username}:${urlencode(aws_secretsmanager_secret_version.sps_db.secret_string)}@${aws_db_instance.sps_db.endpoint}/${aws_db_instance.sps_db.db_name}" + namespace = 
data.kubernetes_namespace.service_area.metadata[0].name } } @@ -250,7 +143,9 @@ resource "aws_iam_policy" "airflow_worker_policy" { "sqs:SendMessage", "sqs:ReceiveMessage", "sns:Publish", + "ecr:GetAuthorizationToken", "ecr:GetDownloadUrlForLayer", + "ecr:BatchCheckLayerAvailability", "ecr:BatchGetImage", "secretsmanager:GetSecretValue", "ssm:GetParameters", @@ -278,7 +173,7 @@ resource "aws_iam_role" "airflow_worker_role" { "Action" : "sts:AssumeRoleWithWebIdentity", "Condition" : { "StringEquals" : { - "${local.oidc_provider_url}:sub" : "system:serviceaccount:${kubernetes_namespace.airflow.metadata[0].name}:airflow-worker" + "${local.oidc_provider_url}:sub" : "system:serviceaccount:${data.kubernetes_namespace.service_area.metadata[0].name}:airflow-worker" } } } @@ -302,15 +197,6 @@ resource "kubernetes_storage_class" "efs" { storage_provisioner = "efs.csi.aws.com" } -resource "aws_efs_file_system" "airflow" { - creation_token = format(local.resource_name_prefix, "AirflowEfs") - tags = merge(local.common_tags, { - Name = format(local.resource_name_prefix, "AirflowEfs") - Component = "airflow" - Stack = "airflow" - }) -} - resource "aws_security_group" "airflow_efs" { name = format(local.resource_name_prefix, "AirflowEfsSg") description = "Security group for the EFS used in Airflow" @@ -333,13 +219,13 @@ resource "aws_security_group_rule" "airflow_efs" { resource "aws_efs_mount_target" "airflow" { for_each = nonsensitive(toset(jsondecode(data.aws_ssm_parameter.subnet_ids.value)["private"])) - file_system_id = aws_efs_file_system.airflow.id + file_system_id = data.aws_efs_file_system.efs.id subnet_id = each.value security_groups = [aws_security_group.airflow_efs.id] } resource "aws_efs_access_point" "airflow_kpo" { - file_system_id = aws_efs_file_system.airflow.id + file_system_id = data.aws_efs_file_system.efs.id posix_user { gid = 0 uid = 50000 @@ -360,7 +246,7 @@ resource "aws_efs_access_point" "airflow_kpo" { } resource "aws_efs_access_point" "airflow_deployed_dags" { - file_system_id = aws_efs_file_system.airflow.id + file_system_id = data.aws_efs_file_system.efs.id posix_user { gid = 0 uid = 50000 @@ -401,7 +287,7 @@ resource "kubernetes_persistent_volume" "airflow_kpo" { persistent_volume_source { csi { driver = "efs.csi.aws.com" - volume_handle = "${aws_efs_file_system.airflow.id}::${aws_efs_access_point.airflow_kpo.id}" + volume_handle = "${data.aws_efs_file_system.efs.id}::${aws_efs_access_point.airflow_kpo.id}" } } storage_class_name = kubernetes_storage_class.efs.metadata[0].name @@ -411,7 +297,7 @@ resource "kubernetes_persistent_volume" "airflow_kpo" { resource "kubernetes_persistent_volume_claim" "airflow_kpo" { metadata { name = "airflow-kpo" - namespace = kubernetes_namespace.airflow.metadata[0].name + namespace = data.kubernetes_namespace.service_area.metadata[0].name } spec { access_modes = ["ReadWriteMany"] @@ -438,7 +324,7 @@ resource "kubernetes_persistent_volume" "airflow_deployed_dags" { persistent_volume_source { csi { driver = "efs.csi.aws.com" - volume_handle = "${aws_efs_file_system.airflow.id}::${aws_efs_access_point.airflow_deployed_dags.id}" + volume_handle = "${data.aws_efs_file_system.efs.id}::${aws_efs_access_point.airflow_deployed_dags.id}" } } storage_class_name = kubernetes_storage_class.efs.metadata[0].name @@ -448,7 +334,7 @@ resource "kubernetes_persistent_volume" "airflow_deployed_dags" { resource "kubernetes_persistent_volume_claim" "airflow_deployed_dags" { metadata { name = "airflow-deployed-dags" - namespace = 
kubernetes_namespace.airflow.metadata[0].name + namespace = data.kubernetes_namespace.service_area.metadata[0].name } spec { access_modes = ["ReadWriteMany"] @@ -462,19 +348,30 @@ resource "kubernetes_persistent_volume_claim" "airflow_deployed_dags" { } } +resource "kubernetes_secret" "airflow_metadata" { + metadata { + name = local.airflow_metadata_kubernetes_secret + namespace = data.kubernetes_namespace.service_area.metadata[0].name + } + data = { + kedaConnection = "postgresql://${data.aws_db_instance.db.master_username}:${urlencode(data.aws_secretsmanager_secret_version.db.secret_string)}@${data.aws_db_instance.db.endpoint}/${data.aws_db_instance.db.db_name}" + connection = "postgresql://${data.aws_db_instance.db.master_username}:${urlencode(data.aws_secretsmanager_secret_version.db.secret_string)}@${data.aws_db_instance.db.endpoint}/${data.aws_db_instance.db.db_name}" + } +} + resource "helm_release" "airflow" { name = "airflow" repository = var.helm_charts.airflow.repository chart = var.helm_charts.airflow.chart version = var.helm_charts.airflow.version - namespace = kubernetes_namespace.airflow.metadata[0].name + namespace = data.kubernetes_namespace.service_area.metadata[0].name values = [ templatefile("${path.module}/../../../airflow/helm/values.tmpl.yaml", { airflow_image_repo = var.docker_images.airflow.name airflow_image_tag = var.docker_images.airflow.tag - kubernetes_namespace = kubernetes_namespace.airflow.metadata[0].name - metadata_secret_name = "airflow-metadata-secret" - webserver_secret_name = "airflow-webserver-secret" + kubernetes_namespace = data.kubernetes_namespace.service_area.metadata[0].name + metadata_secret_name = local.airflow_metadata_kubernetes_secret + webserver_secret_name = local.airflow_webserver_kubernetes_secret airflow_logs_s3_location = "s3://${aws_s3_bucket.airflow_logs.id}" airflow_worker_role_arn = aws_iam_role.airflow_worker_role.arn workers_pvc_name = kubernetes_persistent_volume_claim.airflow_kpo.metadata[0].name @@ -485,246 +382,81 @@ resource "helm_release" "airflow" { service_area_version = var.release unity_project = var.project unity_venue = var.venue - unity_deployment_name = var.deployment_name - unity_counter = var.counter unity_cluster_name = data.aws_eks_cluster.cluster.name + karpenter_node_pools = join(",", var.karpenter_node_pools) + cwl_dag_ecr_uri = "${data.aws_caller_identity.current.account_id}.dkr.ecr.us-west-2.amazonaws.com" }) ] + set_sensitive { + name = "webserver.defaultUser.username" + value = var.airflow_webserver_username + } set_sensitive { name = "webserver.defaultUser.password" value = var.airflow_webserver_password } timeout = 1200 depends_on = [ - aws_db_instance.sps_db, helm_release.keda, kubernetes_secret.airflow_metadata, kubernetes_secret.airflow_webserver, - kubernetes_manifest.karpenter_node_pools, ] } -resource "kubernetes_deployment" "redis" { - metadata { - name = "ogc-processes-api-redis-lock" - namespace = kubernetes_namespace.airflow.metadata[0].name - } - spec { - replicas = 1 - selector { - match_labels = { - app = "redis" - } - } - template { - metadata { - labels = { - app = "redis" - } - } - spec { - container { - name = "redis" - image = "${var.docker_images.redis.name}:${var.docker_images.redis.tag}" - port { - container_port = 6379 - } - } - } - } - } +resource "aws_security_group" "airflow_ingress_sg" { + name = "${var.project}-${var.venue}-airflow-ingress-sg" + description = "SecurityGroup for Airflow LoadBalancer ingress" + vpc_id = data.aws_vpc.cluster_vpc.id + tags = 
merge(local.common_tags, { + Name = format(local.resource_name_prefix, "AirflowLBSg") + Component = "airflow" + Stack = "airflow" + }) } -resource "kubernetes_service" "redis" { - metadata { - name = "ogc-processes-api-redis-lock" - namespace = kubernetes_namespace.airflow.metadata[0].name - } - spec { - selector = { - app = "redis" - } - port { - name = "redis" - port = 6379 - target_port = 6379 - } - type = "ClusterIP" - } +#tfsec:ignore:AVD-AWS-0107 +resource "aws_vpc_security_group_ingress_rule" "airflow_ingress_sg_jpl_rule" { + for_each = toset(["128.149.0.0/16", "137.78.0.0/16", "137.79.0.0/16"]) + security_group_id = aws_security_group.airflow_ingress_sg.id + description = "SecurityGroup ingress rule for JPL-local addresses" + ip_protocol = "tcp" + from_port = local.load_balancer_port + to_port = local.load_balancer_port + cidr_ipv4 = each.key } -resource "kubernetes_deployment" "ogc_processes_api" { - metadata { - name = "ogc-processes-api" - namespace = kubernetes_namespace.airflow.metadata[0].name +data "aws_security_groups" "venue_proxy_sg" { + filter { + name = "group-name" + values = ["${var.project}-${var.venue}-ecs_service_sg"] } - spec { - replicas = 2 - selector { - match_labels = { - app = "ogc-processes-api" - } - } - template { - metadata { - labels = { - app = "ogc-processes-api" - } - } - spec { - affinity { - node_affinity { - required_during_scheduling_ignored_during_execution { - node_selector_term { - match_expressions { - key = "karpenter.sh/nodepool" - operator = "In" - values = ["airflow-core-components"] - } - match_expressions { - key = "karpenter.sh/capacity-type" - operator = "In" - values = ["on-demand"] - } - match_expressions { - key = "karpenter.k8s.aws/instance-family" - operator = "In" - values = ["c6i", "c5"] - } - match_expressions { - key = "karpenter.k8s.aws/instance-cpu" - operator = "In" - values = ["2", "4"] - } - } - } - } - } - container { - name = "ogc-processes-api" - image = "${var.docker_images.ogc_processes_api.name}:${var.docker_images.ogc_processes_api.tag}" - port { - container_port = 80 - } - env { - name = "DB_URL" - value = "postgresql://${aws_db_instance.sps_db.username}:${urlencode(aws_secretsmanager_secret_version.sps_db.secret_string)}@${aws_db_instance.sps_db.endpoint}/${aws_db_instance.sps_db.db_name}" - } - env { - name = "REDIS_HOST" - value = "${kubernetes_service.redis.metadata[0].name}.${kubernetes_namespace.airflow.metadata[0].name}.svc.cluster.local" - - } - env { - name = "REDIS_PORT" - value = 6379 - } - env { - name = "EMS_API_URL" - value = "http://airflow-webserver.${kubernetes_namespace.airflow.metadata[0].name}.svc.cluster.local:8080/api/v1" - } - env { - name = "EMS_API_AUTH_USERNAME" - value = local.airflow_webserver_username - } - env { - name = "EMS_API_AUTH_PASSWORD" - value = var.airflow_webserver_password - } - env { - name = "DAG_CATALOG_DIRECTORY" - value = "/dag-catalog/current/${var.dag_catalog_repo.dags_directory_path}" - } - env { - name = "DEPLOYED_DAGS_DIRECTORY" - value = "/deployed-dags" - } - volume_mount { - name = "dag-catalog" - mount_path = "/dag-catalog" - } - volume_mount { - name = "deployed-dags" - mount_path = "/deployed-dags" - } - } - container { - name = "git-sync" - image = "${var.docker_images.git_sync.name}:${var.docker_images.git_sync.tag}" - env { - name = "GITSYNC_REPO" - value = var.dag_catalog_repo.url - } - env { - name = "GITSYNC_REF" - value = var.dag_catalog_repo.ref - } - env { - name = "GITSYNC_ROOT" - value = "/dag-catalog" - } - env { - name = "GITSYNC_LINK" - value 
= "current" - } - env { - name = "GITSYNC_PERIOD" - value = "3s" - } - env { - name = "GITSYNC_ONE_TIME" - value = "false" - } - volume_mount { - name = "dag-catalog" - mount_path = "/dag-catalog" - } - } - volume { - name = "deployed-dags" - persistent_volume_claim { - claim_name = kubernetes_persistent_volume_claim.airflow_deployed_dags.metadata[0].name - } - } - volume { - name = "dag-catalog" - empty_dir {} - } - } - } + tags = { + Service = "U-CS" } - depends_on = [ - kubernetes_manifest.karpenter_node_pools - ] } -resource "kubernetes_service" "ogc_processes_api" { - metadata { - name = "ogc-processes-api" - namespace = kubernetes_namespace.airflow.metadata[0].name - } - spec { - selector = { - app = "ogc-processes-api" - } - port { - port = 80 - target_port = 80 - } - type = "ClusterIP" - } +resource "aws_vpc_security_group_ingress_rule" "airflow_ingress_sg_proxy_rule" { + count = length(data.aws_security_groups.venue_proxy_sg.ids) > 0 ? 1 : 0 + security_group_id = aws_security_group.airflow_ingress_sg.id + description = "SecurityGroup ingress rule for venue-services proxy" + ip_protocol = "tcp" + from_port = local.load_balancer_port + to_port = local.load_balancer_port + referenced_security_group_id = data.aws_security_groups.venue_proxy_sg.ids[0] } resource "kubernetes_ingress_v1" "airflow_ingress" { metadata { name = "airflow-ingress" - namespace = kubernetes_namespace.airflow.metadata[0].name + namespace = data.kubernetes_namespace.service_area.metadata[0].name annotations = { - "alb.ingress.kubernetes.io/scheme" = "internet-facing" - "alb.ingress.kubernetes.io/target-type" = "ip" - "alb.ingress.kubernetes.io/subnets" = join(",", jsondecode(data.aws_ssm_parameter.subnet_ids.value)["public"]) - "alb.ingress.kubernetes.io/listen-ports" = "[{\"HTTP\": 5000}]" - "alb.ingress.kubernetes.io/healthcheck-path" = "/health" + "alb.ingress.kubernetes.io/scheme" = "internet-facing" + "alb.ingress.kubernetes.io/target-type" = "ip" + "alb.ingress.kubernetes.io/subnets" = join(",", jsondecode(data.aws_ssm_parameter.subnet_ids.value)["public"]) + "alb.ingress.kubernetes.io/listen-ports" = "[{\"HTTP\": ${local.load_balancer_port}}]" + "alb.ingress.kubernetes.io/security-groups" = aws_security_group.airflow_ingress_sg.id + "alb.ingress.kubernetes.io/manage-backend-security-group-rules" = "true" + "alb.ingress.kubernetes.io/healthcheck-path" = "/health" } } spec { @@ -743,18 +475,6 @@ resource "kubernetes_ingress_v1" "airflow_ingress" { } } } - # path { - # path = "/ogc-processes-api" - # path_type = "Prefix" - # backend { - # service { - # name = "ogc-processes-api" - # port { - # number = 80 - # } - # } - # } - # } } } } @@ -762,42 +482,8 @@ resource "kubernetes_ingress_v1" "airflow_ingress" { depends_on = [helm_release.airflow] } -resource "kubernetes_ingress_v1" "ogc_processes_api_ingress" { - metadata { - name = "ogc-processes-api-ingress" - namespace = kubernetes_namespace.airflow.metadata[0].name - annotations = { - "alb.ingress.kubernetes.io/scheme" = "internet-facing" - "alb.ingress.kubernetes.io/target-type" = "ip" - "alb.ingress.kubernetes.io/subnets" = join(",", jsondecode(data.aws_ssm_parameter.subnet_ids.value)["public"]) - "alb.ingress.kubernetes.io/listen-ports" = "[{\"HTTP\": 5001}]" - "alb.ingress.kubernetes.io/healthcheck-path" = "/health" - } - } - spec { - ingress_class_name = "alb" - rule { - http { - path { - path = "/" - path_type = "Prefix" - backend { - service { - name = kubernetes_service.ogc_processes_api.metadata[0].name - port { - number = 80 - } - } - } - } - } - } 
- } - wait_for_load_balancer = true -} - resource "aws_ssm_parameter" "airflow_ui_url" { - name = format("/%s", join("/", compact(["", var.project, var.venue, var.service_area, var.deployment_name, local.counter, "processing", "airflow", "ui_url"]))) + name = format("/%s", join("/", compact(["", var.project, var.venue, var.service_area, "processing", "airflow", "ui_url"]))) description = "The URL of the Airflow UI." type = "String" value = "http://${data.kubernetes_ingress_v1.airflow_ingress.status[0].load_balancer[0].ingress[0].hostname}:5000" @@ -808,444 +494,105 @@ resource "aws_ssm_parameter" "airflow_ui_url" { }) } -resource "aws_ssm_parameter" "airflow_api_url" { - name = format("/%s", join("/", compact(["", var.project, var.venue, var.service_area, var.deployment_name, local.counter, "processing", "airflow", "api_url"]))) - description = "The URL of the Airflow REST API." +resource "aws_ssm_parameter" "airflow_ui_health_check_endpoint" { + name = format("/%s", join("/", compact(["", "unity", var.project, var.venue, "component", "airflow-ui"]))) + description = "The URL of the Airflow UI." type = "String" - value = "http://${data.kubernetes_ingress_v1.airflow_ingress.status[0].load_balancer[0].ingress[0].hostname}:5000/api/v1" - tags = merge(local.common_tags, { - Name = format(local.resource_name_prefix, "endpoints-airflow_api") - Component = "SSM" - Stack = "SSM" + value = jsonencode({ + "componentName" : "Airflow UI" + "healthCheckUrl" : "http://${data.kubernetes_ingress_v1.airflow_ingress.status[0].load_balancer[0].ingress[0].hostname}:5000/health" + "landingPageUrl" : "http://${data.kubernetes_ingress_v1.airflow_ingress.status[0].load_balancer[0].ingress[0].hostname}:5000" }) -} - -resource "aws_ssm_parameter" "airflow_logs" { - name = format("/%s", join("/", compact(["", var.project, var.venue, var.service_area, var.deployment_name, local.counter, "processing", "airflow", "logs"]))) - description = "The name of the S3 bucket for the Airflow logs." - type = "String" - value = aws_s3_bucket.airflow_logs.id tags = merge(local.common_tags, { - Name = format(local.resource_name_prefix, "S3-airflow_logs") + Name = format(local.resource_name_prefix, "health-check-endpoints-airflow_ui") Component = "SSM" Stack = "SSM" }) + lifecycle { + ignore_changes = [value] + } } -resource "aws_ssm_parameter" "ogc_processes_ui_url" { - name = format("/%s", join("/", compact(["", var.project, var.venue, var.service_area, var.deployment_name, local.counter, "processing", "ogc_processes", "ui_url"]))) - description = "The URL of the OGC Proccesses API Docs UI." +resource "aws_ssm_parameter" "airflow_api_url" { + name = format("/%s", join("/", compact(["", var.project, var.venue, var.service_area, "processing", "airflow", "api_url"]))) + description = "The URL of the Airflow REST API." 
type = "String" - value = "http://${data.kubernetes_ingress_v1.ogc_processes_api_ingress.status[0].load_balancer[0].ingress[0].hostname}:5001/redoc" + value = "http://${data.kubernetes_ingress_v1.airflow_ingress.status[0].load_balancer[0].ingress[0].hostname}:5000/api/v1" tags = merge(local.common_tags, { - Name = format(local.resource_name_prefix, "endpoints-ogc_processes_ui") + Name = format(local.resource_name_prefix, "endpoints-airflow_api") Component = "SSM" Stack = "SSM" }) } -resource "aws_ssm_parameter" "ogc_processes_api_url" { - name = format("/%s", join("/", compact(["", var.project, var.venue, var.service_area, var.deployment_name, local.counter, "processing", "ogc_processes", "api_url"]))) - description = "The URL of the OGC Processes REST API." +resource "aws_ssm_parameter" "airflow_api_health_check_endpoint" { + name = format("/%s", join("/", compact(["", "unity", var.project, var.venue, "component", "airflow-api"]))) + description = "The URL of the Airflow REST API." type = "String" - value = "http://${data.kubernetes_ingress_v1.ogc_processes_api_ingress.status[0].load_balancer[0].ingress[0].hostname}:5001" + value = jsonencode({ + "componentName" : "Airflow API" + "healthCheckUrl" : "http://${data.kubernetes_ingress_v1.airflow_ingress.status[0].load_balancer[0].ingress[0].hostname}:5000/api/v1/health" + "landingPageUrl" : "http://${data.kubernetes_ingress_v1.airflow_ingress.status[0].load_balancer[0].ingress[0].hostname}:5000/api/v1" + }) tags = merge(local.common_tags, { - Name = format(local.resource_name_prefix, "endpoints-ogc_processes_api") + Name = format(local.resource_name_prefix, "health-check-endpoints-airflow_api") Component = "SSM" Stack = "SSM" }) -} - -resource "kubernetes_manifest" "karpenter_node_class" { - manifest = { - apiVersion = "karpenter.k8s.aws/v1beta1" - kind = "EC2NodeClass" - metadata = { - name = "default" - } - spec = { - amiFamily = "AL2" - amiSelectorTerms = [{ - id = data.aws_ami.al2_eks_optimized.image_id - }] - userData = <<-EOT - #!/bin/bash - echo "Starting pre-bootstrap configurations..." - # Custom script to enable IP forwarding - sudo sed -i 's/^net.ipv4.ip_forward = 0/net.ipv4.ip_forward = 1/' /etc/sysctl.conf && sudo sysctl -p |true - echo "Pre-bootstrap configurations applied." 
- EOT - role = data.aws_iam_role.cluster_iam_role.name - subnetSelectorTerms = [for subnet_id in jsondecode(data.aws_ssm_parameter.subnet_ids.value)["private"] : { - id = subnet_id - }] - securityGroupSelectorTerms = [{ - tags = { - "kubernetes.io/cluster/${data.aws_eks_cluster.cluster.name}" = "owned" - "Name" = "${data.aws_eks_cluster.cluster.name}-node" - } - }] - blockDeviceMappings = [for bd in tolist(data.aws_ami.al2_eks_optimized.block_device_mappings) : { - deviceName = bd.device_name - ebs = { - volumeSize = "${bd.ebs.volume_size}Gi" - volumeType = bd.ebs.volume_type - encrypted = bd.ebs.encrypted - deleteOnTermination = bd.ebs.delete_on_termination - } - }] - metadataOptions = { - httpEndpoint = "enabled" - httpPutResponseHopLimit = 3 - } - tags = merge(local.common_tags, { - "karpenter.sh/discovery" = data.aws_eks_cluster.cluster.name - Name = format(local.resource_name_prefix, "karpenter") - Component = "karpenter" - Stack = "karpenter" - }) - } - } -} - -resource "null_resource" "remove_node_class_finalizers" { - # https://github.com/aws/karpenter-provider-aws/issues/5079 - provisioner "local-exec" { - when = destroy - command = < + ProxyPassReverse "/" + + + Redirect "/${var.project}/${var.venue}/sps/home" + + + ProxyPassMatch "http://${data.kubernetes_ingress_v1.airflow_ingress.status[0].load_balancer[0].ingress[0].hostname}:5000/$1" + ProxyPreserveHost On + FallbackResource /management/index.html + AddOutputFilterByType INFLATE;SUBSTITUTE;DEFLATE text/html + Substitute "s|\"/([^\"]*)|\"/${var.project}/${var.venue}/sps/$1|q" + + +EOT tags = merge(local.common_tags, { - Name = format(local.resource_name_prefix, "S3-isl") + Name = format(local.resource_name_prefix, "httpd-proxy-config-airflow") Component = "SSM" Stack = "SSM" }) } -resource "aws_sns_topic" "s3_isl_event_topic" { - name = format(local.resource_name_prefix, "S3IslSnsTopic") - tags = merge(local.common_tags, { - Name = format(local.resource_name_prefix, "SNS-S3IslSnsTopic") - Component = "SNS" - Stack = "SNS" - }) -} - -resource "aws_sns_topic_policy" "s3_isl_event_topic_policy" { - arn = aws_sns_topic.s3_isl_event_topic.arn - policy = jsonencode({ - Version = "2012-10-17" - Statement = [{ - Effect = "Allow" - Principal = { - Service = "s3.amazonaws.com" - } - Action = "SNS:Publish" - Resource = aws_sns_topic.s3_isl_event_topic.arn - Condition = { - ArnLike = { - "aws:SourceArn" : aws_s3_bucket.inbound_staging_location.arn - } - } - }] - }) -} +data "aws_lambda_functions" "lambda_check_all" {} -resource "aws_s3_bucket_notification" "isl_bucket_notification" { - bucket = aws_s3_bucket.inbound_staging_location.id - topic { - topic_arn = aws_sns_topic.s3_isl_event_topic.arn - events = ["s3:ObjectCreated:*"] +resource "aws_lambda_invocation" "unity_proxy_lambda_invocation" { + count = contains(data.aws_lambda_functions.lambda_check_all.function_names, "unity-${var.venue}-httpdproxymanagement") ? 
1 : 0 + function_name = "unity-${var.venue}-httpdproxymanagement" + input = "{}" + triggers = { + redeployment = sha1(jsonencode([ + aws_ssm_parameter.unity_proxy_airflow_ui, + ])) } - depends_on = [ - aws_sns_topic_policy.s3_isl_event_topic_policy, - aws_sqs_queue_policy.s3_isl_event_queue_policy - ] -} - -resource "aws_sqs_queue" "s3_isl_event_queue" { - name = format(local.resource_name_prefix, "S3IslSqsQueue") - visibility_timeout_seconds = 60 - tags = merge(local.common_tags, { - Name = format(local.resource_name_prefix, "SQS-S3IslSqsQueue") - Component = "SQS" - Stack = "SQS" - }) } -resource "aws_sqs_queue_policy" "s3_isl_event_queue_policy" { - queue_url = aws_sqs_queue.s3_isl_event_queue.id - policy = jsonencode({ - Version = "2012-10-17" - Statement = [ - { - Effect = "Allow" - Principal = "*" - Action = "sqs:SendMessage" - Resource = aws_sqs_queue.s3_isl_event_queue.arn - Condition = { - ArnEquals = { - "aws:SourceArn" = aws_sns_topic.s3_isl_event_topic.arn - } - } - }, - ] - }) -} - -resource "aws_sns_topic_subscription" "s3_isl_event_subscription" { - topic_arn = aws_sns_topic.s3_isl_event_topic.arn - protocol = "sqs" - endpoint = aws_sqs_queue.s3_isl_event_queue.arn -} - -resource "aws_s3_bucket" "lambdas" { - bucket = format(local.resource_name_prefix, "lambdas") - force_destroy = true - tags = merge(local.common_tags, { - # Add or overwrite specific tags for this resource - Name = format(local.resource_name_prefix, "S3-lambdas") - Component = "S3" - Stack = "S3" - }) -} - -resource "aws_s3_object" "lambdas" { - bucket = aws_s3_bucket.lambdas.id - key = format("%s.zip", format(local.resource_name_prefix, "AirflowDAGTrigger")) - # TODO remove handcoding of lambda file name - source = "${abspath(path.module)}/../../../lambda/deployment_packages/airflow-dag-trigger_package.zip" - depends_on = [null_resource.build_lambda_packages] -} - -resource "aws_ssm_parameter" "airflow_dag_trigger_lambda_package" { - name = format("/%s", join("/", compact(["", var.project, var.venue, var.service_area, var.deployment_name, local.counter, "artifacts", "pipeline", "lambdas", "AirflowDAGTrigger"]))) - description = "The S3 key of the Lambda package for the Airflow Dag Trigger." +resource "aws_ssm_parameter" "airflow_logs" { + name = format("/%s", join("/", compact(["", var.project, var.venue, var.service_area, "processing", "airflow", "logs"]))) + description = "The name of the S3 bucket for the Airflow logs." type = "String" - value = aws_s3_object.lambdas.key + value = aws_s3_bucket.airflow_logs.id tags = merge(local.common_tags, { - Name = format(local.resource_name_prefix, "SSM-AirflowDAGTrigger") + Name = format(local.resource_name_prefix, "S3-airflow_logs") Component = "SSM" Stack = "SSM" }) } - -resource "aws_iam_role" "lambda" { - name = format(local.resource_name_prefix, "LambdaExecutionRole") - assume_role_policy = jsonencode({ - Version = "2012-10-17" - Statement = [ - { - Effect = "Allow" - Principal = { - Service = "lambda.amazonaws.com" - } - Action = "sts:AssumeRole" - }, - ] - }) - permissions_boundary = "arn:aws:iam::${data.aws_caller_identity.current.account_id}:policy/mcp-tenantOperator-AMI-APIG" -} - -# Attach necessary policies to the role. For Lambda execution, you often need AWSLambdaBasicExecutionRole for logging etc. 
-resource "aws_iam_role_policy_attachment" "lambda_logs" { - role = aws_iam_role.lambda.name - policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole" -} - -# If the Lambda interacts with specific AWS services, you might need to create and attach custom policies here. -resource "aws_iam_policy" "lambda_sqs_access" { - name = format(local.resource_name_prefix, "LambdaSQSAccessPolicy") - description = "Allows Lambda function to interact with SQS queue" - policy = jsonencode({ - Version = "2012-10-17" - Statement = [ - { - Effect = "Allow" - Action = [ - "sqs:ReceiveMessage", - "sqs:DeleteMessage", - "sqs:GetQueueAttributes" - ], - Resource = aws_sqs_queue.s3_isl_event_queue.arn - }, - ] - }) -} - -resource "aws_iam_role_policy_attachment" "lambda_sqs_access_attach" { - role = aws_iam_role.lambda.name - policy_arn = aws_iam_policy.lambda_sqs_access.arn -} - -resource "aws_lambda_function" "airflow_dag_trigger" { - function_name = format(local.resource_name_prefix, "AirflowDAGTrigger") - s3_bucket = format(local.resource_name_prefix, "lambdas") - s3_key = aws_ssm_parameter.airflow_dag_trigger_lambda_package.value - role = aws_iam_role.lambda.arn - handler = "airflow_dag_trigger.lambda_handler" - runtime = "python3.9" - timeout = 60 - environment { - variables = { - AIRFLOW_BASE_API_ENDPOINT = aws_ssm_parameter.airflow_api_url.value - AIRFLOW_USERNAME = "admin" - AIRFLOW_PASSWORD = var.airflow_webserver_password - } - } - tags = merge(local.common_tags, { - Name = format(local.resource_name_prefix, "Lambda-AirflowDAGTrigger") - Component = "Lambda" - Stack = "Lambda" - }) - depends_on = [ - aws_cloudwatch_log_group.airflow_dag_trigger, - ] -} - -resource "aws_cloudwatch_log_group" "airflow_dag_trigger" { - name = "/aws/lambda/${format(local.resource_name_prefix, "AirflowDAGTrigger")}" - retention_in_days = 14 - tags = merge(local.common_tags, { - Name = format(local.resource_name_prefix, "CloudWatch-${format(local.resource_name_prefix, "AirflowDAGTrigger")}") - Component = "CloudWatch" - Stack = "CloudWatch" - }) -} - -resource "aws_lambda_event_source_mapping" "lambda_airflow_dag_trigger" { - event_source_arn = aws_sqs_queue.s3_isl_event_queue.arn - function_name = aws_lambda_function.airflow_dag_trigger.arn - batch_size = 1 -} diff --git a/terraform-unity/modules/terraform-unity-sps-airflow/outputs.tf b/terraform-unity/modules/terraform-unity-sps-airflow/outputs.tf index 961c847d..654d726f 100644 --- a/terraform-unity/modules/terraform-unity-sps-airflow/outputs.tf +++ b/terraform-unity/modules/terraform-unity-sps-airflow/outputs.tf @@ -12,20 +12,6 @@ output "airflow_urls" { } } -output "ogc_processes_urls" { - description = "SSM parameter IDs and URLs for the various OGC Processes endpoints." - value = { - "ui" = { - "ssm_param_id" = aws_ssm_parameter.ogc_processes_ui_url.id, - "url" = nonsensitive(aws_ssm_parameter.ogc_processes_ui_url.value) - } - "rest_api" = { - "ssm_param_id" = aws_ssm_parameter.ogc_processes_api_url.id, - "url" = nonsensitive(aws_ssm_parameter.ogc_processes_api_url.value) - } - } -} - output "s3_buckets" { description = "SSM parameter IDs and bucket names for the various buckets used in the pipeline." 
value = { @@ -35,3 +21,7 @@ output "s3_buckets" { } } } + +output "airflow_deployed_dags_pvc" { + value = kubernetes_persistent_volume_claim.airflow_deployed_dags.metadata[0].name +} diff --git a/terraform-unity/modules/terraform-unity-sps-airflow/variables.tf b/terraform-unity/modules/terraform-unity-sps-airflow/variables.tf index d3b9b7d9..3f6ff754 100644 --- a/terraform-unity/modules/terraform-unity-sps-airflow/variables.tf +++ b/terraform-unity/modules/terraform-unity-sps-airflow/variables.tf @@ -13,23 +13,38 @@ variable "service_area" { type = string } -variable "deployment_name" { - description = "The name of the deployment." +variable "release" { + description = "The software release version." type = string } -variable "counter" { - description = "Identifier used to uniquely distinguish resources. This is used in the naming convention of the resource. If left empty, a random hexadecimal value will be generated and used instead." +variable "kubernetes_namespace" { + description = "The kubernetes namespace for Airflow resources." type = string } -variable "release" { - description = "The software release version." +variable "kubeconfig_filepath" { + description = "The path to the kubeconfig file for the Kubernetes cluster." type = string } -variable "kubeconfig_filepath" { - description = "The path to the kubeconfig file for the Kubernetes cluster." +variable "db_instance_identifier" { + description = "The AWS DB instance identifier" + type = string +} + +variable "db_secret_arn" { + description = "The ARN of the database secret version in AWS Secrets Manager" + type = string +} + +variable "efs_file_system_id" { + description = "The EFS file system ID" + type = string +} + +variable "airflow_webserver_username" { + description = "The username for the Airflow webserver and UI." + type = string } @@ -53,51 +68,11 @@ variable "docker_images" { airflow = object({ name = string tag = string - }), - ogc_processes_api = object({ - name = string - tag = string - }) - git_sync = object({ - name = string - tag = string - }) - redis = object({ - name = string - tag = string }) }) } -variable "mcp_ami_owner_id" { - description = "The ID of the MCP AMIs" - type = string -} - variable "karpenter_node_pools" { - description = "Configuration for Karpenter node pools" - type = map(object({ - requirements : list(object({ - key : string - operator : string - values : list(string) - })) - limits : object({ - cpu : string - memory : string - }) - disruption : object({ - consolidationPolicy : string - consolidateAfter : string - }) - })) -} - -variable "dag_catalog_repo" { - description = "Git repository that stores the catalog of Airflow DAGs." 
- type = object({ - url = string - ref = string - dags_directory_path = string - }) + description = "Names of the Karpenter node pools" + type = list(string) } diff --git a/terraform-unity/modules/terraform-unity-sps-airflow/versions.tf b/terraform-unity/modules/terraform-unity-sps-airflow/versions.tf index 6e3c85b3..9be9a912 100644 --- a/terraform-unity/modules/terraform-unity-sps-airflow/versions.tf +++ b/terraform-unity/modules/terraform-unity-sps-airflow/versions.tf @@ -19,7 +19,7 @@ terraform { } aws = { source = "hashicorp/aws" - version = "5.47.0" + version = "5.50.0" } time = { source = "hashicorp/time" diff --git a/terraform-unity/modules/terraform-unity-sps-database/.terraform.lock.hcl b/terraform-unity/modules/terraform-unity-sps-database/.terraform.lock.hcl new file mode 100644 index 00000000..7dd9920a --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-database/.terraform.lock.hcl @@ -0,0 +1,65 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. + +provider "registry.terraform.io/hashicorp/aws" { + version = "5.50.0" + constraints = "5.50.0" + hashes = [ + "h1:LevuTzPS4S7t+Vh6Kpz77pBNDAwChaos91/6+CVnD4w=", + "zh:19be42f5a545d6712dee4bdb704b018d23bacf5d902ac3cb061eb1750dfe6a20", + "zh:1d880bdba95ce96efde37e5bcf457a57df2c1effa9b47bc67fa29c1a264ae53b", + "zh:1e9c78e324d7492be5e7744436ed71d66fe4eca3fb6af07a28efd0d1e3bf7640", + "zh:27ac672aa61b3795931561fdbe4a306ad1132af517d7711c14569429b2cc694f", + "zh:3b978423dead02f9a98d25de118adf264a2331acdc4550ea93bed01feabc12e7", + "zh:490d7eb4b922ba1b57e0ab8dec1a08df6517485febcab1e091fd6011281c3472", + "zh:64e7c84e18dac1af5778d6f516e01a46f9c91d710867c39fbc7efa3cd972dc62", + "zh:73867ac2956dcdd377121b3aa8fe2e1085e77fae9b61d018f56a863277ea4b6e", + "zh:7ed899d0d5c49f009b445d7816e4bf702d9c48205c24cf884cd2ae0247160455", + "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", + "zh:9b93784b3fb13d08cf95a4131c49b56bf7e1cd35daad6156b3658a89ce6fb58f", + "zh:b29d77eb75de474e46eb47e539c48916628d85599bcf14e5cc500b14a4578e75", + "zh:bbd9cec8ca705452e4a3d21d56474eacb8cc7b1b74b7f310fdea4bdcffebab32", + "zh:c352eb3169efa0e27a29b99a2630e8298710a084453c519caa39e5972ff6d1fc", + "zh:e32f4744b43be1708b309a734e0ac10b5c0f9f92e5849298cf1a90f2b906f6f3", + ] +} + +provider "registry.terraform.io/hashicorp/kubernetes" { + version = "2.29.0" + constraints = "2.29.0" + hashes = [ + "h1:7C1MinWhowW8EnlSYhhAFV3bte8x5YcSF5QxUPdoXDk=", + "zh:3edd5dc319b95fe94e61b82d10c1ce7fb53a2f21b067ddb742f2d7d0d19dd113", + "zh:4b9096e6d0cfa0efd4c89270e3d25fea49db570e2cfbe49c5d1de085a15f2578", + "zh:5397573838bcb8844248c8d6ac93cca7f39a0b707ac3ce7a7b306c50c261c195", + "zh:5d635370720d356b7bcb5756ca28de3275ca32ca1ef0201414caecd3a14759ac", + "zh:71a52280408f3fb0ff1866a9ab8059b0d9bde5481869658798e0773461f22eff", + "zh:748663ef0248d2d95f5dea2974332432a395165657856878c5dc6f000b37cc25", + "zh:7fbc1e084bbbb51e31afd3df0c77e833ae59e88cf42b9e2c17b0b1a1e3894723", + "zh:ae89b4be473b446270fa24dc1ef51b0cc4c2a528d9838ec15246d28bac165df3", + "zh:b6433970d680a0cc9898f915224508b5ece86ae4418372fa6bebd2a9d344f226", + "zh:bf871955cf49015e6a0433e814a22a109c1537a775b8b5dc7b37ad05c324904a", + "zh:c16fac91b2197b443a191d98cf37424feed550387ab11bd1427bde819722005e", + "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", + ] +} + +provider "registry.terraform.io/hashicorp/random" { + version = "3.6.1" + constraints = "3.6.1" + hashes = [ + "h1:a+Goawwh6Qtg4/bRWzfDtIdrEFfPlnVy0y4LdUQY3nI=", + 
"zh:2a0ec154e39911f19c8214acd6241e469157489fc56b6c739f45fbed5896a176", + "zh:57f4e553224a5e849c99131f5e5294be3a7adcabe2d867d8a4fef8d0976e0e52", + "zh:58f09948c608e601bd9d0a9e47dcb78e2b2c13b4bda4d8f097d09152ea9e91c5", + "zh:5c2a297146ed6fb3fe934c800e78380f700f49ff24dbb5fb5463134948e3a65f", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:7ce41e26f0603e31cdac849085fc99e5cd5b3b73414c6c6d955c0ceb249b593f", + "zh:8c9e8d30c4ef08ee8bcc4294dbf3c2115cd7d9049c6ba21422bd3471d92faf8a", + "zh:93e91be717a7ffbd6410120eb925ebb8658cc8f563de35a8b53804d33c51c8b0", + "zh:982542e921970d727ce10ed64795bf36c4dec77a5db0741d4665230d12250a0d", + "zh:b9d1873f14d6033e216510ef541c891f44d249464f13cc07d3f782d09c7d18de", + "zh:cfe27faa0bc9556391c8803ade135a5856c34a3fe85b9ae3bdd515013c0c87c1", + "zh:e4aabf3184bbb556b89e4b195eab1514c86a2914dd01c23ad9813ec17e863a8a", + ] +} diff --git a/terraform-unity/modules/terraform-unity-sps-database/README.md b/terraform-unity/modules/terraform-unity-sps-database/README.md new file mode 100644 index 00000000..75a95d16 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-database/README.md @@ -0,0 +1,53 @@ + +## Requirements + +| Name | Version | +|------|---------| +| [terraform](#requirement\_terraform) | ~> 1.8.2 | +| [aws](#requirement\_aws) | 5.50.0 | +| [kubernetes](#requirement\_kubernetes) | 2.29.0 | +| [random](#requirement\_random) | 3.6.1 | + +## Providers + +| Name | Version | +|------|---------| +| [aws](#provider\_aws) | 5.50.0 | +| [random](#provider\_random) | 3.6.1 | + +## Modules + +No modules. + +## Resources + +| Name | Type | +|------|------| +| [aws_db_instance.sps_db](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/db_instance) | resource | +| [aws_db_subnet_group.db](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/db_subnet_group) | resource | +| [aws_secretsmanager_secret.db](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/secretsmanager_secret) | resource | +| [aws_secretsmanager_secret_version.db](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/secretsmanager_secret_version) | resource | +| [aws_security_group.rds_sg](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/security_group) | resource | +| [aws_security_group_rule.eks_egress_to_rds](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/security_group_rule) | resource | +| [aws_security_group_rule.rds_ingress_from_eks](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/security_group_rule) | resource | +| [random_password.db](https://registry.terraform.io/providers/hashicorp/random/3.6.1/docs/resources/password) | resource | +| [aws_eks_cluster.cluster](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/eks_cluster) | data source | +| [aws_security_group.default](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/security_group) | data source | +| [aws_ssm_parameter.subnet_ids](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/ssm_parameter) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [project](#input\_project) | The project or mission deploying Unity SPS | `string` | n/a | yes | +| [release](#input\_release) | The software release version. 
| `string` | n/a | yes | +| [service\_area](#input\_service\_area) | The service area owner of the resources being deployed | `string` | n/a | yes | +| [venue](#input\_venue) | The MCP venue in which the cluster will be deployed (dev, test, prod) | `string` | n/a | yes | + +## Outputs + +| Name | Description | +|------|-------------| +| [db\_instance\_identifier](#output\_db\_instance\_identifier) | n/a | +| [db\_secret\_arn](#output\_db\_secret\_arn) | n/a | + diff --git a/terraform-unity/modules/terraform-unity-sps-database/data.tf b/terraform-unity/modules/terraform-unity-sps-database/data.tf new file mode 100644 index 00000000..266c660f --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-database/data.tf @@ -0,0 +1,15 @@ +data "aws_eks_cluster" "cluster" { + name = format(local.resource_name_prefix, "eks") +} + +data "aws_ssm_parameter" "subnet_ids" { + name = "/unity/account/network/subnet_list" +} + +data "aws_security_group" "default" { + vpc_id = data.aws_eks_cluster.cluster.vpc_config[0].vpc_id + filter { + name = "tag:Name" + values = ["${format(local.resource_name_prefix, "eks")}-node"] + } +} diff --git a/terraform-unity/modules/terraform-unity-sps-database/locals.tf b/terraform-unity/modules/terraform-unity-sps-database/locals.tf new file mode 100644 index 00000000..3f54b220 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-database/locals.tf @@ -0,0 +1,15 @@ +locals { + resource_name_prefix = join("-", compact([var.project, var.venue, var.service_area, "%s"])) + common_tags = { + Name = "" + Venue = var.venue + Proj = var.project + ServiceArea = var.service_area + CapVersion = var.release + Component = "" + CreatedBy = var.service_area + Env = var.venue + mission = var.project + Stack = "" + } +} diff --git a/terraform-unity/modules/terraform-unity-sps-database/main.tf b/terraform-unity/modules/terraform-unity-sps-database/main.tf new file mode 100644 index 00000000..5c2aa29f --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-database/main.tf @@ -0,0 +1,84 @@ +resource "random_password" "db" { + length = 16 + special = true + override_special = "_!%^" +} + +resource "aws_secretsmanager_secret" "db" { + name = format(local.resource_name_prefix, "db") + recovery_window_in_days = 0 + tags = merge(local.common_tags, { + Name = format(local.resource_name_prefix, "db") + Component = "processing" + Stack = "processing" + }) +} + +resource "aws_secretsmanager_secret_version" "db" { + secret_id = aws_secretsmanager_secret.db.id + secret_string = random_password.db.result +} + +resource "aws_db_subnet_group" "db" { + name = format(local.resource_name_prefix, "db") + subnet_ids = jsondecode(data.aws_ssm_parameter.subnet_ids.value)["private"] + tags = merge(local.common_tags, { + Name = format(local.resource_name_prefix, "db") + Component = "processing" + Stack = "processing" + }) +} + +# Security group for RDS +resource "aws_security_group" "rds_sg" { + name = format(local.resource_name_prefix, "RdsEc2") + description = "Security group for RDS instance to allow traffic from EKS nodes" + vpc_id = data.aws_eks_cluster.cluster.vpc_config[0].vpc_id + tags = merge(local.common_tags, { + Name = format(local.resource_name_prefix, "RdsEc2") + Component = "airflow" + Stack = "airflow" + }) +} + +# Ingress rule for RDS security group to allow PostgreSQL traffic from EKS nodes security group +resource "aws_security_group_rule" "rds_ingress_from_eks" { + type = "ingress" + from_port = 5432 + to_port = 5432 + protocol = "tcp" + security_group_id = 
aws_security_group.rds_sg.id + source_security_group_id = data.aws_security_group.default.id +} + +# Egress rule for EKS nodes security group to allow PostgreSQL traffic to RDS security group +resource "aws_security_group_rule" "eks_egress_to_rds" { + type = "egress" + from_port = 5432 + to_port = 5432 + protocol = "tcp" + security_group_id = data.aws_security_group.default.id + source_security_group_id = aws_security_group.rds_sg.id +} + +resource "aws_db_instance" "sps_db" { + identifier = format(local.resource_name_prefix, "spsdb") # format(local.resource_name_prefix, "db") + allocated_storage = 100 + storage_type = "gp3" + engine = "postgres" + engine_version = "13.13" + instance_class = "db.m5d.large" + db_name = "sps_db" + username = "db_user" + password = aws_secretsmanager_secret_version.db.secret_string + parameter_group_name = "default.postgres13" + skip_final_snapshot = true + publicly_accessible = false + db_subnet_group_name = aws_db_subnet_group.db.name + vpc_security_group_ids = [aws_security_group.rds_sg.id] + tags = merge(local.common_tags, { + Name = format(local.resource_name_prefix, "db") + Component = "processing" + Stack = "processing" + }) +} diff --git a/terraform-unity/modules/terraform-unity-sps-database/outputs.tf b/terraform-unity/modules/terraform-unity-sps-database/outputs.tf new file mode 100644 index 00000000..126657ad --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-database/outputs.tf @@ -0,0 +1,7 @@ +output "db_instance_identifier" { + value = aws_db_instance.sps_db.id +} + +output "db_secret_arn" { + value = aws_secretsmanager_secret_version.db.arn +} diff --git a/terraform-unity/modules/terraform-unity-sps-database/variables.tf b/terraform-unity/modules/terraform-unity-sps-database/variables.tf new file mode 100644 index 00000000..54189e70 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-database/variables.tf @@ -0,0 +1,19 @@ +variable "project" { + description = "The project or mission deploying Unity SPS" + type = string +} + +variable "venue" { + description = "The MCP venue in which the cluster will be deployed (dev, test, prod)" + type = string +} + +variable "service_area" { + description = "The service area owner of the resources being deployed" + type = string +} + +variable "release" { + description = "The software release version." + type = string +} diff --git a/terraform-unity/modules/terraform-unity-sps-database/versions.tf b/terraform-unity/modules/terraform-unity-sps-database/versions.tf new file mode 100644 index 00000000..86bbb022 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-database/versions.tf @@ -0,0 +1,17 @@ +terraform { + required_version = "~> 1.8.2" + required_providers { + aws = { + source = "hashicorp/aws" + version = "5.50.0" + } + kubernetes = { + source = "hashicorp/kubernetes" + version = "2.29.0" + } + random = { + source = "hashicorp/random" + version = "3.6.1" + } + } +} diff --git a/terraform-unity/modules/terraform-unity-sps-efs/.terraform.lock.hcl b/terraform-unity/modules/terraform-unity-sps-efs/.terraform.lock.hcl new file mode 100644 index 00000000..754e4544 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-efs/.terraform.lock.hcl @@ -0,0 +1,25 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. 
+ +provider "registry.terraform.io/hashicorp/aws" { + version = "5.50.0" + constraints = "5.50.0" + hashes = [ + "h1:LevuTzPS4S7t+Vh6Kpz77pBNDAwChaos91/6+CVnD4w=", + "zh:19be42f5a545d6712dee4bdb704b018d23bacf5d902ac3cb061eb1750dfe6a20", + "zh:1d880bdba95ce96efde37e5bcf457a57df2c1effa9b47bc67fa29c1a264ae53b", + "zh:1e9c78e324d7492be5e7744436ed71d66fe4eca3fb6af07a28efd0d1e3bf7640", + "zh:27ac672aa61b3795931561fdbe4a306ad1132af517d7711c14569429b2cc694f", + "zh:3b978423dead02f9a98d25de118adf264a2331acdc4550ea93bed01feabc12e7", + "zh:490d7eb4b922ba1b57e0ab8dec1a08df6517485febcab1e091fd6011281c3472", + "zh:64e7c84e18dac1af5778d6f516e01a46f9c91d710867c39fbc7efa3cd972dc62", + "zh:73867ac2956dcdd377121b3aa8fe2e1085e77fae9b61d018f56a863277ea4b6e", + "zh:7ed899d0d5c49f009b445d7816e4bf702d9c48205c24cf884cd2ae0247160455", + "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", + "zh:9b93784b3fb13d08cf95a4131c49b56bf7e1cd35daad6156b3658a89ce6fb58f", + "zh:b29d77eb75de474e46eb47e539c48916628d85599bcf14e5cc500b14a4578e75", + "zh:bbd9cec8ca705452e4a3d21d56474eacb8cc7b1b74b7f310fdea4bdcffebab32", + "zh:c352eb3169efa0e27a29b99a2630e8298710a084453c519caa39e5972ff6d1fc", + "zh:e32f4744b43be1708b309a734e0ac10b5c0f9f92e5849298cf1a90f2b906f6f3", + ] +} diff --git a/terraform-unity/modules/terraform-unity-sps-efs/README.md b/terraform-unity/modules/terraform-unity-sps-efs/README.md new file mode 100644 index 00000000..21212527 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-efs/README.md @@ -0,0 +1,41 @@ + +## Requirements + +| Name | Version | +|------|---------| +| [terraform](#requirement\_terraform) | ~> 1.8.2 | +| [aws](#requirement\_aws) | 5.50.0 | + +## Providers + +| Name | Version | +|------|---------| +| [aws](#provider\_aws) | 5.50.0 | + +## Modules + +No modules. + +## Resources + +| Name | Type | +|------|------| +| [aws_efs_file_system.efs](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/efs_file_system) | resource | +| [aws_kms_alias.efs_key_alias](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/kms_alias) | resource | +| [aws_kms_key.efs_key](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/kms_key) | resource | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [project](#input\_project) | The project or mission deploying Unity SPS | `string` | n/a | yes | +| [release](#input\_release) | The software release version. 
| `string` | n/a | yes | +| [service\_area](#input\_service\_area) | The service area owner of the resources being deployed | `string` | n/a | yes | +| [venue](#input\_venue) | The MCP venue in which the cluster will be deployed (dev, test, prod) | `string` | n/a | yes | + +## Outputs + +| Name | Description | +|------|-------------| +| [file\_system\_id](#output\_file\_system\_id) | n/a | + diff --git a/terraform-unity/modules/terraform-unity-sps-efs/locals.tf b/terraform-unity/modules/terraform-unity-sps-efs/locals.tf new file mode 100644 index 00000000..e61981e2 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-efs/locals.tf @@ -0,0 +1,16 @@ + +locals { + resource_name_prefix = join("-", compact([var.project, var.venue, var.service_area, "%s"])) + common_tags = { + Name = "" + Venue = var.venue + Proj = var.project + ServiceArea = var.service_area + CapVersion = var.release + Component = "" + CreatedBy = var.service_area + Env = var.venue + mission = var.project + Stack = "" + } +} diff --git a/terraform-unity/modules/terraform-unity-sps-efs/main.tf b/terraform-unity/modules/terraform-unity-sps-efs/main.tf new file mode 100644 index 00000000..859f3923 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-efs/main.tf @@ -0,0 +1,28 @@ +resource "aws_kms_key" "efs_key" { + description = "KMS key for EFS encryption" + deletion_window_in_days = 7 + enable_key_rotation = true + + tags = merge(local.common_tags, { + Name = format(local.resource_name_prefix, "EfsKmsKey") + Component = "airflow" + Stack = "airflow" + }) +} + +resource "aws_kms_alias" "efs_key_alias" { + name = "alias/${format(local.resource_name_prefix, "efs-key")}" + target_key_id = aws_kms_key.efs_key.key_id +} + +resource "aws_efs_file_system" "efs" { + creation_token = format(local.resource_name_prefix, "AirflowEfs") + encrypted = true + kms_key_id = aws_kms_key.efs_key.arn + + tags = merge(local.common_tags, { + Name = format(local.resource_name_prefix, "AirflowEfs") + Component = "airflow" + Stack = "airflow" + }) +} diff --git a/terraform-unity/modules/terraform-unity-sps-efs/outputs.tf b/terraform-unity/modules/terraform-unity-sps-efs/outputs.tf new file mode 100644 index 00000000..8f7320c5 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-efs/outputs.tf @@ -0,0 +1,3 @@ +output "file_system_id" { + value = aws_efs_file_system.efs.id +} diff --git a/terraform-unity/modules/terraform-unity-sps-efs/variables.tf b/terraform-unity/modules/terraform-unity-sps-efs/variables.tf new file mode 100644 index 00000000..54189e70 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-efs/variables.tf @@ -0,0 +1,19 @@ +variable "project" { + description = "The project or mission deploying Unity SPS" + type = string +} + +variable "venue" { + description = "The MCP venue in which the cluster will be deployed (dev, test, prod)" + type = string +} + +variable "service_area" { + description = "The service area owner of the resources being deployed" + type = string +} + +variable "release" { + description = "The software release version." 
+ type = string +} diff --git a/terraform-unity/modules/terraform-unity-sps-efs/versions.tf b/terraform-unity/modules/terraform-unity-sps-efs/versions.tf new file mode 100644 index 00000000..cd96074b --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-efs/versions.tf @@ -0,0 +1,9 @@ +terraform { + required_version = "~> 1.8.2" + required_providers { + aws = { + source = "hashicorp/aws" + version = "5.50.0" + } + } +} diff --git a/terraform-unity/modules/terraform-unity-sps-eks/.terraform.lock.hcl b/terraform-unity/modules/terraform-unity-sps-eks/.terraform.lock.hcl index cc5c4af0..0b29f14a 100644 --- a/terraform-unity/modules/terraform-unity-sps-eks/.terraform.lock.hcl +++ b/terraform-unity/modules/terraform-unity-sps-eks/.terraform.lock.hcl @@ -45,81 +45,61 @@ provider "registry.terraform.io/hashicorp/cloudinit" { } provider "registry.terraform.io/hashicorp/helm" { - version = "2.13.1" + version = "2.14.0" hashes = [ - "h1:crwHSTDCQ6fS8dQYGkoi700MI5UpbA2BDLgMZgL3B+E=", - "zh:1bf0ae1ecfd2a5d5a57f695a33b2328ef197138f27ff372fed820c975eac9783", - "zh:4676295e3a929848b98869d3040f54f17fbed3d133342b6a1f7b72d5797239e0", - "zh:4bf3705e061e28d16a525aad9229fdd842cdc96f7c23d040d3148957ba3149d8", - "zh:69db9550eacd61d85cf456d438f08addfefea4fcbc4f4a8119105093ea3d950a", - "zh:6e11560e3ea61b141f03842771bfad143ff1c56bd0d1bc01069496107cad0ab6", - "zh:733ea41e2eb4bd63cfdae6886ed47d224dabb0cd37959c6e2b213b1914a80121", - "zh:74caefb2dc8e6055259d716c11194cc0709261c592d41466abf2dc0b21d88297", - "zh:89682ab50b5cf1f1c41eabfc76f53a56482ac7b4bf77d9cb087d789524fd3e31", - "zh:a5ff95092f2f123027b89f585612a225c9bce7e65977b4ffaf4de3ae3e7870bc", - "zh:c85fce024cb5a387702ceb42a3a06e32519cd1e61bc9dd820a762da21110ab96", - "zh:d828ef2db612798179322bcb3fe829a43dd47e740cabb67e3654c8561ae661ff", + "h1:8Vt9264v3UE6mHLRG8yiteVl5h8ZSTkJXf1xdVLa7GA=", + "zh:087a475fda3649e4b6b9aeb5f21704972f5d85c10d0bf334289b0a1b8c1a5575", + "zh:1877991d976491d4e2a653a89491bd3b92123a00f442f15aa62caea8902677c7", + "zh:233d9e550b900be8bbf62871322964239bb4827b3500b77d7e2652a8bae6a106", + "zh:6ed09d405ade276dfc6ec591d113ca328ea3fe423405d4bc1116f7a06dfd86ec", + "zh:9039de4cbee5ae006d9cbf27f40f0a285feb02c3b00901535a1112853de55b5f", + "zh:aea6311b0f29edddefa21b8c7953314459caeace77d72d60588d1277f1723c54", + "zh:bd6a4fea3461c2751527f1c4e4c2c160e72f5b5a3b5cfbfe051adf61badd5ead", + "zh:c5f12a2ea4c3b62d9dd2d8f62c9918ef77b1f9dd4d6ccf1758a2a24139ab5319", + "zh:cd84d7258f263c3bd24138e7633b022451fdc1935a11e34932b63f71bbe6059f", + "zh:e637d01ee4dc2e5702d62c158399ab0d0ba3269e71f5db38db922ff05505ae2a", "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", + "zh:fbf9c9936ae547b75a81170b7bd20f72bc5538e015efcf7d12f822358d758f57", ] } provider "registry.terraform.io/hashicorp/kubernetes" { - version = "2.29.0" + version = "2.31.0" constraints = ">= 2.10.0" hashes = [ - "h1:7C1MinWhowW8EnlSYhhAFV3bte8x5YcSF5QxUPdoXDk=", - "zh:3edd5dc319b95fe94e61b82d10c1ce7fb53a2f21b067ddb742f2d7d0d19dd113", - "zh:4b9096e6d0cfa0efd4c89270e3d25fea49db570e2cfbe49c5d1de085a15f2578", - "zh:5397573838bcb8844248c8d6ac93cca7f39a0b707ac3ce7a7b306c50c261c195", - "zh:5d635370720d356b7bcb5756ca28de3275ca32ca1ef0201414caecd3a14759ac", - "zh:71a52280408f3fb0ff1866a9ab8059b0d9bde5481869658798e0773461f22eff", - "zh:748663ef0248d2d95f5dea2974332432a395165657856878c5dc6f000b37cc25", - "zh:7fbc1e084bbbb51e31afd3df0c77e833ae59e88cf42b9e2c17b0b1a1e3894723", - "zh:ae89b4be473b446270fa24dc1ef51b0cc4c2a528d9838ec15246d28bac165df3", - 
"zh:b6433970d680a0cc9898f915224508b5ece86ae4418372fa6bebd2a9d344f226", - "zh:bf871955cf49015e6a0433e814a22a109c1537a775b8b5dc7b37ad05c324904a", - "zh:c16fac91b2197b443a191d98cf37424feed550387ab11bd1427bde819722005e", + "h1:ZlKkkHJrjF4AiMueI2yA+abBc1c37cfwjyxURdLKhEw=", + "zh:0d16b861edb2c021b3e9d759b8911ce4cf6d531320e5dc9457e2ea64d8c54ecd", + "zh:1bad69ed535a5f32dec70561eb481c432273b81045d788eb8b37f2e4a322cc40", + "zh:43c58e3912fcd5bb346b5cb89f31061508a9be3ca7dd4cd8169c066203bcdfb3", + "zh:4778123da9206918a92dfa73cc711475d2b9a8275ff25c13a30513c523ac9660", + "zh:8bfa67d2db03b3bfae62beebe6fb961aee8d91b7a766efdfe4d337b33dfd23dd", + "zh:9020bb5729db59a520ade5e24984b737e65f8b81751fbbd343926f6d44d22176", + "zh:90431dbfc5b92498bfbce38f0b989978c84421a6c33245b97788a46b563fbd6e", + "zh:b71a061dda1244f6a52500e703a9524b851e7b11bbf238c17bbd282f27d51cb2", + "zh:d6232a7651b834b89591b94bf4446050119dcde740247e6083a4d55a2cefd28a", + "zh:d89fba43e699e28e2b5e92fff2f75fc03dbc8de0df9dacefe1a8836f8f430753", + "zh:ef85c0b744f5ba1b10dadc3c11e331ba4225c45bb733e024d7218c24b02b0512", "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", ] } -provider "registry.terraform.io/hashicorp/random" { - version = "3.6.1" - constraints = "3.6.1" - hashes = [ - "h1:a+Goawwh6Qtg4/bRWzfDtIdrEFfPlnVy0y4LdUQY3nI=", - "zh:2a0ec154e39911f19c8214acd6241e469157489fc56b6c739f45fbed5896a176", - "zh:57f4e553224a5e849c99131f5e5294be3a7adcabe2d867d8a4fef8d0976e0e52", - "zh:58f09948c608e601bd9d0a9e47dcb78e2b2c13b4bda4d8f097d09152ea9e91c5", - "zh:5c2a297146ed6fb3fe934c800e78380f700f49ff24dbb5fb5463134948e3a65f", - "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", - "zh:7ce41e26f0603e31cdac849085fc99e5cd5b3b73414c6c6d955c0ceb249b593f", - "zh:8c9e8d30c4ef08ee8bcc4294dbf3c2115cd7d9049c6ba21422bd3471d92faf8a", - "zh:93e91be717a7ffbd6410120eb925ebb8658cc8f563de35a8b53804d33c51c8b0", - "zh:982542e921970d727ce10ed64795bf36c4dec77a5db0741d4665230d12250a0d", - "zh:b9d1873f14d6033e216510ef541c891f44d249464f13cc07d3f782d09c7d18de", - "zh:cfe27faa0bc9556391c8803ade135a5856c34a3fe85b9ae3bdd515013c0c87c1", - "zh:e4aabf3184bbb556b89e4b195eab1514c86a2914dd01c23ad9813ec17e863a8a", - ] -} - provider "registry.terraform.io/hashicorp/time" { - version = "0.11.1" + version = "0.11.2" constraints = ">= 0.9.0" hashes = [ - "h1:pQGSL9mdgw4qsLndFYsEF93mbsIxyxNoAyIbBqhS3Xo=", - "zh:19a393db736ec4fd024d098d55aefaef07056c37a448ece3b55b3f5f4c2c7e4a", - "zh:227fa1e221de2907f37be78d40c06ca6a6f7b243a1ec33ade014dfaf6d92cd9c", - "zh:29970fecbf4a3ca23bacbb05d6b90cdd33dd379f90059fe39e08289951502d9f", - "zh:65024596f22f10e7dcb5e0e4a75277f275b529daa0bc0daf34ca7901c678ab88", - "zh:694d080cb5e3bf5ef08c7409208d061c135a4f5f4cdc93ea8607860995264b2e", + "h1:qg3O4PmHnlPcvuZ2LvzOYEAPGOKtccgD5kPdQPZw094=", + "zh:02588b5b8ba5d31e86d93edc93b306bcbf47c789f576769245968cc157a9e8c5", + "zh:088a30c23796133678d1d6614da5cf5544430570408a17062288b58c0bd67ac8", + "zh:0df5faa072d67616154d38021934d8a8a316533429a3f582df3b4b48c836cf89", + "zh:12edeeaef96c47f694bd1ba7ead6ccdb96028b25df352eea4bc5e40de7a59177", + "zh:1e859504a656a6e988f07b908e6ffe946b28bfb56889417c0a07ea9605a3b7b0", + "zh:64a6ae0320d4956c4fdb05629cfcebd03bcbd2206e2d733f2f18e4a97f4d5c7c", "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", - "zh:b29d15d13e1b3412e6a4e1627d378dbd102659132f7488f64017dd6b6d5216d3", - "zh:bb79f4cae9f8c17c73998edc54aa16c2130a03227f7f4e71fc6ac87e230575ec", - "zh:ceccf80e95929d97f62dcf1bb3c7c7553d5757b2d9e7d222518722fc934f7ad5", - 
"zh:f40e638336527490e294d9c938ae55919069e6987e85a80506784ba90348792a", - "zh:f99ef33b1629a3b2278201142a3011a8489e66d92da832a5b99e442204de18fb", - "zh:fded14754ea46fdecc62a52cd970126420d4cd190e598cb61190b4724a727edb", + "zh:924d137959193bf7aee6ebf241fbb9aec46d6eef828c5cf8d3c588770acae7b2", + "zh:b3cc76281a4faa9c2293a2460fc6962f6539e900994053f85185304887dddab8", + "zh:cbb40c791d4a1cdba56cffa43a9c0ed8e69930d49aa6bd931546b18c36e3b720", + "zh:d227d43594f8cb3d24f1fdd71382f14502cbe2a6deaddbc74242656bb5b38daf", + "zh:d4840641c46176bb9d70ba3aff09de749282136c779996b546c81e5ff701bbf6", ] } diff --git a/terraform-unity/modules/terraform-unity-sps-eks/README.md b/terraform-unity/modules/terraform-unity-sps-eks/README.md index d892f025..8005392b 100644 --- a/terraform-unity/modules/terraform-unity-sps-eks/README.md +++ b/terraform-unity/modules/terraform-unity-sps-eks/README.md @@ -7,38 +7,33 @@ |------|---------| | [terraform](#requirement\_terraform) | ~> 1.8.2 | | [aws](#requirement\_aws) | 5.47.0 | -| [random](#requirement\_random) | 3.6.1 | ## Providers | Name | Version | |------|---------| | [aws](#provider\_aws) | 5.47.0 | -| [random](#provider\_random) | 3.6.1 | ## Modules | Name | Source | Version | |------|--------|---------| -| [unity-eks](#module\_unity-eks) | git@github.com:unity-sds/unity-cs-infra.git//terraform-unity-eks_module | unity-sps-2.0.1 | +| [unity-eks](#module\_unity-eks) | git@github.com:unity-sds/unity-cs-infra.git//terraform-unity-eks_module | unity-sps-2.2.0-hotfix | ## Resources | Name | Type | |------|------| | [aws_iam_role_policy.sps_airflow_eks_inline_policy](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/iam_role_policy) | resource | -| [random_id.counter](https://registry.terraform.io/providers/hashicorp/random/3.6.1/docs/resources/id) | resource | | [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/data-sources/caller_identity) | data source | ## Inputs | Name | Description | Type | Default | Required | |------|-------------|------|---------|:--------:| -| [counter](#input\_counter) | Identifier used to uniquely distinguish resources. This is used in the naming convention of the resource. If left empty, a random hexadecimal value will be generated and used instead. | `string` | n/a | yes | -| [deployment\_name](#input\_deployment\_name) | The name of the deployment. | `string` | n/a | yes | -| [nodegroups](#input\_nodegroups) | A map of node group configurations |
map(object({
create_iam_role = optional(bool)
iam_role_arn = optional(string)
ami_id = optional(string)
min_size = optional(number)
max_size = optional(number)
desired_size = optional(number)
instance_types = optional(list(string))
capacity_type = optional(string)
enable_bootstrap_user_data = optional(bool)
metadata_options = optional(map(any))
block_device_mappings = optional(map(object({
device_name = string
ebs = object({
volume_size = number
volume_type = string
encrypted = bool
delete_on_termination = bool
})
})))
}))
|
{
"defaultGroup": {
"block_device_mappings": {
"xvda": {
"device_name": "/dev/xvda",
"ebs": {
"delete_on_termination": true,
"encrypted": true,
"volume_size": 100,
"volume_type": "gp2"
}
}
},
"desired_size": 1,
"instance_types": [
"t3.large"
],
"max_size": 1,
"metadata_options": {
"http_endpoint": "enabled",
"http_put_response_hop_limit": 3
},
"min_size": 1
}
}
| no | +| [nodegroups](#input\_nodegroups) | A map of node group configurations |
map(object({
create_iam_role = optional(bool)
iam_role_arn = optional(string)
ami_id = optional(string)
min_size = optional(number)
max_size = optional(number)
desired_size = optional(number)
instance_types = optional(list(string))
capacity_type = optional(string)
enable_bootstrap_user_data = optional(bool)
metadata_options = optional(map(any))
block_device_mappings = optional(map(object({
device_name = string
ebs = object({
volume_size = number
volume_type = string
encrypted = bool
delete_on_termination = bool
})
})))
}))
|
{
"defaultGroup": {
"block_device_mappings": {
"xvda": {
"device_name": "/dev/xvda",
"ebs": {
"delete_on_termination": true,
"encrypted": true,
"volume_size": 100,
"volume_type": "gp2"
}
}
},
"desired_size": 1,
"instance_types": [
"t3.xlarge"
],
"max_size": 1,
"metadata_options": {
"http_endpoint": "enabled",
"http_put_response_hop_limit": 3
},
"min_size": 1
}
}
| no | | [project](#input\_project) | The project or mission deploying Unity SPS | `string` | `"unity"` | no | -| [release](#input\_release) | The software release version. | `string` | `"24.2"` | no | +| [release](#input\_release) | The software release version. | `string` | `"24.3"` | no | | [service\_area](#input\_service\_area) | The service area owner of the resources being deployed | `string` | `"sps"` | no | | [venue](#input\_venue) | The MCP venue in which the cluster will be deployed (dev, test, prod) | `string` | n/a | yes | diff --git a/terraform-unity/modules/terraform-unity-sps-eks/locals.tf b/terraform-unity/modules/terraform-unity-sps-eks/locals.tf index 2b2cae34..5d4b2352 100644 --- a/terraform-unity/modules/terraform-unity-sps-eks/locals.tf +++ b/terraform-unity/modules/terraform-unity-sps-eks/locals.tf @@ -1,6 +1,5 @@ locals { - counter = var.counter != "" ? var.counter : random_id.counter.hex - resource_name_prefix = join("-", compact([var.project, var.venue, var.service_area, "%s", var.deployment_name, local.counter])) + resource_name_prefix = join("-", compact([var.project, var.venue, var.service_area, "%s"])) cluster_name = format(local.resource_name_prefix, "eks") common_tags = { Name = "" diff --git a/terraform-unity/modules/terraform-unity-sps-eks/main.tf b/terraform-unity/modules/terraform-unity-sps-eks/main.tf index 4dd26d9b..777fd649 100644 --- a/terraform-unity/modules/terraform-unity-sps-eks/main.tf +++ b/terraform-unity/modules/terraform-unity-sps-eks/main.tf @@ -1,21 +1,15 @@ -# S3 backend terraform { backend "s3" { - # full path to Terraform state file: - # s3:/// - bucket = "" - key = "" - region = "us-west-2" - encrypt = true + bucket = "unity-unity-dev-bucket" + workspace_key_prefix = "sps/tfstates" + key = "terraform.tfstate" + region = "us-west-2" + encrypt = true } } -resource "random_id" "counter" { - byte_length = 2 -} - module "unity-eks" { - source = "git@github.com:unity-sds/unity-cs-infra.git//terraform-unity-eks_module?ref=unity-sps-2.0.1" + source = "git@github.com:unity-sds/unity-cs-infra.git//terraform-unity-eks_module?ref=unity-sps-2.2.0-hotfix" deployment_name = local.cluster_name nodegroups = var.nodegroups aws_auth_roles = [{ @@ -28,7 +22,7 @@ module "unity-eks" { Component = "eks" Stack = "eks" }) - cluster_version = "1.27" + cluster_version = "1.29" } # add extra policies as inline policy diff --git a/terraform-unity/modules/terraform-unity-sps-eks/variables.tf b/terraform-unity/modules/terraform-unity-sps-eks/variables.tf index cf42ba35..3b36d1c3 100644 --- a/terraform-unity/modules/terraform-unity-sps-eks/variables.tf +++ b/terraform-unity/modules/terraform-unity-sps-eks/variables.tf @@ -15,20 +15,10 @@ variable "service_area" { default = "sps" } -variable "deployment_name" { - description = "The name of the deployment." - type = string -} - -variable "counter" { - description = "Identifier used to uniquely distinguish resources. This is used in the naming convention of the resource. If left empty, a random hexadecimal value will be generated and used instead." - type = string -} - variable "release" { description = "The software release version." 
type = string - default = "24.2" + default = "24.3" } variable "nodegroups" { @@ -56,7 +46,7 @@ variable "nodegroups" { })) default = { defaultGroup = { - instance_types = ["t3.large"] + instance_types = ["t3.xlarge"] min_size = 1 max_size = 1 desired_size = 1 diff --git a/terraform-unity/modules/terraform-unity-sps-eks/versions.tf b/terraform-unity/modules/terraform-unity-sps-eks/versions.tf index 5b50dbcc..b8182198 100644 --- a/terraform-unity/modules/terraform-unity-sps-eks/versions.tf +++ b/terraform-unity/modules/terraform-unity-sps-eks/versions.tf @@ -5,9 +5,5 @@ terraform { source = "hashicorp/aws" version = "5.47.0" } - random = { - source = "hashicorp/random" - version = "3.6.1" - } } } diff --git a/terraform-unity/modules/terraform-unity-sps-initiators/.terraform.lock.hcl b/terraform-unity/modules/terraform-unity-sps-initiators/.terraform.lock.hcl new file mode 100644 index 00000000..52f0550c --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-initiators/.terraform.lock.hcl @@ -0,0 +1,65 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. + +provider "registry.terraform.io/hashicorp/aws" { + version = "5.50.0" + constraints = ">= 5.50.0, 5.50.0" + hashes = [ + "h1:LevuTzPS4S7t+Vh6Kpz77pBNDAwChaos91/6+CVnD4w=", + "zh:19be42f5a545d6712dee4bdb704b018d23bacf5d902ac3cb061eb1750dfe6a20", + "zh:1d880bdba95ce96efde37e5bcf457a57df2c1effa9b47bc67fa29c1a264ae53b", + "zh:1e9c78e324d7492be5e7744436ed71d66fe4eca3fb6af07a28efd0d1e3bf7640", + "zh:27ac672aa61b3795931561fdbe4a306ad1132af517d7711c14569429b2cc694f", + "zh:3b978423dead02f9a98d25de118adf264a2331acdc4550ea93bed01feabc12e7", + "zh:490d7eb4b922ba1b57e0ab8dec1a08df6517485febcab1e091fd6011281c3472", + "zh:64e7c84e18dac1af5778d6f516e01a46f9c91d710867c39fbc7efa3cd972dc62", + "zh:73867ac2956dcdd377121b3aa8fe2e1085e77fae9b61d018f56a863277ea4b6e", + "zh:7ed899d0d5c49f009b445d7816e4bf702d9c48205c24cf884cd2ae0247160455", + "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", + "zh:9b93784b3fb13d08cf95a4131c49b56bf7e1cd35daad6156b3658a89ce6fb58f", + "zh:b29d77eb75de474e46eb47e539c48916628d85599bcf14e5cc500b14a4578e75", + "zh:bbd9cec8ca705452e4a3d21d56474eacb8cc7b1b74b7f310fdea4bdcffebab32", + "zh:c352eb3169efa0e27a29b99a2630e8298710a084453c519caa39e5972ff6d1fc", + "zh:e32f4744b43be1708b309a734e0ac10b5c0f9f92e5849298cf1a90f2b906f6f3", + ] +} + +provider "registry.terraform.io/hashicorp/local" { + version = "2.5.2" + constraints = ">= 2.5.1" + hashes = [ + "h1:IyFbOIO6mhikFNL/2h1iZJ6kyN3U00jgkpCLUCThAfE=", + "zh:136299545178ce281c56f36965bf91c35407c11897f7082b3b983d86cb79b511", + "zh:3b4486858aa9cb8163378722b642c57c529b6c64bfbfc9461d940a84cd66ebea", + "zh:4855ee628ead847741aa4f4fc9bed50cfdbf197f2912775dd9fe7bc43fa077c0", + "zh:4b8cd2583d1edcac4011caafe8afb7a95e8110a607a1d5fb87d921178074a69b", + "zh:52084ddaff8c8cd3f9e7bcb7ce4dc1eab00602912c96da43c29b4762dc376038", + "zh:71562d330d3f92d79b2952ffdda0dad167e952e46200c767dd30c6af8d7c0ed3", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:805f81ade06ff68fa8b908d31892eaed5c180ae031c77ad35f82cb7a74b97cf4", + "zh:8b6b3ebeaaa8e38dd04e56996abe80db9be6f4c1df75ac3cccc77642899bd464", + "zh:ad07750576b99248037b897de71113cc19b1a8d0bc235eb99173cc83d0de3b1b", + "zh:b9f1c3bfadb74068f5c205292badb0661e17ac05eb23bfe8bd809691e4583d0e", + "zh:cc4cbcd67414fefb111c1bf7ab0bc4beb8c0b553d01719ad17de9a047adff4d1", + ] +} + +provider "registry.terraform.io/hashicorp/null" { + version = "3.2.3" + constraints = 
">= 3.2.2" + hashes = [ + "h1:I0Um8UkrMUb81Fxq/dxbr3HLP2cecTH2WMJiwKSrwQY=", + "zh:22d062e5278d872fe7aed834f5577ba0a5afe34a3bdac2b81f828d8d3e6706d2", + "zh:23dead00493ad863729495dc212fd6c29b8293e707b055ce5ba21ee453ce552d", + "zh:28299accf21763ca1ca144d8f660688d7c2ad0b105b7202554ca60b02a3856d3", + "zh:55c9e8a9ac25a7652df8c51a8a9a422bd67d784061b1de2dc9fe6c3cb4e77f2f", + "zh:756586535d11698a216291c06b9ed8a5cc6a4ec43eee1ee09ecd5c6a9e297ac1", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:9d5eea62fdb587eeb96a8c4d782459f4e6b73baeece4d04b4a40e44faaee9301", + "zh:a6355f596a3fb8fc85c2fb054ab14e722991533f87f928e7169a486462c74670", + "zh:b5a65a789cff4ada58a5baffc76cb9767dc26ec6b45c00d2ec8b1b027f6db4ed", + "zh:db5ab669cf11d0e9f81dc380a6fdfcac437aea3d69109c7aef1a5426639d2d65", + "zh:de655d251c470197bcbb5ac45d289595295acb8f829f6c781d4a75c8c8b7c7dd", + "zh:f5c68199f2e6076bce92a12230434782bf768103a427e9bb9abee99b116af7b5", + ] +} diff --git a/terraform-unity/modules/terraform-unity-sps-initiators/README.md b/terraform-unity/modules/terraform-unity-sps-initiators/README.md new file mode 100644 index 00000000..6d0bcd43 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-initiators/README.md @@ -0,0 +1,50 @@ + +## Requirements + +| Name | Version | +|------|---------| +| [terraform](#requirement\_terraform) | ~> 1.8.2 | +| [aws](#requirement\_aws) | 5.50.0 | + +## Providers + +| Name | Version | +|------|---------| +| [aws](#provider\_aws) | 5.50.0 | + +## Modules + +| Name | Source | Version | +|------|--------|---------| +| [s3\_bucket\_notification](#module\_s3\_bucket\_notification) | git@github.com:unity-sds/unity-initiator.git//terraform-unity/triggers/s3-bucket-notification | unity-sps-2.2.0 | +| [unity\_initiator](#module\_unity\_initiator) | git@github.com:unity-sds/unity-initiator.git//terraform-unity/initiator | unity-sps-2.2.0 | + +## Resources + +| Name | Type | +|------|------| +| [aws_s3_bucket.code](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/s3_bucket) | resource | +| [aws_s3_bucket.config](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/s3_bucket) | resource | +| [aws_s3_bucket.inbound_staging_location](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/s3_bucket) | resource | +| [aws_s3_object.isl_stacam_rawdp_folder](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/s3_object) | resource | +| [aws_s3_object.router_config](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/s3_object) | resource | +| [aws_ssm_parameter.airflow_api_url](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/ssm_parameter) | data source | +| [aws_ssm_parameter.ogc_processes_api_url](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/ssm_parameter) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [airflow\_api\_url\_ssm\_param](#input\_airflow\_api\_url\_ssm\_param) | The SSM parameter name for the Airflow API URL | `string` | n/a | yes | +| [airflow\_webserver\_password](#input\_airflow\_webserver\_password) | The password for the Airflow webserver and UI. | `string` | n/a | yes | +| [airflow\_webserver\_username](#input\_airflow\_webserver\_username) | The username for the Airflow webserver and UI. 
| `string` | n/a | yes | +| [ogc\_processes\_api\_url\_ssm\_param](#input\_ogc\_processes\_api\_url\_ssm\_param) | The SSM parameter name for the OGC Processes API URL | `string` | n/a | yes | +| [project](#input\_project) | The project or mission deploying Unity SPS | `string` | n/a | yes | +| [release](#input\_release) | The software release version. | `string` | n/a | yes | +| [service\_area](#input\_service\_area) | The service area owner of the resources being deployed | `string` | n/a | yes | +| [venue](#input\_venue) | The MCP venue in which the cluster will be deployed (dev, test, prod) | `string` | n/a | yes | + +## Outputs + +No outputs. + diff --git a/terraform-unity/modules/terraform-unity-sps-initiators/data.tf b/terraform-unity/modules/terraform-unity-sps-initiators/data.tf new file mode 100644 index 00000000..8a0304f0 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-initiators/data.tf @@ -0,0 +1,7 @@ +data "aws_ssm_parameter" "airflow_api_url" { + name = var.airflow_api_url_ssm_param +} + +data "aws_ssm_parameter" "ogc_processes_api_url" { + name = var.ogc_processes_api_url_ssm_param +} diff --git a/terraform-unity/modules/terraform-unity-sps-initiators/locals.tf b/terraform-unity/modules/terraform-unity-sps-initiators/locals.tf new file mode 100644 index 00000000..e61981e2 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-initiators/locals.tf @@ -0,0 +1,16 @@ + +locals { + resource_name_prefix = join("-", compact([var.project, var.venue, var.service_area, "%s"])) + common_tags = { + Name = "" + Venue = var.venue + Proj = var.project + ServiceArea = var.service_area + CapVersion = var.release + Component = "" + CreatedBy = var.service_area + Env = var.venue + mission = var.project + Stack = "" + } +} diff --git a/terraform-unity/modules/terraform-unity-sps-initiators/main.tf b/terraform-unity/modules/terraform-unity-sps-initiators/main.tf new file mode 100644 index 00000000..76756982 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-initiators/main.tf @@ -0,0 +1,65 @@ +resource "aws_s3_bucket" "inbound_staging_location" { + bucket = format(local.resource_name_prefix, "isl") + force_destroy = true + tags = merge(local.common_tags, { + Name = format(local.resource_name_prefix, "S3-ISL") + Component = "S3" + Stack = "S3" + }) +} + +resource "aws_s3_bucket" "code" { + bucket = format(local.resource_name_prefix, "code") + force_destroy = true + tags = merge(local.common_tags, { + Name = format(local.resource_name_prefix, "S3-code") + Component = "S3" + Stack = "S3" + }) +} + +resource "aws_s3_bucket" "config" { + bucket = format(local.resource_name_prefix, "config") + force_destroy = true + tags = merge(local.common_tags, { + Name = format(local.resource_name_prefix, "S3-config") + Component = "S3" + Stack = "S3" + }) +} + +resource "aws_s3_object" "router_config" { + bucket = aws_s3_bucket.config.id + key = "routers/srl_router.yaml" + content = templatefile("${path.module}/../../../unity-initiator/routers/srl_router.tmpl.yaml", { + airflow_base_api_endpoint = data.aws_ssm_parameter.airflow_api_url.value + airflow_username = var.airflow_webserver_username + airflow_password = var.airflow_webserver_password + ogc_processes_base_api_endpoint = data.aws_ssm_parameter.ogc_processes_api_url.value + }) + tags = merge(local.common_tags, { + Name = format(local.resource_name_prefix, "S3-router") + Component = "S3" + Stack = "S3" + }) +} + +module "unity_initiator" { + source = 
"git@github.com:unity-sds/unity-initiator.git//terraform-unity/initiator?ref=unity-sps-2.2.0" + code_bucket = aws_s3_bucket.code.id + project = var.project + router_config = "s3://${aws_s3_bucket.config.id}/${aws_s3_object.router_config.key}" + venue = var.venue +} + +resource "aws_s3_object" "isl_stacam_rawdp_folder" { + bucket = aws_s3_bucket.inbound_staging_location.id + key = "STACAM/RawDP/" +} + +module "s3_bucket_notification" { + source = "git@github.com:unity-sds/unity-initiator.git//terraform-unity/triggers/s3-bucket-notification?ref=unity-sps-2.2.0" + initiator_topic_arn = module.unity_initiator.initiator_topic_arn + isl_bucket = aws_s3_bucket.inbound_staging_location.id + isl_bucket_prefix = "STACAM/RawDP/" +} diff --git a/terraform-unity/modules/terraform-unity-sps-initiators/outputs.tf b/terraform-unity/modules/terraform-unity-sps-initiators/outputs.tf new file mode 100644 index 00000000..e69de29b diff --git a/terraform-unity/modules/terraform-unity-sps-initiators/variables.tf b/terraform-unity/modules/terraform-unity-sps-initiators/variables.tf new file mode 100644 index 00000000..f9701f9d --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-initiators/variables.tf @@ -0,0 +1,39 @@ +variable "project" { + description = "The project or mission deploying Unity SPS" + type = string +} + +variable "venue" { + description = "The MCP venue in which the cluster will be deployed (dev, test, prod)" + type = string +} + +variable "service_area" { + description = "The service area owner of the resources being deployed" + type = string +} + +variable "release" { + description = "The software release version." + type = string +} + +variable "airflow_api_url_ssm_param" { + description = "The SSM parameter name for the Airflow API URL" + type = string +} + +variable "airflow_webserver_username" { + description = "The username for the Airflow webserver and UI." + type = string +} + +variable "airflow_webserver_password" { + description = "The password for the Airflow webserver and UI." + type = string +} + +variable "ogc_processes_api_url_ssm_param" { + description = "The SSM parameter name for the OGC Processes API URL" + type = string +} diff --git a/terraform-unity/modules/terraform-unity-sps-initiators/versions.tf b/terraform-unity/modules/terraform-unity-sps-initiators/versions.tf new file mode 100644 index 00000000..cd96074b --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-initiators/versions.tf @@ -0,0 +1,9 @@ +terraform { + required_version = "~> 1.8.2" + required_providers { + aws = { + source = "hashicorp/aws" + version = "5.50.0" + } + } +} diff --git a/terraform-unity/modules/terraform-unity-sps-karpenter-node-config/.terraform.lock.hcl b/terraform-unity/modules/terraform-unity-sps-karpenter-node-config/.terraform.lock.hcl new file mode 100644 index 00000000..e7401c67 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-karpenter-node-config/.terraform.lock.hcl @@ -0,0 +1,64 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. 
+ +provider "registry.terraform.io/hashicorp/aws" { + version = "5.62.0" + hashes = [ + "h1:X3LAZdkVhb/77gTlhPwKYCA9oblBCSu866fZDDOojPY=", + "zh:1f366cbcda72fb123015439a42ab19f96e10ce4edb404273f4e1b7e06da20b73", + "zh:25f098454a34b483279e0382b24b4f42e51c067222c6e797eda5d3ec33b9beb1", + "zh:4b59d48b527e3cefd73f196853bfc265b3e1e57b55c1c8a2d12ff6e3534b4f07", + "zh:7bb88c1ca95e2b3f0f1fe8636925133b9813fc5b137cc467ba6a233ddf4b360e", + "zh:8a93dece40e816c92647e762839d0370e9cad2aa21dc4ca95baee9385f116459", + "zh:8dfe82c55ab8f633c1e2a39c687e9ca8c892d1c2005bf5166ac396ce868ecd05", + "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", + "zh:a754952d69b4860480d5207390e3ab42350c964dbca9a5ac0c6912dd24b4c11d", + "zh:b2a4dbf4abee0e9ec18c5d323b99defdcd3c681f8c4306fb6e02cff7de038f85", + "zh:b57d84be258b571c04271015f03858ab215768b82e47c11ecd86e789d577030a", + "zh:be811b03289407c8d59e6b199bf16e6071165565ffe502148172d0886cf849c4", + "zh:d4144c7366c840eff1ac15ba13d96063f798f0983d24053a832362033624fe6f", + "zh:d88612856d453c4e10c49c76e4ef522b7d068b4f7c3e2e0b03dd74540986eecd", + "zh:e8bd231a5d0786cc4aab8471bb6dabd5a5df1c598afda077a9f27987ada57b67", + "zh:ffb40a66b4d000a8ee4c54227eeb998f887ad867419c3af7d3981587788de074", + ] +} + +provider "registry.terraform.io/hashicorp/kubernetes" { + version = "2.29.0" + constraints = "2.29.0" + hashes = [ + "h1:7C1MinWhowW8EnlSYhhAFV3bte8x5YcSF5QxUPdoXDk=", + "zh:3edd5dc319b95fe94e61b82d10c1ce7fb53a2f21b067ddb742f2d7d0d19dd113", + "zh:4b9096e6d0cfa0efd4c89270e3d25fea49db570e2cfbe49c5d1de085a15f2578", + "zh:5397573838bcb8844248c8d6ac93cca7f39a0b707ac3ce7a7b306c50c261c195", + "zh:5d635370720d356b7bcb5756ca28de3275ca32ca1ef0201414caecd3a14759ac", + "zh:71a52280408f3fb0ff1866a9ab8059b0d9bde5481869658798e0773461f22eff", + "zh:748663ef0248d2d95f5dea2974332432a395165657856878c5dc6f000b37cc25", + "zh:7fbc1e084bbbb51e31afd3df0c77e833ae59e88cf42b9e2c17b0b1a1e3894723", + "zh:ae89b4be473b446270fa24dc1ef51b0cc4c2a528d9838ec15246d28bac165df3", + "zh:b6433970d680a0cc9898f915224508b5ece86ae4418372fa6bebd2a9d344f226", + "zh:bf871955cf49015e6a0433e814a22a109c1537a775b8b5dc7b37ad05c324904a", + "zh:c16fac91b2197b443a191d98cf37424feed550387ab11bd1427bde819722005e", + "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", + ] +} + +provider "registry.terraform.io/hashicorp/null" { + version = "3.2.2" + constraints = "3.2.2" + hashes = [ + "h1:IMVAUHKoydFrlPrl9OzasDnw/8ntZFerCC9iXw1rXQY=", + "zh:3248aae6a2198f3ec8394218d05bd5e42be59f43a3a7c0b71c66ec0df08b69e7", + "zh:32b1aaa1c3013d33c245493f4a65465eab9436b454d250102729321a44c8ab9a", + "zh:38eff7e470acb48f66380a73a5c7cdd76cc9b9c9ba9a7249c7991488abe22fe3", + "zh:4c2f1faee67af104f5f9e711c4574ff4d298afaa8a420680b0cb55d7bbc65606", + "zh:544b33b757c0b954dbb87db83a5ad921edd61f02f1dc86c6186a5ea86465b546", + "zh:696cf785090e1e8cf1587499516b0494f47413b43cb99877ad97f5d0de3dc539", + "zh:6e301f34757b5d265ae44467d95306d61bef5e41930be1365f5a8dcf80f59452", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:913a929070c819e59e94bb37a2a253c228f83921136ff4a7aa1a178c7cce5422", + "zh:aa9015926cd152425dbf86d1abdbc74bfe0e1ba3d26b3db35051d7b9ca9f72ae", + "zh:bb04798b016e1e1d49bcc76d62c53b56c88c63d6f2dfe38821afef17c416a0e1", + "zh:c23084e1b23577de22603cff752e59128d83cfecc2e6819edadd8cf7a10af11e", + ] +} diff --git a/terraform-unity/modules/terraform-unity-sps-karpenter-node-config/README.md b/terraform-unity/modules/terraform-unity-sps-karpenter-node-config/README.md new file mode 100644 index 00000000..9e382faa 
--- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-karpenter-node-config/README.md @@ -0,0 +1,55 @@ + +## Requirements + +| Name | Version | +|------|---------| +| [terraform](#requirement\_terraform) | ~> 1.8.2 | +| [aws](#requirement\_aws) | 5.50.0 | +| [kubernetes](#requirement\_kubernetes) | 2.29.0 | +| [null](#requirement\_null) | 3.2.2 | + +## Providers + +| Name | Version | +|------|---------| +| [aws](#provider\_aws) | 5.62.0 | +| [kubernetes](#provider\_kubernetes) | 2.29.0 | +| [null](#provider\_null) | 3.2.2 | + +## Modules + +No modules. + +## Resources + +| Name | Type | +|------|------| +| [kubernetes_manifest.karpenter_node_classes](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/manifest) | resource | +| [kubernetes_manifest.karpenter_node_pools](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/manifest) | resource | +| [null_resource.remove_node_class_finalizers](https://registry.terraform.io/providers/hashicorp/null/3.2.2/docs/resources/resource) | resource | +| [aws_ami.al2_eks_optimized](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/ami) | data source | +| [aws_eks_cluster.cluster](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/eks_cluster) | data source | +| [aws_iam_role.cluster_iam_role](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/iam_role) | data source | +| [aws_ssm_parameter.al2_eks_optimized_ami](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/ssm_parameter) | data source | +| [aws_ssm_parameter.subnet_ids](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/ssm_parameter) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [karpenter\_node\_classes](#input\_karpenter\_node\_classes) | n/a |
<pre>map(object({<br>    volume_size = string<br>  }))</pre>
| n/a | yes | +| [karpenter\_node\_pools](#input\_karpenter\_node\_pools) | Configuration for Karpenter node pools |
<pre>map(object({<br>    requirements : list(object({<br>      key : string<br>      operator : string<br>      values : list(string)<br>    }))<br>    nodeClassRef : string<br>    limits : object({<br>      cpu : string<br>      memory : string<br>    })<br>    disruption : object({<br>      consolidationPolicy : string<br>      consolidateAfter : string<br>    })<br>  }))</pre>
| n/a | yes | +| [kubeconfig\_filepath](#input\_kubeconfig\_filepath) | The path to the kubeconfig file for the Kubernetes cluster. | `string` | n/a | yes | +| [mcp\_ami\_owner\_id](#input\_mcp\_ami\_owner\_id) | The ID of the MCP AMIs | `string` | n/a | yes | +| [project](#input\_project) | The project or mission deploying Unity SPS | `string` | n/a | yes | +| [release](#input\_release) | The software release version. | `string` | n/a | yes | +| [service\_area](#input\_service\_area) | The service area owner of the resources being deployed | `string` | n/a | yes | +| [venue](#input\_venue) | The MCP venue in which the cluster will be deployed (dev, test, prod) | `string` | n/a | yes | + +## Outputs + +| Name | Description | +|------|-------------| +| [karpenter\_node\_class\_names](#output\_karpenter\_node\_class\_names) | Names of the Karpenter node classes | +| [karpenter\_node\_pools](#output\_karpenter\_node\_pools) | Names of the Karpenter node pools | + diff --git a/terraform-unity/modules/terraform-unity-sps-karpenter-node-config/data.tf b/terraform-unity/modules/terraform-unity-sps-karpenter-node-config/data.tf new file mode 100644 index 00000000..fa1a7c35 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-karpenter-node-config/data.tf @@ -0,0 +1,23 @@ +data "aws_eks_cluster" "cluster" { + name = format(local.resource_name_prefix, "eks") +} + +data "aws_iam_role" "cluster_iam_role" { + name = "${format(local.resource_name_prefix, "eks")}-eks-node-role" +} + +data "aws_ssm_parameter" "subnet_ids" { + name = "/unity/account/network/subnet_list" +} + +data "aws_ssm_parameter" "al2_eks_optimized_ami" { + name = "/mcp/amis/aml2-eks-${replace(data.aws_eks_cluster.cluster.version, ".", "-")}" +} + +data "aws_ami" "al2_eks_optimized" { + filter { + name = "image-id" + values = [data.aws_ssm_parameter.al2_eks_optimized_ami.value] + } + owners = [var.mcp_ami_owner_id] +} diff --git a/terraform-unity/modules/terraform-unity-sps-karpenter-node-config/locals.tf b/terraform-unity/modules/terraform-unity-sps-karpenter-node-config/locals.tf new file mode 100644 index 00000000..e61981e2 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-karpenter-node-config/locals.tf @@ -0,0 +1,16 @@ + +locals { + resource_name_prefix = join("-", compact([var.project, var.venue, var.service_area, "%s"])) + common_tags = { + Name = "" + Venue = var.venue + Proj = var.project + ServiceArea = var.service_area + CapVersion = var.release + Component = "" + CreatedBy = var.service_area + Env = var.venue + mission = var.project + Stack = "" + } +} diff --git a/terraform-unity/modules/terraform-unity-sps-karpenter-node-config/main.tf b/terraform-unity/modules/terraform-unity-sps-karpenter-node-config/main.tf new file mode 100644 index 00000000..52e7f717 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-karpenter-node-config/main.tf @@ -0,0 +1,113 @@ +resource "kubernetes_manifest" "karpenter_node_classes" { + for_each = var.karpenter_node_classes + + manifest = { + apiVersion = "karpenter.k8s.aws/v1beta1" + kind = "EC2NodeClass" + metadata = { + name = each.key + } + spec = { + amiFamily = "AL2" + amiSelectorTerms = [{ + id = data.aws_ami.al2_eks_optimized.image_id + }] + userData = <<-EOT + #!/bin/bash + echo "Starting pre-bootstrap configurations..." + # Custom script to enable IP forwarding + sudo sed -i 's/^net.ipv4.ip_forward = 0/net.ipv4.ip_forward = 1/' /etc/sysctl.conf && sudo sysctl -p |true + echo "Pre-bootstrap configurations applied." 
+ EOT + role = data.aws_iam_role.cluster_iam_role.name + subnetSelectorTerms = [for subnet_id in jsondecode(data.aws_ssm_parameter.subnet_ids.value)["private"] : { + id = subnet_id + }] + securityGroupSelectorTerms = [{ + tags = { + "kubernetes.io/cluster/${data.aws_eks_cluster.cluster.name}" = "owned" + "Name" = "${data.aws_eks_cluster.cluster.name}-node" + } + }] + blockDeviceMappings = [for bd in tolist(data.aws_ami.al2_eks_optimized.block_device_mappings) : { + deviceName = bd.device_name + ebs = { + volumeSize = each.value.volume_size + volumeType = bd.ebs.volume_type + encrypted = bd.ebs.encrypted + deleteOnTermination = bd.ebs.delete_on_termination + } + }] + metadataOptions = { + httpEndpoint = "enabled" + httpPutResponseHopLimit = 3 + } + tags = merge(local.common_tags, { + "karpenter.sh/discovery" = data.aws_eks_cluster.cluster.name + Name = format(local.resource_name_prefix, "karpenter") + Component = "karpenter" + Stack = "karpenter" + }) + } + } +} + +resource "null_resource" "remove_node_class_finalizers" { + # https://github.com/aws/karpenter-provider-aws/issues/5079 + for_each = var.karpenter_node_classes + + provisioner "local-exec" { + when = destroy + command = < +## Requirements + +| Name | Version | +|------|---------| +| [terraform](#requirement\_terraform) | ~> 1.8.2 | +| [aws](#requirement\_aws) | 5.47.0 | +| [helm](#requirement\_helm) | 2.13.1 | +| [kubernetes](#requirement\_kubernetes) | 2.29.0 | + +## Providers + +| Name | Version | +|------|---------| +| [aws](#provider\_aws) | 5.47.0 | +| [helm](#provider\_helm) | 2.13.1 | + +## Modules + +| Name | Source | Version | +|------|--------|---------| +| [karpenter](#module\_karpenter) | terraform-aws-modules/eks/aws//modules/karpenter | 20.8.5 | + +## Resources + +| Name | Type | +|------|------| +| [helm_release.karpenter](https://registry.terraform.io/providers/hashicorp/helm/2.13.1/docs/resources/release) | resource | +| [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/data-sources/caller_identity) | data source | +| [aws_eks_cluster.cluster](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/data-sources/eks_cluster) | data source | +| [aws_eks_cluster_auth.cluster](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/data-sources/eks_cluster_auth) | data source | +| [aws_iam_role.cluster_iam_role](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/data-sources/iam_role) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [helm\_charts](#input\_helm\_charts) | Helm charts for the associated services. |
<pre>map(object({<br>    repository = string<br>    chart = string<br>    version = string<br>  }))</pre> | <pre>{<br>  "karpenter": {<br>    "chart": "karpenter",<br>    "repository": "oci://public.ecr.aws/karpenter",<br>    "version": "0.36.1"<br>  }<br>}</pre>
| no | +| [project](#input\_project) | The project or mission deploying Unity SPS | `string` | `"unity"` | no | +| [release](#input\_release) | The software release version. | `string` | `"24.3"` | no | +| [service\_area](#input\_service\_area) | The service area owner of the resources being deployed | `string` | `"sps"` | no | +| [venue](#input\_venue) | The MCP venue in which the cluster will be deployed (dev, test, prod) | `string` | n/a | yes | + +## Outputs + +No outputs. + diff --git a/terraform-unity/modules/terraform-unity-sps-karpenter/locals.tf b/terraform-unity/modules/terraform-unity-sps-karpenter/locals.tf index f4272efd..3114b16a 100644 --- a/terraform-unity/modules/terraform-unity-sps-karpenter/locals.tf +++ b/terraform-unity/modules/terraform-unity-sps-karpenter/locals.tf @@ -1,7 +1,6 @@ locals { - counter = var.counter != "" ? var.counter : random_id.counter.hex - resource_name_prefix = join("-", compact([var.project, var.venue, var.service_area, "%s", var.deployment_name, local.counter])) + resource_name_prefix = join("-", compact([var.project, var.venue, var.service_area, "%s"])) cluster_name = format(local.resource_name_prefix, "eks") common_tags = { Name = "" diff --git a/terraform-unity/modules/terraform-unity-sps-karpenter/main.tf b/terraform-unity/modules/terraform-unity-sps-karpenter/main.tf index 90a57d26..80562081 100644 --- a/terraform-unity/modules/terraform-unity-sps-karpenter/main.tf +++ b/terraform-unity/modules/terraform-unity-sps-karpenter/main.tf @@ -1,19 +1,13 @@ -# S3 backend terraform { backend "s3" { - # full path to Terraform state file: - # s3:/// - bucket = "" - key = "" - region = "us-west-2" - encrypt = true + bucket = "unity-unity-dev-bucket" + workspace_key_prefix = "sps/tfstates" + key = "terraform.tfstate" + region = "us-west-2" + encrypt = true } } -resource "random_id" "counter" { - byte_length = 2 -} - module "karpenter" { source = "terraform-aws-modules/eks/aws//modules/karpenter" version = "20.8.5" diff --git a/terraform-unity/modules/terraform-unity-sps-karpenter/variables.tf b/terraform-unity/modules/terraform-unity-sps-karpenter/variables.tf index e8570c37..97efa0c4 100644 --- a/terraform-unity/modules/terraform-unity-sps-karpenter/variables.tf +++ b/terraform-unity/modules/terraform-unity-sps-karpenter/variables.tf @@ -15,20 +15,10 @@ variable "service_area" { default = "sps" } -variable "deployment_name" { - description = "The name of the deployment." - type = string -} - -variable "counter" { - description = "Identifier used to uniquely distinguish resources. This is used in the naming convention of the resource. If left empty, a random hexadecimal value will be generated and used instead." - type = string -} - variable "release" { description = "The software release version." 
type = string - default = "24.2" + default = "24.3" } variable "helm_charts" { diff --git a/terraform-unity/modules/terraform-unity-sps-karpenter/versions.tf b/terraform-unity/modules/terraform-unity-sps-karpenter/versions.tf index 13a5467f..9b62f56f 100644 --- a/terraform-unity/modules/terraform-unity-sps-karpenter/versions.tf +++ b/terraform-unity/modules/terraform-unity-sps-karpenter/versions.tf @@ -13,9 +13,5 @@ terraform { source = "hashicorp/aws" version = "5.47.0" } - random = { - source = "hashicorp/random" - version = "3.6.1" - } } } diff --git a/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/.terraform.lock.hcl b/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/.terraform.lock.hcl new file mode 100644 index 00000000..c5309037 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/.terraform.lock.hcl @@ -0,0 +1,45 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. + +provider "registry.terraform.io/hashicorp/aws" { + version = "5.50.0" + constraints = "5.50.0" + hashes = [ + "h1:LevuTzPS4S7t+Vh6Kpz77pBNDAwChaos91/6+CVnD4w=", + "zh:19be42f5a545d6712dee4bdb704b018d23bacf5d902ac3cb061eb1750dfe6a20", + "zh:1d880bdba95ce96efde37e5bcf457a57df2c1effa9b47bc67fa29c1a264ae53b", + "zh:1e9c78e324d7492be5e7744436ed71d66fe4eca3fb6af07a28efd0d1e3bf7640", + "zh:27ac672aa61b3795931561fdbe4a306ad1132af517d7711c14569429b2cc694f", + "zh:3b978423dead02f9a98d25de118adf264a2331acdc4550ea93bed01feabc12e7", + "zh:490d7eb4b922ba1b57e0ab8dec1a08df6517485febcab1e091fd6011281c3472", + "zh:64e7c84e18dac1af5778d6f516e01a46f9c91d710867c39fbc7efa3cd972dc62", + "zh:73867ac2956dcdd377121b3aa8fe2e1085e77fae9b61d018f56a863277ea4b6e", + "zh:7ed899d0d5c49f009b445d7816e4bf702d9c48205c24cf884cd2ae0247160455", + "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", + "zh:9b93784b3fb13d08cf95a4131c49b56bf7e1cd35daad6156b3658a89ce6fb58f", + "zh:b29d77eb75de474e46eb47e539c48916628d85599bcf14e5cc500b14a4578e75", + "zh:bbd9cec8ca705452e4a3d21d56474eacb8cc7b1b74b7f310fdea4bdcffebab32", + "zh:c352eb3169efa0e27a29b99a2630e8298710a084453c519caa39e5972ff6d1fc", + "zh:e32f4744b43be1708b309a734e0ac10b5c0f9f92e5849298cf1a90f2b906f6f3", + ] +} + +provider "registry.terraform.io/hashicorp/kubernetes" { + version = "2.29.0" + constraints = "2.29.0" + hashes = [ + "h1:7C1MinWhowW8EnlSYhhAFV3bte8x5YcSF5QxUPdoXDk=", + "zh:3edd5dc319b95fe94e61b82d10c1ce7fb53a2f21b067ddb742f2d7d0d19dd113", + "zh:4b9096e6d0cfa0efd4c89270e3d25fea49db570e2cfbe49c5d1de085a15f2578", + "zh:5397573838bcb8844248c8d6ac93cca7f39a0b707ac3ce7a7b306c50c261c195", + "zh:5d635370720d356b7bcb5756ca28de3275ca32ca1ef0201414caecd3a14759ac", + "zh:71a52280408f3fb0ff1866a9ab8059b0d9bde5481869658798e0773461f22eff", + "zh:748663ef0248d2d95f5dea2974332432a395165657856878c5dc6f000b37cc25", + "zh:7fbc1e084bbbb51e31afd3df0c77e833ae59e88cf42b9e2c17b0b1a1e3894723", + "zh:ae89b4be473b446270fa24dc1ef51b0cc4c2a528d9838ec15246d28bac165df3", + "zh:b6433970d680a0cc9898f915224508b5ece86ae4418372fa6bebd2a9d344f226", + "zh:bf871955cf49015e6a0433e814a22a109c1537a775b8b5dc7b37ad05c324904a", + "zh:c16fac91b2197b443a191d98cf37424feed550387ab11bd1427bde819722005e", + "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", + ] +} diff --git a/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/README.md b/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/README.md new file mode 100644 index 00000000..05e6521c --- /dev/null +++ 
b/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/README.md @@ -0,0 +1,72 @@ + +## Requirements + +| Name | Version | +|------|---------| +| [terraform](#requirement\_terraform) | ~> 1.8.2 | +| [aws](#requirement\_aws) | 5.50.0 | +| [kubernetes](#requirement\_kubernetes) | 2.29.0 | + +## Providers + +| Name | Version | +|------|---------| +| [aws](#provider\_aws) | 5.50.0 | +| [kubernetes](#provider\_kubernetes) | 2.29.0 | + +## Modules + +No modules. + +## Resources + +| Name | Type | +|------|------| +| [aws_lambda_invocation.unity_proxy_lambda_invocation](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/lambda_invocation) | resource | +| [aws_security_group.ogc_ingress_sg](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/security_group) | resource | +| [aws_ssm_parameter.ogc_processes_api_health_check_endpoint](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/ssm_parameter) | resource | +| [aws_ssm_parameter.ogc_processes_api_url](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/ssm_parameter) | resource | +| [aws_ssm_parameter.ogc_processes_ui_url](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/ssm_parameter) | resource | +| [aws_ssm_parameter.unity_proxy_ogc_api](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/ssm_parameter) | resource | +| [aws_vpc_security_group_ingress_rule.ogc_ingress_sg_jpl_rule](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/vpc_security_group_ingress_rule) | resource | +| [aws_vpc_security_group_ingress_rule.ogc_ingress_sg_proxy_rule](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/resources/vpc_security_group_ingress_rule) | resource | +| [kubernetes_deployment.ogc_processes_api](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/deployment) | resource | +| [kubernetes_deployment.redis](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/deployment) | resource | +| [kubernetes_ingress_v1.ogc_processes_api_ingress](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/ingress_v1) | resource | +| [kubernetes_service.ogc_processes_api](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/service) | resource | +| [kubernetes_service.redis](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/service) | resource | +| [aws_db_instance.db](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/db_instance) | data source | +| [aws_eks_cluster.cluster](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/eks_cluster) | data source | +| [aws_lambda_functions.lambda_check_all](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/lambda_functions) | data source | +| [aws_secretsmanager_secret_version.db](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/secretsmanager_secret_version) | data source | +| [aws_security_groups.venue_proxy_sg](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/security_groups) | data source | +| [aws_ssm_parameter.subnet_ids](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/ssm_parameter) | data source | +| [aws_vpc.cluster_vpc](https://registry.terraform.io/providers/hashicorp/aws/5.50.0/docs/data-sources/vpc) | 
data source | +| [kubernetes_ingress_v1.ogc_processes_api_ingress](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/data-sources/ingress_v1) | data source | +| [kubernetes_namespace.service_area](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/data-sources/namespace) | data source | +| [kubernetes_persistent_volume_claim.airflow_deployed_dags](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/data-sources/persistent_volume_claim) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [airflow\_deployed\_dags\_pvc](#input\_airflow\_deployed\_dags\_pvc) | The name of the PVC for Airflow deployed DAGs | `string` | n/a | yes | +| [airflow\_webserver\_password](#input\_airflow\_webserver\_password) | The password for the Airflow webserver and UI. | `string` | n/a | yes | +| [airflow\_webserver\_username](#input\_airflow\_webserver\_username) | The username for the Airflow webserver and UI. | `string` | n/a | yes | +| [dag\_catalog\_repo](#input\_dag\_catalog\_repo) | Git repository that stores the catalog of Airflow DAGs. |
<pre>object({<br>    url = string<br>    ref = string<br>    dags_directory_path = string<br>  })</pre>
| n/a | yes | +| [db\_instance\_identifier](#input\_db\_instance\_identifier) | The AWS DB instance identifier | `string` | n/a | yes | +| [db\_secret\_arn](#input\_db\_secret\_arn) | The version of the database secret in AWS Secrets Manager | `string` | n/a | yes | +| [docker\_images](#input\_docker\_images) | Docker images for the associated services. |
<pre>object({<br>    ogc_processes_api = object({<br>      name = string<br>      tag = string<br>    })<br>    git_sync = object({<br>      name = string<br>      tag = string<br>    })<br>    redis = object({<br>      name = string<br>      tag = string<br>    })<br>  })</pre>
| n/a | yes | +| [karpenter\_node\_pools](#input\_karpenter\_node\_pools) | Names of the Karpenter node pools | `list(string)` | n/a | yes | +| [kubernetes\_namespace](#input\_kubernetes\_namespace) | The kubernetes namespace for the API's resources. | `string` | n/a | yes | +| [project](#input\_project) | The project or mission deploying Unity SPS | `string` | n/a | yes | +| [release](#input\_release) | The software release version. | `string` | n/a | yes | +| [service\_area](#input\_service\_area) | The service area owner of the resources being deployed | `string` | n/a | yes | +| [venue](#input\_venue) | The MCP venue in which the cluster will be deployed (dev, test, prod) | `string` | n/a | yes | + +## Outputs + +| Name | Description | +|------|-------------| +| [ogc\_processes\_urls](#output\_ogc\_processes\_urls) | SSM parameter IDs and URLs for the various OGC Processes endpoints. | + diff --git a/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/data.tf b/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/data.tf new file mode 100644 index 00000000..0a9922b8 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/data.tf @@ -0,0 +1,38 @@ +data "kubernetes_namespace" "service_area" { + metadata { + name = var.kubernetes_namespace + } +} + +data "aws_eks_cluster" "cluster" { + name = format(local.resource_name_prefix, "eks") +} + +data "aws_vpc" "cluster_vpc" { + id = data.aws_eks_cluster.cluster.vpc_config[0].vpc_id +} + +data "aws_ssm_parameter" "subnet_ids" { + name = "/unity/account/network/subnet_list" +} + +data "aws_db_instance" "db" { + db_instance_identifier = var.db_instance_identifier +} + +data "aws_secretsmanager_secret_version" "db" { + secret_id = var.db_secret_arn +} + +data "kubernetes_persistent_volume_claim" "airflow_deployed_dags" { + metadata { + name = var.airflow_deployed_dags_pvc + } +} + +data "kubernetes_ingress_v1" "ogc_processes_api_ingress" { + metadata { + name = kubernetes_ingress_v1.ogc_processes_api_ingress.metadata[0].name + namespace = data.kubernetes_namespace.service_area.metadata[0].name + } +} diff --git a/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/locals.tf b/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/locals.tf new file mode 100644 index 00000000..42a50002 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/locals.tf @@ -0,0 +1,17 @@ + +locals { + resource_name_prefix = join("-", compact([var.project, var.venue, var.service_area, "%s"])) + common_tags = { + Name = "" + Venue = var.venue + Proj = var.project + ServiceArea = var.service_area + CapVersion = var.release + Component = "" + CreatedBy = var.service_area + Env = var.venue + mission = var.project + Stack = "" + } + load_balancer_port = 5001 +} diff --git a/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/main.tf b/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/main.tf new file mode 100644 index 00000000..48adbd4e --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/main.tf @@ -0,0 +1,367 @@ +resource "kubernetes_deployment" "redis" { + metadata { + name = "ogc-processes-api-redis-lock" + namespace = data.kubernetes_namespace.service_area.metadata[0].name + } + spec { + replicas = 1 + selector { + match_labels = { + app = "redis" + } + } + template { + metadata { + labels = { + app = "redis" + } + } + spec { + container { + name = "redis" + image = "${var.docker_images.redis.name}:${var.docker_images.redis.tag}" + 
port { + container_port = 6379 + } + } + } + } + } +} + +resource "kubernetes_service" "redis" { + metadata { + name = "ogc-processes-api-redis-lock" + namespace = data.kubernetes_namespace.service_area.metadata[0].name + } + spec { + selector = { + app = "redis" + } + port { + name = "redis" + port = 6379 + target_port = 6379 + } + type = "ClusterIP" + } +} + +resource "kubernetes_deployment" "ogc_processes_api" { + metadata { + name = "ogc-processes-api" + namespace = data.kubernetes_namespace.service_area.metadata[0].name + } + spec { + replicas = 2 + selector { + match_labels = { + app = "ogc-processes-api" + } + } + template { + metadata { + labels = { + app = "ogc-processes-api" + } + } + spec { + affinity { + node_affinity { + required_during_scheduling_ignored_during_execution { + node_selector_term { + match_expressions { + key = "karpenter.sh/nodepool" + operator = "In" + values = compact([for pool in var.karpenter_node_pools : pool if pool == "airflow-core-components"]) + } + match_expressions { + key = "karpenter.sh/capacity-type" + operator = "In" + values = ["on-demand"] + } + match_expressions { + key = "karpenter.k8s.aws/instance-family" + operator = "In" + values = ["c6i", "c5"] + } + match_expressions { + key = "karpenter.k8s.aws/instance-cpu" + operator = "In" + values = ["2", "4"] + } + } + } + } + } + container { + name = "ogc-processes-api" + image = "${var.docker_images.ogc_processes_api.name}:${var.docker_images.ogc_processes_api.tag}" + port { + container_port = 80 + } + env { + name = "DB_URL" + value = "postgresql://${data.aws_db_instance.db.master_username}:${urlencode(data.aws_secretsmanager_secret_version.db.secret_string)}@${data.aws_db_instance.db.endpoint}/${data.aws_db_instance.db.db_name}" + } + env { + name = "REDIS_HOST" + value = "${kubernetes_service.redis.metadata[0].name}.${data.kubernetes_namespace.service_area.metadata[0].name}.svc.cluster.local" + + } + env { + name = "REDIS_PORT" + value = 6379 + } + env { + name = "EMS_API_URL" + value = "http://airflow-webserver.${data.kubernetes_namespace.service_area.metadata[0].name}.svc.cluster.local:8080/api/v1" + } + env { + name = "EMS_API_AUTH_USERNAME" + value = var.airflow_webserver_username + } + env { + name = "EMS_API_AUTH_PASSWORD" + value = var.airflow_webserver_password + } + env { + name = "DAG_CATALOG_DIRECTORY" + value = "/dag-catalog/current/${var.dag_catalog_repo.dags_directory_path}" + } + env { + name = "DEPLOYED_DAGS_DIRECTORY" + value = "/deployed-dags" + } + volume_mount { + name = "dag-catalog" + mount_path = "/dag-catalog" + } + volume_mount { + name = "deployed-dags" + mount_path = "/deployed-dags" + } + } + container { + name = "git-sync" + image = "${var.docker_images.git_sync.name}:${var.docker_images.git_sync.tag}" + env { + name = "GITSYNC_REPO" + value = var.dag_catalog_repo.url + } + env { + name = "GITSYNC_REF" + value = var.dag_catalog_repo.ref + } + env { + name = "GITSYNC_ROOT" + value = "/dag-catalog" + } + env { + name = "GITSYNC_LINK" + value = "current" + } + env { + name = "GITSYNC_PERIOD" + value = "3s" + } + env { + name = "GITSYNC_ONE_TIME" + value = "false" + } + volume_mount { + name = "dag-catalog" + mount_path = "/dag-catalog" + } + } + volume { + name = "deployed-dags" + persistent_volume_claim { + claim_name = data.kubernetes_persistent_volume_claim.airflow_deployed_dags.metadata[0].name + } + } + volume { + name = "dag-catalog" + empty_dir {} + } + } + } + } +} + +resource "kubernetes_service" "ogc_processes_api" { + metadata { + name = "ogc-processes-api" 
+ namespace = data.kubernetes_namespace.service_area.metadata[0].name + } + spec { + selector = { + app = "ogc-processes-api" + } + port { + port = 80 + target_port = 80 + } + type = "ClusterIP" + } +} + +resource "aws_security_group" "ogc_ingress_sg" { + name = "${var.project}-${var.venue}-ogc-ingress-sg" + description = "SecurityGroup for OGC API LoadBalancer ingress" + vpc_id = data.aws_vpc.cluster_vpc.id + tags = merge(local.common_tags, { + Name = format(local.resource_name_prefix, "OgcLBSg") + Component = "ogc" + Stack = "ogc" + }) +} + +#tfsec:ignore:AVD-AWS-0107 +resource "aws_vpc_security_group_ingress_rule" "ogc_ingress_sg_jpl_rule" { + for_each = toset(["128.149.0.0/16", "137.78.0.0/16", "137.79.0.0/16"]) + security_group_id = aws_security_group.ogc_ingress_sg.id + description = "SecurityGroup ingress rule for JPL-local addresses" + ip_protocol = "tcp" + from_port = local.load_balancer_port + to_port = local.load_balancer_port + cidr_ipv4 = each.key +} + +data "aws_security_groups" "venue_proxy_sg" { + filter { + name = "group-name" + values = ["${var.project}-${var.venue}-ecs_service_sg"] + } + tags = { + Service = "U-CS" + } +} + +resource "aws_vpc_security_group_ingress_rule" "ogc_ingress_sg_proxy_rule" { + count = length(data.aws_security_groups.venue_proxy_sg.ids) > 0 ? 1 : 0 + security_group_id = aws_security_group.ogc_ingress_sg.id + description = "SecurityGroup ingress rule for venue-services proxy" + ip_protocol = "tcp" + from_port = local.load_balancer_port + to_port = local.load_balancer_port + referenced_security_group_id = data.aws_security_groups.venue_proxy_sg.ids[0] +} + +resource "kubernetes_ingress_v1" "ogc_processes_api_ingress" { + metadata { + name = "ogc-processes-api-ingress" + namespace = data.kubernetes_namespace.service_area.metadata[0].name + annotations = { + "alb.ingress.kubernetes.io/scheme" = "internet-facing" + "alb.ingress.kubernetes.io/target-type" = "ip" + "alb.ingress.kubernetes.io/subnets" = join(",", jsondecode(data.aws_ssm_parameter.subnet_ids.value)["public"]) + "alb.ingress.kubernetes.io/listen-ports" = "[{\"HTTP\": ${local.load_balancer_port}}]" + "alb.ingress.kubernetes.io/security-groups" = aws_security_group.ogc_ingress_sg.id + "alb.ingress.kubernetes.io/manage-backend-security-group-rules" = "true" + "alb.ingress.kubernetes.io/healthcheck-path" = "/health" + } + } + spec { + ingress_class_name = "alb" + rule { + http { + path { + path = "/" + path_type = "Prefix" + backend { + service { + name = kubernetes_service.ogc_processes_api.metadata[0].name + port { + number = 80 + } + } + } + } + } + } + } + wait_for_load_balancer = true +} + + +resource "aws_ssm_parameter" "ogc_processes_ui_url" { + name = format("/%s", join("/", compact(["", var.project, var.venue, var.service_area, "processing", "ogc_processes", "ui_url"]))) + description = "The URL of the OGC Proccesses API Docs UI." + type = "String" + value = "http://${data.kubernetes_ingress_v1.ogc_processes_api_ingress.status[0].load_balancer[0].ingress[0].hostname}:5001/redoc" + tags = merge(local.common_tags, { + Name = format(local.resource_name_prefix, "endpoints-ogc_processes_ui") + Component = "SSM" + Stack = "SSM" + }) +} + +resource "aws_ssm_parameter" "ogc_processes_api_url" { + name = format("/%s", join("/", compact(["", var.project, var.venue, var.service_area, "processing", "ogc_processes", "api_url"]))) + description = "The URL of the OGC Processes REST API." 
+ type = "String" + value = "http://${data.kubernetes_ingress_v1.ogc_processes_api_ingress.status[0].load_balancer[0].ingress[0].hostname}:5001" + tags = merge(local.common_tags, { + Name = format(local.resource_name_prefix, "endpoints-ogc_processes_api") + Component = "SSM" + Stack = "SSM" + }) +} + +resource "aws_ssm_parameter" "ogc_processes_api_health_check_endpoint" { + name = format("/%s", join("/", compact(["", "unity", var.project, var.venue, "component", "ogc-api"]))) + description = "The URL of the OGC Processes REST API." + type = "String" + value = jsonencode({ + "componentName" : "OGC API" + "healthCheckUrl" : "http://${data.kubernetes_ingress_v1.ogc_processes_api_ingress.status[0].load_balancer[0].ingress[0].hostname}:5001/health" + "landingPageUrl" : "http://${data.kubernetes_ingress_v1.ogc_processes_api_ingress.status[0].load_balancer[0].ingress[0].hostname}:5001" + }) + tags = merge(local.common_tags, { + Name = format(local.resource_name_prefix, "health-check-endpoints-ogc_processes_api") + Component = "SSM" + Stack = "SSM" + }) + lifecycle { + ignore_changes = [value] + } +} + +resource "aws_ssm_parameter" "unity_proxy_ogc_api" { + name = format("/%s", join("/", compact(["unity", var.project, var.venue, "cs", "management", "proxy", "configurations", "016-sps-ogc-api"]))) + description = "The unity-proxy configuration for the Airflow OGC API." + type = "String" + value = <<-EOT + + + ProxyPassReverse "/" + + + ProxyPassMatch "http://${data.kubernetes_ingress_v1.ogc_processes_api_ingress.status[0].load_balancer[0].ingress[0].hostname}:5001/$1" + ProxyPreserveHost On + FallbackResource /management/index.html + AddOutputFilterByType INFLATE;SUBSTITUTE;DEFLATE text/html + Substitute "s|\"/([^\"]*)|\"/${var.project}/${var.venue}/ogc/$1|q" + + +EOT + tags = merge(local.common_tags, { + Name = format(local.resource_name_prefix, "httpd-proxy-config-ogc") + Component = "SSM" + Stack = "SSM" + }) +} + +data "aws_lambda_functions" "lambda_check_all" {} + +resource "aws_lambda_invocation" "unity_proxy_lambda_invocation" { + count = contains(data.aws_lambda_functions.lambda_check_all.function_names, "unity-${var.venue}-httpdproxymanagement") ? 1 : 0 + function_name = "unity-${var.venue}-httpdproxymanagement" + input = "{}" + triggers = { + redeployment = sha1(jsonencode([ + aws_ssm_parameter.unity_proxy_ogc_api + ])) + } +} diff --git a/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/outputs.tf b/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/outputs.tf new file mode 100644 index 00000000..a8302464 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/outputs.tf @@ -0,0 +1,13 @@ +output "ogc_processes_urls" { + description = "SSM parameter IDs and URLs for the various OGC Processes endpoints." 
+ value = { + "ui" = { + "ssm_param_id" = aws_ssm_parameter.ogc_processes_ui_url.id, + "url" = nonsensitive(aws_ssm_parameter.ogc_processes_ui_url.value) + } + "rest_api" = { + "ssm_param_id" = aws_ssm_parameter.ogc_processes_api_url.id, + "url" = nonsensitive(aws_ssm_parameter.ogc_processes_api_url.value) + } + } +} diff --git a/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/variables.tf b/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/variables.tf new file mode 100644 index 00000000..30fdaf23 --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/variables.tf @@ -0,0 +1,81 @@ +variable "project" { + description = "The project or mission deploying Unity SPS" + type = string +} + +variable "venue" { + description = "The MCP venue in which the cluster will be deployed (dev, test, prod)" + type = string +} + +variable "service_area" { + description = "The service area owner of the resources being deployed" + type = string +} + +variable "release" { + description = "The software release version." + type = string +} + +variable "kubernetes_namespace" { + description = "The kubernetes namespace for the API's resources." + type = string +} + +variable "db_instance_identifier" { + description = "The AWS DB instance identifier" + type = string +} + +variable "db_secret_arn" { + description = "The version of the database secret in AWS Secrets Manager" + type = string +} + +variable "airflow_deployed_dags_pvc" { + description = "The name of the PVC for Airflow deployed DAGs" + type = string +} + +variable "airflow_webserver_username" { + description = "The username for the Airflow webserver and UI." + type = string +} + +variable "airflow_webserver_password" { + description = "The password for the Airflow webserver and UI." + type = string +} + +variable "docker_images" { + description = "Docker images for the associated services." + type = object({ + ogc_processes_api = object({ + name = string + tag = string + }) + git_sync = object({ + name = string + tag = string + }) + redis = object({ + name = string + tag = string + }) + }) +} + +variable "dag_catalog_repo" { + description = "Git repository that stores the catalog of Airflow DAGs." + type = object({ + url = string + ref = string + dags_directory_path = string + }) +} + +variable "karpenter_node_pools" { + description = "Names of the Karpenter node pools" + type = list(string) +} diff --git a/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/versions.tf b/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/versions.tf new file mode 100644 index 00000000..0bb2dd1a --- /dev/null +++ b/terraform-unity/modules/terraform-unity-sps-ogc-processes-api/versions.tf @@ -0,0 +1,13 @@ +terraform { + required_version = "~> 1.8.2" + required_providers { + aws = { + source = "hashicorp/aws" + version = "5.50.0" + } + kubernetes = { + source = "hashicorp/kubernetes" + version = "2.29.0" + } + } +} diff --git a/terraform-unity/outputs.tf b/terraform-unity/outputs.tf index 710983a2..4d1eeb00 100644 --- a/terraform-unity/outputs.tf +++ b/terraform-unity/outputs.tf @@ -1,9 +1,9 @@ output "resources" { - description = "SSM parameter IDs for pipeline resources." + description = "SSM parameter IDs for SPS resources." 
value = { "endpoints" = { "airflow" = module.unity-sps-airflow.airflow_urls - "ogc_processes" = module.unity-sps-airflow.ogc_processes_urls + "ogc_processes" = module.unity-sps-ogc-processes-api.ogc_processes_urls } "buckets" = module.unity-sps-airflow.s3_buckets } diff --git a/terraform-unity/modules/terraform-unity-sps-airflow/provider.tf b/terraform-unity/provider.tf similarity index 100% rename from terraform-unity/modules/terraform-unity-sps-airflow/provider.tf rename to terraform-unity/provider.tf diff --git a/terraform-unity/variables.tf b/terraform-unity/variables.tf index f565ad48..eed6e71a 100644 --- a/terraform-unity/variables.tf +++ b/terraform-unity/variables.tf @@ -19,21 +19,10 @@ variable "service_area" { default = "sps" } -variable "deployment_name" { - description = "The name of the deployment." - type = string -} - -variable "counter" { - description = "Identifier used to uniquely distinguish resources. This is used in the naming convention of the resource. If left empty, a random hexadecimal value will be generated and used instead." - type = string - default = "" -} - variable "release" { description = "The software release version." type = string - default = "24.2" + default = "24.3" } variable "kubeconfig_filepath" { @@ -41,6 +30,12 @@ variable "kubeconfig_filepath" { type = string } +variable "airflow_webserver_username" { + description = "The username for the Airflow webserver and UI." + type = string + default = "admin" +} + variable "airflow_webserver_password" { description = "The password for the Airflow webserver and UI." type = string @@ -67,13 +62,26 @@ variable "helm_charts" { } } -variable "docker_images" { - description = "Docker images for the associated services." +variable "airflow_docker_images" { + description = "Docker images for the associated Airflow services." type = object({ airflow = object({ name = string tag = string - }), + }) + }) + default = { + airflow = { + name = "ghcr.io/unity-sds/unity-sps/sps-airflow" + tag = "2.2.0" + } + } +} + + +variable "ogc_processes_docker_images" { + description = "Docker images for the associated OGC Processes API services." 
+ type = object({ ogc_processes_api = object({ name = string tag = string @@ -88,21 +96,17 @@ variable "docker_images" { }) }) default = { - airflow = { - name = "ghcr.io/unity-sds/unity-sps/sps-airflow" - tag = "2.1.0" - }, ogc_processes_api = { name = "ghcr.io/unity-sds/unity-sps-ogc-processes-api/unity-sps-ogc-processes-api" - tag = "1.0.0" + tag = "2.0.0" } git_sync = { name = "registry.k8s.io/git-sync/git-sync" - tag = "v4.2.3" + tag = "v4.2.4" }, redis = { name = "redis" - tag = "7.2.4" + tag = "7.4.0" } } } @@ -113,6 +117,21 @@ variable "mcp_ami_owner_id" { default = "794625662971" } +variable "karpenter_node_classes" { + description = "Configuration for karpenter_node_classes" + type = map(object({ + volume_size = string + })) + default = { + "default" = { + volume_size = "30Gi" + } + "airflow-kubernetes-pod-operator-high-workload" = { + volume_size = "300Gi" + } + } +} + variable "karpenter_node_pools" { description = "Configuration for Karpenter node pools" type = map(object({ @@ -121,6 +140,7 @@ variable "karpenter_node_pools" { operator : string values : list(string) })) + nodeClassRef : string limits : object({ cpu : string memory : string @@ -131,7 +151,51 @@ variable "karpenter_node_pools" { }) })) default = { + "airflow-kubernetes-pod-operator-high-workload" = { + nodeClassRef = "airflow-kubernetes-pod-operator-high-workload", + requirements = [ + { + key = "karpenter.k8s.aws/instance-family" + operator = "In" + values = ["m7i", "m6i", "m5", "t3", "c7i", "c6i", "c5", "r7i", "r6i", "r5"] + }, + { + key = "karpenter.k8s.aws/instance-cpu" + operator = "Gt" + values = ["1"] // From 2 inclusive + }, + { + key = "karpenter.k8s.aws/instance-cpu" + operator = "Lt" + values = ["49"] // To 48 inclusive + }, + { + key = "karpenter.k8s.aws/instance-memory" + operator = "Gt" + values = ["8191"] // 8 GiB = 8192 MiB + }, + { + key = "karpenter.k8s.aws/instance-memory" + operator = "Lt" + values = ["98305"] // 96 GiB = 98404 MiB + }, + { + key = "karpenter.k8s.aws/instance-hypervisor", + operator = "In", + values = ["nitro"] + } + ] + limits = { + cpu = "528" // 11 x 48 + memory = "1056Gi" // 11 x 96 + } + disruption = { + consolidationPolicy = "WhenEmpty" + consolidateAfter = "1m" + } + }, "airflow-kubernetes-pod-operator" = { + nodeClassRef = "default", requirements = [ { key = "karpenter.k8s.aws/instance-family" @@ -151,12 +215,12 @@ variable "karpenter_node_pools" { { key = "karpenter.k8s.aws/instance-memory" operator = "Gt" - values = ["8191"] // From 8 GB inclusive + values = ["8191"] // 8 GiB = 8192 MiB }, { key = "karpenter.k8s.aws/instance-memory" operator = "Lt" - values = ["32769"] // To 32 GB inclusive + values = ["32769"] // 32 GiB = 32768 MiB }, { key = "karpenter.k8s.aws/instance-hypervisor", @@ -174,6 +238,7 @@ variable "karpenter_node_pools" { } }, "airflow-celery-workers" = { + nodeClassRef = "default", requirements = [ { key = "karpenter.k8s.aws/instance-family" @@ -216,6 +281,7 @@ variable "karpenter_node_pools" { } }, "airflow-core-components" = { + nodeClassRef = "default", requirements = [ { key = "karpenter.k8s.aws/instance-family" diff --git a/terraform-unity/versions.tf b/terraform-unity/versions.tf index 6e3c85b3..7dc2bcb8 100644 --- a/terraform-unity/versions.tf +++ b/terraform-unity/versions.tf @@ -9,17 +9,13 @@ terraform { source = "hashicorp/helm" version = "2.13.1" } - random = { - source = "hashicorp/random" - version = "3.6.1" - } null = { source = "hashicorp/null" version = "3.2.2" } aws = { source = "hashicorp/aws" - version = "5.47.0" + version = 
"5.50.0" } time = { source = "hashicorp/time" diff --git a/unity-initiator/routers/srl_router.tmpl.yaml b/unity-initiator/routers/srl_router.tmpl.yaml new file mode 100644 index 00000000..8de3fb12 --- /dev/null +++ b/unity-initiator/routers/srl_router.tmpl.yaml @@ -0,0 +1,37 @@ +initiator_config: + + name: SRL Example + + payload_type: + url: + - regexes: + - '(?<=/)(?P0980|0990)_(?P\d{10})-(?P\d{5})-(?P\d{1,3})(?P\.dat|\.emd)$' + evaluators: + - name: eval_srl_edrgen_readiness + actions: + - name: submit_dag_by_id + params: + dag_id: eval_srl_edrgen_readiness + airflow_base_api_endpoint: ${airflow_base_api_endpoint} + airflow_username: ${airflow_username} + airflow_password: ${airflow_password} + on_success: + actions: + - name: submit_dag_by_id + params: + dag_id: srl_edrgen + + - regexes: + - '(?<=/)(?Phello_world\.txt)$' + evaluators: + - name: eval_hello_world_readiness + actions: + - name: submit_ogc_process_execution + params: + process_id: eval_hello_world_readiness + ogc_processes_base_api_endpoint: ${ogc_processes_base_api_endpoint} + on_success: + actions: + - name: submit_ogc_process_execution + params: + process_id: hello_world diff --git a/unity-test/conftest.py b/unity-test/conftest.py index 95cfdff1..05b9ebdb 100644 --- a/unity-test/conftest.py +++ b/unity-test/conftest.py @@ -26,8 +26,8 @@ def pytest_addoption(parser): "--venue", action="store", default=None, - choices=("dev", "int", "ops"), - help="The venue in which the cluster will be deployed (dev, int, ops).", + choices=("dev", "test", "ops"), + help="The venue in which the cluster will be deployed (dev, test, ops).", ) parser.addoption( "--developer", @@ -79,6 +79,12 @@ def ogc_processes_api_url(request): return url +@pytest.fixture(scope="session") +def venue(request): + venue = request.config.getoption("--venue") + return venue + + @pytest.fixture(scope="session") def airflow_api_auth(): return HTTPBasicAuth("admin", os.getenv("AIRFLOW_WEBSERVER_PASSWORD")) diff --git a/unity-test/system/integration/step_defs/test_sbg_preprocess_workflow.py b/unity-test/system/integration/step_defs/test_sbg_preprocess_workflow.py index 9b38878c..dc2fb246 100644 --- a/unity-test/system/integration/step_defs/test_sbg_preprocess_workflow.py +++ b/unity-test/system/integration/step_defs/test_sbg_preprocess_workflow.py @@ -1,3 +1,9 @@ +# This test executes the SBG Preprocess DAG as a CWL workflow. +# The workflow parameters are contained in a YAML file which is venue-dependent. +# The SBG Preprocess DAG must already be deployed in Airflow, +# and it is invoked via the Airflow API. +# The CWL task is executed via a KubernetesPodOperator on a worker node +# that is dynamically provisioned by Karpenter. 
from pathlib import Path import backoff @@ -8,6 +14,19 @@ FEATURES_DIR = FILE_PATH.parent.parent / "features" FEATURE_FILE: Path = FEATURES_DIR / "sbg_preprocess_workflow.feature" +# DAG parameters are venue specific +DAG_ID = "sbg_preprocess_cwl_dag" +SBG_PREPROCESS_PARAMETERS = { + "dev": { + "cwl_workflow": "https://raw.githubusercontent.com/unity-sds/sbg-workflows/main/preprocess/sbg-preprocess-workflow.cwl", + "cwl_args": "https://raw.githubusercontent.com/unity-sds/sbg-workflows/main/preprocess/sbg-preprocess-workflow.dev.yml", + }, + "test": { + "cwl_workflow": "https://raw.githubusercontent.com/unity-sds/sbg-workflows/main/preprocess/sbg-preprocess-workflow.cwl", + "cwl_args": "https://raw.githubusercontent.com/unity-sds/sbg-workflows/main/preprocess/sbg-preprocess-workflow.test.yml", + }, +} + @scenario(FEATURE_FILE, "Check SBG Preprocess Workflow") def test_check_sbg_preprocess_workflow(): @@ -20,13 +39,16 @@ def api_up_and_running(): @when("I trigger a dag run for the SBG Preprocess dag", target_fixture="response") -def trigger_dag(airflow_api_url, airflow_api_auth): - # leaving out dag_run_id to avoid conflicts with previous runs- we can always fetch it from the response - # unsure about contents of the conf argument, though +def trigger_dag(airflow_api_url, airflow_api_auth, venue): + # DAG parameters are venue dependent + cwl_workflow = SBG_PREPROCESS_PARAMETERS[venue]["cwl_workflow"] + cwl_args = SBG_PREPROCESS_PARAMETERS[venue]["cwl_args"] response = requests.post( - f"{airflow_api_url}/api/v1/dags/sbg_preprocess_cwl_dag/dagRuns", + f"{airflow_api_url}/api/v1/dags/{DAG_ID}/dagRuns", auth=airflow_api_auth, - json={"note": "Triggered by unity-test suite"}, + json={"conf": {"cwl_workflow": f"{cwl_workflow}", "cwl_args": f"{cwl_args}"}}, + # nosec + verify=False, ) return response @@ -56,6 +78,8 @@ def poll_dag_run(response, airflow_api_url, airflow_api_auth): dag_run_response = requests.get( f"""{airflow_api_url}/api/v1/dags/{dag_json["dag_id"]}/dagRuns/{dag_json["dag_run_id"]}""", auth=airflow_api_auth, + # nosec + verify=False, ) assert dag_run_response.status_code == 200, f"Expected status code 200, but got {response.status_code}" json = dag_run_response.json() diff --git a/unity-test/system/smoke/step_defs/test_airflow_api_health.py b/unity-test/system/smoke/step_defs/test_airflow_api_health.py index 48ed11a0..81fb0fc2 100644 --- a/unity-test/system/smoke/step_defs/test_airflow_api_health.py +++ b/unity-test/system/smoke/step_defs/test_airflow_api_health.py @@ -23,7 +23,7 @@ def api_up_and_running(): @when("I send a GET request to the health endpoint", target_fixture="response") def send_get_request(airflow_api_url): - response = requests.get(f"{airflow_api_url}/health") + response = requests.get(f"{airflow_api_url}/health", verify=False) # nosec B501 return response diff --git a/unity-test/system/smoke/step_defs/test_ogc_processes_api_health.py b/unity-test/system/smoke/step_defs/test_ogc_processes_api_health.py index 2c44e51e..c797b01a 100644 --- a/unity-test/system/smoke/step_defs/test_ogc_processes_api_health.py +++ b/unity-test/system/smoke/step_defs/test_ogc_processes_api_health.py @@ -20,7 +20,7 @@ def api_up_and_running(): @when("I send a GET request to the health endpoint", target_fixture="response") def send_get_request(ogc_processes_api_url): - response = requests.get(f"{ogc_processes_api_url}/health") + response = requests.get(f"{ogc_processes_api_url}/health", verify=False) # nosec B501 print(response.json()) return response diff --git 
a/utils/deploy_ogc_app_packages.py b/utils/deploy_ogc_app_packages.py new file mode 100644 index 00000000..909a6833 --- /dev/null +++ b/utils/deploy_ogc_app_packages.py @@ -0,0 +1,98 @@ +import argparse +import json +import logging +import os + +import unity_sps_ogc_processes_api_python_client +from unity_sps_ogc_processes_api_python_client.models.ogcapppkg import Ogcapppkg +from unity_sps_ogc_processes_api_python_client.rest import ApiException + +# Configure logging +logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s") + + +def register_process(api_instance, proc, ogcapppkg_instance): + """ + Register a process with the OGC API. + + Args: + api_instance: The API client instance. + proc: The process identifier. + ogcapppkg_instance: The Ogcapppkg instance containing the process description. + + Returns: + None + """ + try: + # Deploy a process + api_instance.deploy_processes_post(w=proc, ogcapppkg=ogcapppkg_instance) + logging.info(f"Successfully registered process: {proc}") + except ApiException as e: + logging.error(f"Exception when calling DRUApi->deploy_processes_post for process {proc}: {e}") + except Exception as e: + logging.error(f"Unexpected error for process {proc}: {e}") + + +def main(): + """ + Main function to deploy processes to the OGC API. + + Parses command-line arguments, reads process descriptions from JSON files, + and registers each process with the OGC API. + + Setup: pip install -e ".[develop, test]" + + Invocation Syntax: python utils/deploy_ogc_app_packages.py + Example invocation: python utils/deploy_ogc_app_packages.py http://k8s-sps-ogcproce-XXXXXX-XXXXXXX.us-west-2.elb.amazonaws.com:5001 .../unity-sps/ogc-application-packages + + Args: + None + + Returns: + None + """ + parser = argparse.ArgumentParser(description="Deploy processes to OGC API") + parser.add_argument("ogc_api_processes", help="OGC API Processes URL") + parser.add_argument( + "ogc_app_packages_dir", help="Directory containing JSON files with application packages" + ) + args = parser.parse_args() + + OGC_API_PROCESSES = args.ogc_api_processes + ogc_app_packages_dir = args.ogc_app_packages_dir + + # Configure the API client + configuration = unity_sps_ogc_processes_api_python_client.Configuration(host=OGC_API_PROCESSES) + + with unity_sps_ogc_processes_api_python_client.ApiClient(configuration) as api_client: + api_instance = unity_sps_ogc_processes_api_python_client.DRUApi(api_client) + + # Iterate through all JSON files in the specified directory + for filename in os.listdir(ogc_app_packages_dir): + if filename.endswith(".json"): + json_file = os.path.join(ogc_app_packages_dir, filename) + try: + # Read the process description from the JSON file + with open(json_file, "r") as f: + process_data = json.load(f) + + # Extract the process ID from the JSON data + proc = process_data.get("processDescription", {}).get("id") + if not proc: + logging.error(f"Process ID not found in JSON file: {json_file}") + continue + + # Create an instance of Ogcapppkg from the JSON data + ogcapppkg_instance = Ogcapppkg.from_dict(process_data) + logging.info(f"Registering process: {proc}") + register_process(api_instance, proc, ogcapppkg_instance) + except FileNotFoundError: + logging.error(f"JSON file not found: {json_file}") + except json.JSONDecodeError: + logging.error(f"Error decoding JSON file: {json_file}") + except Exception as e: + logging.error(f"Unexpected error processing file {json_file}: {e}") + + +if __name__ == "__main__": + main() diff --git 
a/utils/post_deployment.sh b/utils/post_deployment.sh deleted file mode 100755 index 7013c67c..00000000 --- a/utils/post_deployment.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash -#set -ex - -# Script to execute post-deployment operations -# Pre-Requisites: -# - SPS has been deployed successfully -# - AWS credentials are renewed and set in the environment -# Syntax: -# ./post_deployment.sh -# Example: -# ./post_deployment.sh http://k8s-airflow-ogcproce-944e409e1d-687289935.us-west-2.elb.amazonaws.com:5001 - -# script argument: the $WPST_API -export WPST_API=$1 - -# list of processes to be registered -declare -a procs=("cwl_dag" "karpenter_test" "sbg_L1_to_L2_e2e_cwl_step_by_step_dag") - -for proc in "${procs[@]}" -do - echo " " - # register process - echo "Registering process: $proc" - curl -X POST -H "Content-Type: application/json; charset=utf-8" --data '{"id":"'${proc}'", "version": "1.0.0"}' "${WPST_API}/processes" - # unregister process - # echo "Unregistering process: $proc" - # curl -X DELETE -H "Content-Type: application/json; charset=utf-8" "${WPST_API}/processes/${proc}" - echo " " -done diff --git a/utils/trigger_dag.py b/utils/trigger_dag.py index 4d982e51..d773f739 100644 --- a/utils/trigger_dag.py +++ b/utils/trigger_dag.py @@ -41,6 +41,10 @@ def main(): dt_now = datetime.now(timezone.utc) logical_date = dt_now.strftime("%Y-%m-%dT%H:%M:%SZ") data = {"logical_date": logical_date} + # Example on how to pass DAG specific parameters + # data = {"logical_date": logical_date, + # "conf": {"cwl_args": "abc123"} + # } result = requests.post( url, json=data, headers=headers, auth=HTTPBasicAuth(airflow_username, airflow_password) ) diff --git a/utils/unity_sps_deploy_or_destroy_sps.sh b/utils/unity_sps_deploy_or_destroy_sps.sh index ca12516d..8879d9ef 100755 --- a/utils/unity_sps_deploy_or_destroy_sps.sh +++ b/utils/unity_sps_deploy_or_destroy_sps.sh @@ -14,6 +14,11 @@ set -ex # Components must be destroyed in the revers order: # destroy airflow > karpenter > eks +# Note: the first time you run "deploy" on eks/karpenter/airflow, you don't already have the $TFVARS_FILENAME +# in the proper directory, the script will stop because it cannot parse the file that was automatically generated. +# Edit that file: remove the first and last line, and add the specific values for your deployment. +# Then run the script again. + # Note: # Must make sure we don't check in a new version of this script with a real AWS account number @@ -29,7 +34,6 @@ export SERVICE_AREA=sps export VENUE=dev export DEPLOYMENT=luca export COUNTER=7 -export BUCKET=unity-unity-dev-bucket # the root directory of the "unity-sps" installation export UNITY_SPS_DIR=/Users/cinquini/PycharmProjects/unity-sps @@ -57,8 +61,7 @@ export TFVARS_FILENAME=unity-${VENUE}-sps-${COMPONENT}-${DEPLOYMENT}-${COUNTER}. # initialize Terraform cd $tf_dir tfswitch 1.8.2 -export KEY=sps/tfstates/${PROJECT}-${VENUE}-${SERVICE_AREA}-${COMPONENT}-${DEPLOYMENT}-${COUNTER}.tfstate -terraform init -reconfigure -backend-config="bucket=$BUCKET" -backend-config="key=$KEY" +terraform init -reconfigure terraform get -update # if new cluster --> create new tfvars file
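For reference, below is a minimal sketch (not part of this patch) of how the venue-specific `conf` payload introduced in the updated integration test, and hinted at in the comment added to utils/trigger_dag.py, can be passed when triggering the SBG Preprocess DAG directly against the Airflow stable REST API. The `AIRFLOW_API_URL` environment variable and its default value are assumptions used only as placeholders for the venue's Airflow endpoint; the `cwl_workflow` and `cwl_args` values are the dev-venue defaults from the test.

```python
import os
from datetime import datetime, timezone

import requests
from requests.auth import HTTPBasicAuth

# Placeholder for the venue's Airflow base URL (assumption; in the test suite this
# comes from the --airflow-endpoint pytest option rather than an environment variable).
AIRFLOW_API_URL = os.environ.get("AIRFLOW_API_URL", "http://localhost:8080")

# Same credentials the test suite uses: "admin" plus AIRFLOW_WEBSERVER_PASSWORD.
auth = HTTPBasicAuth("admin", os.environ["AIRFLOW_WEBSERVER_PASSWORD"])

payload = {
    "logical_date": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
    # Dev-venue parameters from SBG_PREPROCESS_PARAMETERS in the integration test.
    "conf": {
        "cwl_workflow": "https://raw.githubusercontent.com/unity-sds/sbg-workflows/main/preprocess/sbg-preprocess-workflow.cwl",
        "cwl_args": "https://raw.githubusercontent.com/unity-sds/sbg-workflows/main/preprocess/sbg-preprocess-workflow.dev.yml",
    },
}

response = requests.post(
    f"{AIRFLOW_API_URL}/api/v1/dags/sbg_preprocess_cwl_dag/dagRuns",
    json=payload,
    auth=auth,
    verify=False,  # nosec - mirrors the test suite's handling of endpoints without trusted certs
)
response.raise_for_status()
print("Triggered DAG run:", response.json()["dag_run_id"])
```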