Skip to content

Commit

Permalink
Merge branch 'dev' into segev_dra_4.13
Browse files Browse the repository at this point in the history
  • Loading branch information
elsegev committed Oct 23, 2023
2 parents 54f69f7 + 056c58c commit 2686346
Show file tree
Hide file tree
Showing 43 changed files with 560 additions and 79 deletions.
1 change: 0 additions & 1 deletion .github/workflows/dsf_poc_cli.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@ on:
env:
TF_CLI_ARGS: "-no-color"
TF_INPUT: 0
TF_VAR_gw_count: 2
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

Expand Down
214 changes: 214 additions & 0 deletions .github/workflows/dsf_poc_cli_azure.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,214 @@
name: 'DSF POC CLI - Azure'

on:
  # Reusable entry point: called from nightly_manager.yml with an explicit ref.
  workflow_call:
    inputs:
      use_modules_from_terraform_registry:
        required: true
        type: boolean
      explicit_ref:
        required: true
        type: string
    secrets:
      AWS_ACCESS_KEY_ID:
        required: true
      AWS_SECRET_ACCESS_KEY:
        required: true
      SLACK_WEBHOOK_URL:
        required: true
      ARM_CLIENT_SECRET:
        required: true

  # Manual runs; explicit_ref is absent here, so the triggering ref is used.
  workflow_dispatch:
    inputs:
      use_modules_from_terraform_registry:
        type: boolean
        required: false

env:
  TF_CLI_ARGS: "-no-color"
  TF_INPUT: 0
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} # aws creds are needed for s3 backend
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
  ARM_SUBSCRIPTION_ID: ${{ vars.ARM_SUBSCRIPTION_ID }}
  ARM_CLIENT_ID: ${{ vars.ARM_CLIENT_ID }}
  ARM_TENANT_ID: ${{ vars.ARM_TENANT_ID }}
  ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }}

permissions:
  contents: read

jobs:
  terraform:
    strategy:
      fail-fast: false
      matrix:
        include:
          - name: DSF POC - SONAR
            workspace: azure_cli-

    name: '${{ matrix.name }}'
    runs-on: ubuntu-latest
    env:
      EXAMPLE_DIR: ./examples/azure/dsf_deployment
    environment: test

    # Use the Bash shell regardless whether the GitHub Actions runner is ubuntu-latest, macos-latest, or windows-latest
    defaults:
      run:
        shell: bash

    steps:
      # Resolve which git ref to test: the caller-supplied explicit_ref wins,
      # otherwise fall back to the ref that triggered this run.
      - name: Pick ref
        run: |
          if [ -z "${{ inputs.explicit_ref }}" ]; then
            echo REF=${{ github.ref }} >> $GITHUB_ENV;
          else
            echo REF=${{ inputs.explicit_ref }} >> $GITHUB_ENV;
          fi

      # Scheduled runs reuse a stable workspace name; ad-hoc runs get a unique,
      # disposable one (TMP_WORKSPACE_NAME) that is deleted at the end.
      - name: Set Workspace Name
        run: |
          echo "Event Name: ${{ github.event_name }}"
          if [ "${{ github.event_name }}" == 'schedule' ]; then
            echo TF_WORKSPACE=${{ matrix.workspace }}${{ github.event_name }}-$REF >> $GITHUB_ENV
          else
            echo TF_WORKSPACE=${{ matrix.workspace }}${{ github.run_number }}-${{ github.run_attempt }}-${{ inputs.explicit_ref }} >> $GITHUB_ENV
            echo TMP_WORKSPACE_NAME=${{ matrix.workspace }}${{ github.run_number }}-${{ github.run_attempt }}-${{ inputs.explicit_ref }} >> $GITHUB_ENV
          fi

      # Checkout the repository to the GitHub Actions runner
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: ${{ env.REF }}

      # Rewrite registry module sources to local paths (sed.expr lives at repo root).
      - name: Change the modules source to local
        if: ${{ inputs.use_modules_from_terraform_registry == false }}
        run: |
          find $EXAMPLE_DIR -type f -exec sed -i -f sed.expr {} \;

      - name: Sets env vars for environment
        # BUG FIX: was `github.ref != 'refs/heads/"master"'` — the literal inner
        # quotes can never match a real ref, so the condition was always true
        # and the var was set on master as well.
        if: github.ref != 'refs/heads/master'
        run: |
          echo "TF_VAR_tarball_s3_bucket=0ed58e18-0c0c-11ed-861d-0242ac120003" >> $GITHUB_ENV

      # S3/DynamoDB remote state — this is why the AWS credentials above are
      # required even though the deployment itself targets Azure.
      - name: Create terraform backend file
        run: |
          cat << EOF > $EXAMPLE_DIR/backend.tf
          terraform {
            backend "s3" {
              bucket         = "terraform-state-bucket-dsfkit-github-tests"
              key            = "states/terraform.tfstate"
              dynamodb_table = "terraform-state-lock"
              region         = "us-east-1"
            }
          }
          EOF

      - name: Create terraform.tfvars file
        run: |
          cat << EOF > $EXAMPLE_DIR/terraform.tfvars
          resource_group_location = "East US"
          tarball_location = {
            az_resource_group  = "eytan-resource-group"
            az_storage_account = "eytanstorageaccount"
            az_container       = "sonar"
            az_blob            = "jsonar-4.12.0.10.0.tar.gz"
          }
          EOF

      # Install the latest version of Terraform CLI and configure the Terraform CLI configuration file with a Terraform Cloud user API token
      - name: Setup Terraform
        uses: hashicorp/setup-terraform@v2
        with:
          terraform_wrapper: false
          terraform_version: ~1.6.0

      - name: Setup jq
        uses: sergeysova/jq-action@v2

      # Initialize a new or existing Terraform working directory by creating initial files, loading any remote state, downloading modules, etc.
      # TF_WORKSPACE is overridden to 'default' so init does not try to select
      # the (possibly not-yet-created) matrix workspace.
      - name: Terraform Init
        run: terraform -chdir=$EXAMPLE_DIR init
        env:
          TF_WORKSPACE: default

      # On scheduled runs, destroy any leftovers from a previous run. The .tf
      # files are temporarily renamed so destroy operates purely on state.
      - name: Cleaning environment
        run: |
          if [ "${{ github.event_name }}" == 'schedule' ]; then
            mv $EXAMPLE_DIR/main.tf{,_}
            mv $EXAMPLE_DIR/outputs.tf{,_}
            mv $EXAMPLE_DIR/sonar.tf{,_}
            mv $EXAMPLE_DIR/networking.tf{,_}
            terraform -chdir=$EXAMPLE_DIR destroy -auto-approve
            mv $EXAMPLE_DIR/main.tf{_,}
            mv $EXAMPLE_DIR/outputs.tf{_,}
            mv $EXAMPLE_DIR/sonar.tf{_,}
            mv $EXAMPLE_DIR/networking.tf{_,}
          fi

      - name: Terraform Validate
        run: terraform -chdir=$EXAMPLE_DIR validate

      # Generates an execution plan for Terraform
      - name: Terraform Plan
        run: |
          terraform -chdir=$EXAMPLE_DIR workspace list
          terraform -chdir=$EXAMPLE_DIR plan

      # On push to "main", build or change infrastructure according to Terraform configuration files
      # Note: It is recommended to set up a required "strict" status check in your repository for "Terraform Cloud". See the documentation on "strict" required status checks for more information: https://help.github.com/en/github/administering-a-repository/types-of-required-status-checks
      - name: Terraform Apply
        id: apply
        # if: github.ref == 'refs/heads/"master"' && github.event_name == 'push' || github.event_name == 'workflow_dispatch'
        run: terraform -chdir=$EXAMPLE_DIR apply -auto-approve

      - name: Terraform Output
        if: always()
        run: terraform -chdir=$EXAMPLE_DIR output -json

      # NOTE(review): upload-artifact@v2 is deprecated; consider upgrading to a
      # maintained major version (artifact naming semantics differ — verify first).
      - name: Collect Artifacts
        if: always()
        uses: actions/upload-artifact@v2
        with:
          name: collected-keys
          path: |
            ${{ env.EXAMPLE_DIR }}/ssh_keys

      # Always tear down ad-hoc runs; scheduled runs keep infra on apply failure
      # so it can be inspected (it is cleaned on the next scheduled run).
      - name: Terraform Destroy
        id: destroy
        if: always()
        run: |
          if [ '${{ steps.apply.conclusion }}' == 'success' ] || [ "${{ github.event_name }}" != 'schedule' ]; then
            terraform -chdir=$EXAMPLE_DIR destroy -auto-approve
          fi

      - name: Terraform Delete Workspace
        if: always()
        run: |
          if [ '${{ steps.destroy.conclusion }}' == 'success' ] && [ "${{ github.event_name }}" != 'schedule' ]; then
            terraform -chdir=$EXAMPLE_DIR workspace delete $TMP_WORKSPACE_NAME
          fi
        env:
          TF_WORKSPACE: default

      - name: Check how was the workflow run
        id: check-trigger
        if: ${{ failure() }}
        run: |
          if [ "${{ github.event_name }}" == "schedule" ]; then
            echo "run-by=Automation" >> $GITHUB_OUTPUT
          else
            echo "run-by=${{ github.actor }}" >> $GITHUB_OUTPUT
          fi

      # Send job failure to Slack
      # BUG FIX: messages referenced `inputs.workspace`, which is not a declared
      # input of this workflow and always rendered empty; use matrix.workspace.
      - name: Send Slack When Failure
        if: ${{ failure() }}
        run: |
          if [ "${{ env.REF }}" == 'master' ]; then
            curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*${{ matrix.name }} Prod ${{ matrix.workspace }} automation Failed*\n<https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|Please check the job!>\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#dsfkit-prod"}' ${{ secrets.SLACK_WEBHOOK_URL }}
          elif [ "${{ env.REF }}" == 'dev' ]; then
            curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*${{ matrix.name }} dev ${{ matrix.workspace }} automation Failed*\n<https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|Please check the job!>\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#edsf_automation"}' ${{ secrets.SLACK_WEBHOOK_URL }}
          else
            curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*${{ matrix.name }} private branch ${{ matrix.workspace }} automation Failed*\n<https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|Please check the job!>\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#edsf_automation"}' ${{ secrets.SLACK_WEBHOOK_URL }}
          fi
22 changes: 22 additions & 0 deletions .github/workflows/nightly_manager.yml
Original file line number Diff line number Diff line change
Expand Up @@ -121,3 +121,25 @@ jobs:
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
DAM_LICENSE: ${{ secrets.DAM_LICENSE }}

  # Nightly Azure DSF PoC CLI test pinned to the master branch. Local module
  # sources are used (use_modules_from_terraform_registry: false) so the run
  # exercises the code at `master`, not published registry releases.
  master_dsf_poc_azure:
    uses: imperva/dsfkit/.github/workflows/dsf_poc_cli_azure.yml@master
    with:
      use_modules_from_terraform_registry: false
      explicit_ref: master
    secrets:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }}
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

  # Same Azure PoC run, executed against the dev branch. Note the `uses:` ref
  # (@dev) and explicit_ref are kept in sync deliberately.
  dev_dsf_poc_azure:
    uses: imperva/dsfkit/.github/workflows/dsf_poc_cli_azure.yml@dev
    with:
      use_modules_from_terraform_registry: false
      explicit_ref: dev
    secrets:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }}
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
2 changes: 1 addition & 1 deletion .github/workflows/sonar_poc_cli.yml
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ jobs:
- name: Change the modules source to local
if: ${{ inputs.use_modules_from_terraform_registry == false }}
run: |
find ./examples/ -type f -exec sed -i -f sed.expr {} \;
find $EXAMPLE_DIR -type f -exec sed -i -f sed.expr {} \;
- name: Sets env vars for environment
run: |
Expand Down
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -102,9 +102,9 @@ This guide references the following information and links, some of which are ava
<td>
<a href="https://docs.imperva.com/howto/ee19c683">Sonar v4.12</a>
<a href="https://docs.imperva.com/howto/3f513830">DAM v14.12</a>
<a href="https://docs.imperva.com/howto/3f513830">DAM v14.13</a>
<a href="https://docs.imperva.com/howto/4e487f3c">DRA v4.12</a>
<a href="https://docs.imperva.com/howto/71799f9d">DRA v4.13</a>
</td>
<td>DSF Components Overview
</td>
Expand Down
31 changes: 31 additions & 0 deletions examples/azure/dsf_deployment/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
# DSF Deployment example
[![GitHub tag](https://img.shields.io/github/v/tag/imperva/dsfkit.svg)](https://github.com/imperva/dsfkit/tags)

This example provides a DSF (Data Security Fabric) deployment with a DSF Hub and Agentless Gateways.

## Modularity
The deployment is modular and allows users to deploy one or more of the following modules:

1. New VPC
2. Sonar
- DSF Hub
- DSF Hub DR HADR (High Availability Disaster Recovery) node
- Agentless Gateways
- Agentless Gateways DR HADR (High Availability Disaster Recovery) nodes

## Variables
Several variables in the `variables.tf` file are important for configuring the deployment. The following variables dictate the deployment content and should be paid more attention to:
- `enable_sonar`: Enable Sonar sub-product
- `agentless_gw_count`: Number of Agentless Gateways
- `hub_hadr`: Enable DSF Hub High Availability Disaster Recovery (HADR)
- `agentless_gw_hadr`: Enable Agentless Gateway High Availability Disaster Recovery (HADR)

### Networking
- `subnet_ids`: IDs of the subnets for the deployment. If not specified, a new VPC is created.

## Default Example
To perform the default deployment, run the following command:

```bash
terraform apply -auto-approve
```
2 changes: 2 additions & 0 deletions examples/installation/dsf_single_account_deployment/dam.tf
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ module "mx" {
} : null
large_scale_mode = var.large_scale_mode.mx
tags = local.tags
send_usage_statistics = var.send_usage_statistics
}

module "agent_gw" {
Expand All @@ -59,6 +60,7 @@ module "agent_gw" {
large_scale_mode = var.large_scale_mode.agent_gw
gateway_group_name = local.gateway_group_name
tags = local.tags
send_usage_statistics = var.send_usage_statistics
providers = {
aws = aws.provider-2
}
Expand Down
2 changes: 2 additions & 0 deletions examples/installation/dsf_single_account_deployment/dra.tf
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ module "dra_admin" {
key_pair = local.dra_admin_public_key_name
instance_profile_name = var.dra_admin_instance_profile_name
tags = local.tags
send_usage_statistics = var.send_usage_statistics
}

module "dra_analytics" {
Expand All @@ -46,6 +47,7 @@ module "dra_analytics" {
admin_server_private_ip = module.dra_admin[0].private_ip
admin_server_public_ip = module.dra_admin[0].public_ip
tags = local.tags
send_usage_statistics = var.send_usage_statistics
providers = {
aws = aws.provider-2
}
Expand Down
4 changes: 4 additions & 0 deletions examples/installation/dsf_single_account_deployment/sonar.tf
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ module "hub_main" {
}] : []
generate_access_tokens = true
tags = local.tags
send_usage_statistics = var.send_usage_statistics
providers = {
aws = aws.provider-1
}
Expand Down Expand Up @@ -102,6 +103,7 @@ module "hub_dr" {
base_directory = var.sonar_machine_base_directory
generate_access_tokens = true
tags = local.tags
send_usage_statistics = var.send_usage_statistics
providers = {
aws = aws.provider-1
}
Expand Down Expand Up @@ -166,6 +168,7 @@ module "agentless_gw_main" {
instance_profile_name = var.agentless_gw_instance_profile_name
base_directory = var.sonar_machine_base_directory
tags = local.tags
send_usage_statistics = var.send_usage_statistics
providers = {
aws = aws.provider-2
}
Expand Down Expand Up @@ -208,6 +211,7 @@ module "agentless_gw_dr" {
instance_profile_name = var.agentless_gw_instance_profile_name
base_directory = var.sonar_machine_base_directory
tags = local.tags
send_usage_statistics = var.send_usage_statistics
providers = {
aws = aws.provider-2
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -620,3 +620,9 @@ variable "dra_analytics_instance_profile_name" {
description = "Instance profile to assign to the DRA Analytics EC2. Keep empty if you wish to create a new instance profile."
default = null
}

# Single opt-in/out switch propagated to every DSF component module in this
# example (mx, agent_gw, dra_admin, dra_analytics, hub_*, agentless_gw_*).
variable "send_usage_statistics" {
  type        = bool
  default     = true
  description = "Set to true to enable sending usage statistics, or false to disable."
}
Loading

0 comments on commit 2686346

Please sign in to comment.