Doc/update terraform variables (#2009)
* Updated most Terraform modules' variables files to the 1.0 specification, and improved inline documentation and comments (a brief sketch of the target style follows this list).

* Added more documentation around Terraform variables.

* Added sample.tfvars for AWS modules, and provided documentation on most variables. The biggest gap currently is ami_account_id and csoc_account_id, which don't have good WHY documentation.
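
For orientation, here is a minimal, hypothetical sketch of the variable style the first point refers to: each variable in variables.tf carries an explicit description and type (plus a default where one makes sense), with the matching key exposed in sample.tfvars. The variable name and values below are illustrative and are not taken from any particular module in this change.

#variables.tf (illustrative)
variable "instance_type" {
  description = "The EC2 instance type to launch for this deployment"
  type        = string
  default     = "t3.medium"
}

#sample.tfvars (illustrative)
instance_type = "t3.medium"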

Co-authored-by: jawadqur <[email protected]>
AidanHilt and jawadqur authored Oct 3, 2022
1 parent 6dd5983 commit 97df638
Showing 56 changed files with 1,771 additions and 140 deletions.
22 changes: 11 additions & 11 deletions .secrets.baseline
@@ -3,7 +3,7 @@
"files": "^.secrets.baseline$",
"lines": null
},
"generated_at": "2022-06-21T21:12:27Z",
"generated_at": "2022-07-29T15:31:31Z",
"plugins_used": [
{
"name": "AWSKeyDetector"
@@ -2235,12 +2235,21 @@
"type": "Secret Keyword"
}
],
"tf_files/aws/eks/sample.tfvars": [
{
"hashed_secret": "83c1003f406f34fba4d6279a948fee3abc802884",
"is_secret": false,
"is_verified": false,
"line_number": 107,
"type": "Hex High Entropy String"
}
],
"tf_files/aws/eks/variables.tf": [
{
"hashed_secret": "83c1003f406f34fba4d6279a948fee3abc802884",
"is_secret": false,
"is_verified": false,
"line_number": 135,
"line_number": 133,
"type": "Hex High Entropy String"
}
],
@@ -2412,15 +2421,6 @@
"type": "Hex High Entropy String"
}
],
"tf_files/aws/rds/sample.tfvars": [
{
"hashed_secret": "76c3c4836dee37d8d0642949f84092a9a24bbf46",
"is_secret": false,
"is_verified": false,
"line_number": 7,
"type": "Secret Keyword"
}
],
"tf_files/aws/slurm/README.md": [
{
"hashed_secret": "fd85d792fa56981cf6a8d2a5c0857c74af86e99d",
5 changes: 5 additions & 0 deletions tf_files/aws/access/sample.tfvars
@@ -0,0 +1,5 @@
#The URL to an S3 bucket we want to work with
access_url = ""

#The ARN to an Amazon ACM-managed certificate
access_cert = ""
7 changes: 6 additions & 1 deletion tf_files/aws/account-policies/sample.tfvars
@@ -1 +1,6 @@
# defaults shold usually be ok - check variables.tf
#The AWS region we are working in
region = "us-east-1"


#The IAM roles to be created
roles = ["devopsdirector", "bsdisocyber", "projectmanagerplanx", "devopsplanx", "devplanx"]
9 changes: 9 additions & 0 deletions tf_files/aws/account_management-logs/sample.tfvars
@@ -0,0 +1,9 @@
#ID of AWS account that owns the public AMIs
#TODO clarification
csoc_account_id = "433568766270"

#TODO check what these are used for. This module seems to use csoc_common_logging,
#which seems to use modules/common-logging. Neither of those appears to have these two variables.
account_name = ""

alarm_actions = ""
67 changes: 67 additions & 0 deletions tf_files/aws/batch/sample.tfvars
@@ -0,0 +1,67 @@
#A tag used to identify resources associated with this job.
job_id = ""

#This is a prefix that will be applied to resources generated as part of this deployment. It is for tracking purposes.
#This is generally the long name of the job, which is the hostname + job type + job ID.
prefix = ""

#The name of the AWS batch job definition
batch_job_definition_name = ""

#This is the location of a JSON file that contains an AWS Batch job definition, containing information such as
#the name of the container to use and resources to allocate.
#More information can be found here: https://docs.aws.amazon.com/batch/latest/userguide/job_definitions.html
container_properties = ""

#The name of the IAM instance role to be attached to the machines running this batch job. An instance role is a limited role
#applied to EC2 instances to allow them to access designated resources.
#More information can be found at: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html
iam_instance_role = ""

#The instance profile to attach to EC2 machines. The instance profile is associated with a role, and is the
#resource that is associated with a specific EC2 instance to give it access to desired resources. More information can be
#found at: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2_instance-profiles.html
iam_instance_profile_role = ""

#The role that allows AWS Batch itself (not the EC2 instances) to access needed resources. More information can be found at:
#https://docs.aws.amazon.com/batch/latest/userguide/service_IAM_role.html
aws_batch_service_role = ""

#The name of the security group associated with this batch job
aws_batch_compute_environment_sg = ""

#The name of the batch compute environment to run the jobs in. A job environment consists of ECS container instances that can
#run the job.
compute_environment_name = ""

#What type of EC2 instance to use in order to handle the job.
instance_type = ["c4.large"]

#Most likely the scheduling priority of the job queue created below; AWS Batch evaluates higher values first
priority = 10

#The maximum number of EC2 vCPUs that an environment can use.
max_vcpus = 256

#The minimum number of EC2 vCPUs that an environment should maintain.
min_vcpus = 0

#What type of compute environment to use. Valid selections are [EC2, SPOT]
compute_env_type = "EC2"

#Valid options are [MANAGED, UNMANAGED]
#This controls whether AWS manages spinning up the resources for us, or if we bring our own environment.
#DO NOT USE UNMANAGED unless you know what you're doing.
compute_type = "MANAGED"

#The EC2 key pair that is used for instances launched in the compute environment.
ec2_key_pair = "giangb"

#The name of the job queue to create as part of this deployment.
batch_job_queue_name = ""

#The name of the SQS queue that will be created as a part of this deployment. The queue is the primary way that different nodes
#communicate that they have completed a part of the batch job, and pass their completed parts to the next stage of the pipeline
sqs_queue_name = ""

#The name of the bucket the results should be output to.
output_bucket_name = ""
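
To make the relationships among the batch variables above more concrete, the following is a rough, hypothetical sketch of how they typically feed an aws_batch_compute_environment resource. It is not this module's actual code: the resource name is invented, and the security group and subnet IDs are placeholders.

#Illustrative wiring of the variables above; not the module's real configuration
resource "aws_batch_compute_environment" "batch" {
  compute_environment_name = var.compute_environment_name
  type                     = var.compute_type       #MANAGED: AWS provisions and scales the instances
  service_role             = var.aws_batch_service_role

  compute_resources {
    type               = var.compute_env_type       #EC2 or SPOT
    instance_type      = var.instance_type
    min_vcpus          = var.min_vcpus
    max_vcpus          = var.max_vcpus
    instance_role      = var.iam_instance_profile_role
    ec2_key_pair       = var.ec2_key_pair
    security_group_ids = ["sg-00000000"]             #placeholder
    subnets            = ["subnet-00000000"]         #placeholder
  }
}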
44 changes: 44 additions & 0 deletions tf_files/aws/bucket_manifest_utils/sample.tfvars
@@ -0,0 +1,44 @@
#Path to the function file
lambda_function_file = ""

#Name of the function you are creating
lambda_function_name = ""

#Description of the function
lambda_function_description = ""

#IAM role ARN to attach to the function
lambda_function_iam_role_arn = ""

#The handler within your code that Lambda invokes, given in the form <file>.<function>.
#For a Python-focused example, see here: https://docs.aws.amazon.com/lambda/latest/dg/python-handler.html
lambda_function_handler = "lambda_function.handler"

#Language and version to use to run the lambda function.
#For more information, see: https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html
lambda_function_runtime = "python3.7"


#Timeout of the function in seconds
lambda_function_timeout = 3

#How much RAM in MB will be used
lambda_function_memory_size = 128

#A map containing key-value pairs that define environment variables for the function
lambda_function_env = {}

#A map containing key-value pairs used in AWS to filter and search for resources
lambda_function_tags = {}

#Whether the function will be attached to a VPC. Valid options are [true, false]
lambda_function_with_vpc = false

#List of security groups for the lambda function with a vpc
lambda_function_security_groups = []

#List of subnets for the lambda function with a vpc
lambda_function_subnets_id = []
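
Similarly, a rough, hypothetical sketch of how these lambda variables usually map onto an aws_lambda_function resource; this is not the module's actual code, and the conditional VPC handling implied by lambda_function_with_vpc is simplified here.

#Illustrative wiring only; the real module likely attaches vpc_config conditionally
resource "aws_lambda_function" "this" {
  filename      = var.lambda_function_file
  function_name = var.lambda_function_name
  description   = var.lambda_function_description
  role          = var.lambda_function_iam_role_arn
  handler       = var.lambda_function_handler
  runtime       = var.lambda_function_runtime
  timeout       = var.lambda_function_timeout
  memory_size   = var.lambda_function_memory_size
  tags          = var.lambda_function_tags

  environment {
    variables = var.lambda_function_env
  }

  vpc_config {
    security_group_ids = var.lambda_function_security_groups
    subnet_ids         = var.lambda_function_subnets_id
  }
}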



52 changes: 43 additions & 9 deletions tf_files/aws/cognito/sample.tfvars
@@ -1,10 +1,44 @@

vpc_name = "INSERT VPC NAME HERE"
cognito_provider_name = "federation name"
cognito_domain_name = "subname for .auth.us-east-1.amazoncognito.com"
cognito_callback_urls = ["https://url1"]
cognito_provider_details = {"MetadataURL"="https://someurl"}
tags = {
"Organization" = "PlanX"
"Environment" = "CSOC"
}
#A list of allowed OAuth Flows
cognito_oauth_flows = ["code", "implicit"]

#A user directory for Amazon Cognito, which handles sign-on for users. This is generally given the same name as the
#name of the app using the service.
cognito_user_pool_name = "fence"

#The identity provider types that Cognito will use. An identity provider is a service that stores and manages
#identities. See: https://docs.aws.amazon.com/cognito-user-identity-pools/latest/APIReference/API_CreateIdentityProvider.html#CognitoUserPools-CreateIdentityProvider-request-ProviderType
cognito_provider_type = "SAML"

#The attribute mapping is how Cognito translates the information about a user received from an identity provider into
#the attributes that Cognito expects from a user.
#For more information, see: https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-specifying-attribute-mapping.html
cognito_attribute_mapping = {
"email" = "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress"
}

#The OAuth scopes specify what information from a user's account Cognito is able to access. Scopes are provider-specific, and
#you will need to consult the documentation for your identity provider to determine what scopes are necessary and valid
cognito_oauth_scopes = ["email", "openid"]

#Details about the auth provider, for this module most likely the MetadataURL or MetadataFILE
cognito_provider_details = {}

#The name of the VPC that the Cognito pool will be created in
vpc_name = ""

#The address of the sign-in and sign-up pages
cognito_domain_name = ""

#The URL(s) that can be redirected to after a successful sign-in
cognito_callback_urls = []

#The name of the identity provider. This is the name used within AWS
cognito_provider_name = ""

#A map containing key-value pairs used in AWS to filter and search for resources
tags = {
"Organization" = "PlanX"
"Environment" = "CSOC"
}
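
Because cognito_provider_details and cognito_attribute_mapping are the least self-explanatory values above, here is a hypothetical filled-in excerpt for a SAML provider; the metadata URL is a placeholder, and the exact keys your identity provider requires may differ.

#Hypothetical filled-in values for a SAML identity provider
cognito_provider_type     = "SAML"
cognito_provider_details  = {
  "MetadataURL" = "https://idp.example.org/saml/metadata"
}
cognito_attribute_mapping = {
  "email" = "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress"
}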
