Commit
added cloud_storage_telemetry_type variable
SadanandGowda committed Nov 16, 2023
1 parent bf876cc commit 61ce63b
Showing 1 changed file with 28 additions and 28 deletions.
56 changes: 28 additions & 28 deletions private_repo/ansible/inventory/dev/DataPipeline/common.yml
@@ -1,11 +1,11 @@
# ------------------------------------------------------------------------------------------------------------ #
# Mandatory variables - DO NOT LEAVE ANYTHING BLANK #
- cloud_service_provider: "" # Your cloud service provider name. Supported values are aws, azure, gcloud
- domain_name: "" # your domain name like example.com
+ cloud_service_provider: "" # Your cloud service provider name. Supported values are aws, azure, gcloud
+ domain_name: "" # your domain name like example.com
# docker hub details
- dockerhub: "change.docker.url" # docker hub username or url in case of private registry
- private_ingressgateway_ip: "" # your private kubernetes load balancer ip
- search_lb_ip: # search service Load balancer IP
+ dockerhub: "change.docker.url" # docker hub username or url in case of private registry
+ private_ingressgateway_ip: "" # your private kubernetes load balancer ip
+ search_lb_ip: # search service Load balancer IP

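For reference, a filled-in version of the mandatory block might look like the following sketch (every value here is illustrative, not a default):

    cloud_service_provider: "azure"        # one of: aws, azure, gcloud
    domain_name: "example.com"
    dockerhub: "myregistry"                # illustrative Docker Hub username
    private_ingressgateway_ip: "10.0.0.10" # illustrative private LB IP
    search_lb_ip: "10.0.0.11"              # illustrative search LB IP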
# Cloud Service Provider Variables
# If cloud_service_provider is AWS then update with access key as value
@@ -16,7 +16,7 @@ search_lb_ip: # search service Load balancer IP
# Example: cloud_public_storage_accountname: "cloudstorage-gcp-test.iam.gserviceaccount.com"
# If cloud_service_provider is AZURE then update with storage account name
# Example: cloud_public_storage_accountname: "azurestorageaccount"
-
+ cloud_storage_telemetry_type: ""
cloud_public_storage_accountname: ""
# If cloud_service_provider is AWS/OCI then update with region
# Example: cloud_public_storage_region: us-east-1
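Pulling the per-provider comments together, the account name would be filled in roughly as follows, depending on cloud_service_provider (the GCP and Azure values echo the examples above; the AWS value is an illustrative access key):

    # AWS:    cloud_public_storage_accountname: "AKIAXXXXXXXXXXXXXXXX"   # access key, illustrative
    # GCP:    cloud_public_storage_accountname: "cloudstorage-gcp-test.iam.gserviceaccount.com"
    # Azure:  cloud_public_storage_accountname: "azurestorageaccount"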
@@ -30,7 +30,7 @@ cloud_public_storage_namespace: ""
# Create object storage for each below mentioned variables and update accordingly
# If cloud_service_provider is AWS/OCI update with bucket name
# If cloud_service_provider is gcloud(GCP) update with bucket name
- # If cloud_service_provider is AZURE update with container name
+ # If cloud_service_provider is AZURE update with container name
# Example: cloud_storage_certqr_bucketname: "certqr-storage"
cloud_storage_certqr_bucketname: ""
# This storage contains chatbot related data
@@ -87,7 +87,7 @@ cloud_storage_label_bucketname: ""
cloud_storage_certservice_bucketname: ""
# This storage contains UCI services data
# Example: cloud_storage_uci_bucketname: "uci-storage"
- cloud_storage_uci_bucketname: ""
+ cloud_storage_uci_bucketname: ""
# This storage contains artifacts data
# Example: cloud_storage_artifacts_bucketname: "artifact-storage"
cloud_storage_artifacts_bucketname: ""
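As a sketch of the naming pattern for this section, following the examples given in the comments (names are placeholders; choose your own):

    cloud_storage_certqr_bucketname: "certqr-storage"
    cloud_storage_uci_bucketname: "uci-storage"
    cloud_storage_artifacts_bucketname: "artifact-storage"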
@@ -107,8 +107,8 @@ cloud_storage_url: "https://{{ cloud_public_storage_accountname }}.blob.core.win

# ------------------------------------------------------------------------------------------------------------ #
# Optional variables - Can be left blank if you don't plan to use the intended features
- env: dev # some name like dev, preprod etc
- proto: https # http or https, preferably https
+ env: dev # some name like dev, preprod etc
+ proto: https # http or https, preferably https

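The cloud_storage_url shown in the hunk header above is a Jinja2 template over cloud_public_storage_accountname; with an illustrative Azure account name it would render like this:

    cloud_public_storage_accountname: "azurestorageaccount"   # illustrative
    # cloud_storage_url then resolves to:
    #   https://azurestorageaccount.blob.core.windows.net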
# Azure media streaming service
stream_base_url: "" # Media service streaming url
@@ -120,13 +120,13 @@ media_service_azure_token_client_key: ""
media_service_azure_token_client_secret: ""

# data exhaust alerts
- data_exhaust_webhook_url: "slack.com" # Slack webhook URL
- data_exhaust_Channel: "slack.com" # Slack channel for data products alerts
- secor_alerts_slack_channel: "slack.com" # Slack channel name for secor alerts - Example #all_alerts_channel
+ data_exhaust_webhook_url: "slack.com" # Slack webhook URL
+ data_exhaust_Channel: "slack.com" # Slack channel for data products alerts
+ secor_alerts_slack_channel: "slack.com" # Slack channel name for secor alerts - Example #all_alerts_channel
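For working alerts, the webhook value would normally be a full Slack incoming-webhook URL rather than the bare "slack.com" placeholder, e.g. (both values illustrative):

    data_exhaust_webhook_url: "https://hooks.slack.com/services/T000/B000/XXXXXXXX"
    data_exhaust_Channel: "#dp-alerts"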

# ------------------------------------------------------------------------------------------------------------ #
# Sensible defaults which you need not change - but if you would like to change them, you are free to do so
- data_exhaust_name: "datapipeline-monitoring" # Slack notification name
+ data_exhaust_name: "datapipeline-monitoring" # Slack notification name
postgres:
db_url: "{{ groups['postgres'][0] }}"
db_username: analytics
@@ -138,9 +138,9 @@ postgres:
db_admin_password: "{{dp_vault_pgdb_admin_password}}"

druid_postgres_user: druid # Do not change this
- imagepullsecrets: "{{env}}registrysecret" # kubernetes imagePullSecrets
- kubeconfig_path: /var/lib/jenkins/secrets/k8s.yaml # kubeconfig file path on jenkins
- core_kubeconfig_path: "{{ kubeconfig_path }}" # kubeconfig file path on jenkins for core kube cluster, change this if you use a separate kube cluster for core and KP + DP
+ imagepullsecrets: "{{env}}registrysecret" # kubernetes imagePullSecrets
+ kubeconfig_path: /var/lib/jenkins/secrets/k8s.yaml # kubeconfig file path on jenkins
+ core_kubeconfig_path: "{{ kubeconfig_path }}" # kubeconfig file path on jenkins for core kube cluster, change this if you use a separate kube cluster for core and KP + DP
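Since core_kubeconfig_path defaults to kubeconfig_path through the Jinja2 reference above, a separate core cluster only needs an explicit override, e.g. (path illustrative):

    kubeconfig_path: /var/lib/jenkins/secrets/k8s.yaml
    core_kubeconfig_path: /var/lib/jenkins/secrets/core-k8s.yaml  # illustrative separate core kubeconfig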

# provide the s3 compatible endpoint
# for AWS
@@ -151,22 +151,22 @@ core_kubeconfig_path: "{{ kubeconfig_path }}" # kubeconfig file pat
cloud_public_storage_endpoint: ""

# Update below vars if separate object storage is required
- cloud_private_storage_accountname: "{{ cloud_public_storage_accountname }}"
- cloud_private_storage_endpoint: "{{ cloud_public_storage_endpoint }}"
- cloud_private_storage_region: "{{ cloud_public_storage_region }}"
+ cloud_private_storage_accountname: "{{ cloud_public_storage_accountname }}"
+ cloud_private_storage_endpoint: "{{ cloud_public_storage_endpoint }}"
+ cloud_private_storage_region: "{{ cloud_public_storage_region }}"
cloud_private_storage_project: "{{ cloud_public_storage_project }}"

- cloud_management_storage_accountname: "{{ cloud_public_storage_accountname }}"
- cloud_management_storage_endpoint: "{{ cloud_public_storage_endpoint }}"
- cloud_management_storage_region: "{{ cloud_public_storage_region }}"
+ cloud_management_storage_accountname: "{{ cloud_public_storage_accountname }}"
+ cloud_management_storage_endpoint: "{{ cloud_public_storage_endpoint }}"
+ cloud_management_storage_region: "{{ cloud_public_storage_region }}"
cloud_management_storage_project: "{{ cloud_public_storage_project }}"

- cloud_artifact_storage_accountname: "{{ cloud_public_storage_accountname }}"
- cloud_artifact_storage_endpoint: "{{ cloud_public_storage_endpoint }}"
- cloud_artifact_storage_region: "{{ cloud_public_storage_region }}"
+ cloud_artifact_storage_accountname: "{{ cloud_public_storage_accountname }}"
+ cloud_artifact_storage_endpoint: "{{ cloud_public_storage_endpoint }}"
+ cloud_artifact_storage_region: "{{ cloud_public_storage_region }}"
cloud_artifact_storage_project: "{{ cloud_public_storage_project }}"
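Each of these three blocks inherits the public values through Jinja2 references, so pointing one of them at separate storage is just a matter of overriding the relevant variables, e.g. (values illustrative):

    cloud_private_storage_accountname: "private-account"
    cloud_private_storage_region: "us-west-2"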

- ## Enable below vars to upload database backups in separate buckets
+ ## Enable below vars to upload database backups in separate buckets
# cloud_storage_cassandrabackup_bucketname: ""
# cloud_storage_dpcassandrabackup_bucketname: ""
# cloud_storage_dppostgresbackup_bucketname: ""
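Enabling one of these backup buckets is just a matter of uncommenting the variable and supplying a name, e.g. (name illustrative):

    cloud_storage_cassandrabackup_bucketname: "cassandra-backup-storage"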
@@ -301,4 +301,4 @@ processing_kafka_overriden_topics:
replication_factor: 1

# graylog
- send_logs_to_graylog: true # filebeat agents will send logs to graylog instead of ES
+ send_logs_to_graylog: true # filebeat agents will send logs to graylog instead of ES
