Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

added cloud_storage_telemetry_type variable #3920

Closed
Closed
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
58 changes: 30 additions & 28 deletions private_repo/ansible/inventory/dev/DataPipeline/common.yml
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
# ------------------------------------------------------------------------------------------------------------ #
# Mandatory variables - DO NOT LEAVE ANYTHING BLANK #
cloud_service_provider: "" # Your cloud service provider name. Supported values are aws, azure, gcloud
domain_name: "" # your domain name like example.com
cloud_service_provider: "" # Your cloud service provider name. Supported values are aws, azure, gcloud
domain_name: "" # your domain name like example.com
# docker hub details
dockerhub: "change.docker.url" # docker hub username or url in case of private registry
private_ingressgateway_ip: "" # your private kubernetes load balancer ip
search_lb_ip: # search service Load balancer IP
dockerhub: "change.docker.url" # docker hub username or url in case of private registry
private_ingressgateway_ip: "" # your private kubernetes load balancer ip
search_lb_ip: # search service Load balancer IP

# Cloud Service Provider Variables
# If cloud_service_provider is AWS then update with access key as value
Expand All @@ -16,7 +16,7 @@ search_lb_ip: # search service Load balancer IP
# Example: cloud_public_storage_accountname: "cloudstorage-gcp-test.iam.gserviceaccount.com"
# If cloud_service_provider is AZURE then update with storage account name
# Example: cloud_public_storage_accountname: "azurestorageaccount"

cloud_storage_telemetry_type: ""
cloud_public_storage_accountname: ""
# If cloud_service_provider is AWS/OCI then update with region
# Example: cloud_public_storage_region: us-east-1
Expand All @@ -30,7 +30,7 @@ cloud_public_storage_namespace: ""
# Create object storage for each below mentioned variables and update accordingly
# If cloud_service_provider is AWS/OCI update with bucket name
# If cloud_service_provider is gcloud(GCP) update with bucket name
# If cloud_service_provider is AZURE update with container name
# If cloud_service_provider is AZURE update with container name
# Example: cloud_storage_certqr_bucketname: "certqr-storage"
cloud_storage_certqr_bucketname: ""
# This storage contains chatbot related data
Expand Down Expand Up @@ -87,14 +87,16 @@ cloud_storage_label_bucketname: ""
cloud_storage_certservice_bucketname: ""
# This storage contains UCI services data
# Example: cloud_storage_uci_bucketname: "uci-storage"
cloud_storage_uci_bucketname: ""
cloud_storage_uci_bucketname: ""
# This storage contains artifacts data
# Example: cloud_storage_artifacts_bucketname: "artifact-storage"
cloud_storage_artifacts_bucketname: ""
# This storage contains backups data
# Example: cloud_storage_management_bucketname: "management-storage"
cloud_storage_management_bucketname: ""

cloud_storage_report_verfication_bucketname: ""

# Uncomment the variable based on your cloud provider (as a default we have kept Azure variable uncommented)
# GCP
# cloud_storage_url: https://storage.googleapis.com
Expand All @@ -107,8 +109,8 @@ cloud_storage_url: "https://{{ cloud_public_storage_accountname }}.blob.core.win

# ------------------------------------------------------------------------------------------------------------ #
# Optional variables - Can be left blank if you don't plan to use the intended features
env: dev # some name like dev, preprod etc
proto: https # http or https, preferably https
env: dev # some name like dev, preprod etc
proto: https # http or https, preferably https

# Azure media streaming service
stream_base_url: "" # Media service streaming url
Expand All @@ -120,13 +122,13 @@ media_service_azure_token_client_key: ""
media_service_azure_token_client_secret: ""

# data exhaust alerts
data_exhaust_webhook_url: "slack.com" # Slack webhook url
data_exhaust_Channel: "slack.com" # Slack channel for data products alerts
secor_alerts_slack_channel: "slack.com" # Slack channel name for secor alerts - Example #all_alerts_channel
data_exhaust_webhook_url: "slack.com" # Slack webhook url
data_exhaust_Channel: "slack.com" # Slack channel for data products alerts
secor_alerts_slack_channel: "slack.com" # Slack channel name for secor alerts - Example #all_alerts_channel

# ------------------------------------------------------------------------------------------------------------ #
# Sensible defaults which you need not change - But if you would like to change, you are free to do so
data_exhaust_name: "datapipeline-monitoring" # Slack notification name
data_exhaust_name: "datapipeline-monitoring" # Slack notification name
postgres:
db_url: "{{ groups['postgres'][0] }}"
db_username: analytics
Expand All @@ -138,9 +140,9 @@ postgres:
db_admin_password: "{{dp_vault_pgdb_admin_password}}"

druid_postgres_user: druid # Do not change this
imagepullsecrets: "{{env}}registrysecret" # kubernetes imagePullSecrets
kubeconfig_path: /var/lib/jenkins/secrets/k8s.yaml # kubeconfig file path on jenkins
core_kubeconfig_path: "{{ kubeconfig_path }}" # kubeconfig file path on jenkins for core kube cluster, change this if you use separate kube cluster for core and KP + DP
imagepullsecrets: "{{env}}registrysecret" # kubernetes imagePullSecrets
kubeconfig_path: /var/lib/jenkins/secrets/k8s.yaml # kubeconfig file path on jenkins
core_kubeconfig_path: "{{ kubeconfig_path }}" # kubeconfig file path on jenkins for core kube cluster, change this if you use separate kube cluster for core and KP + DP

# provide the s3 compatible endpoint
# for AWS
Expand All @@ -151,22 +153,22 @@ core_kubeconfig_path: "{{ kubeconfig_path }}" # kubeconfig file pat
cloud_public_storage_endpoint: ""

# Update below vars if separate object storage is required
cloud_private_storage_accountname: "{{ cloud_public_storage_accountname }}"
cloud_private_storage_endpoint: "{{ cloud_public_storage_endpoint }}"
cloud_private_storage_region: "{{ cloud_public_storage_region }}"
cloud_private_storage_accountname: "{{ cloud_public_storage_accountname }}"
cloud_private_storage_endpoint: "{{ cloud_public_storage_endpoint }}"
cloud_private_storage_region: "{{ cloud_public_storage_region }}"
cloud_private_storage_project: "{{ cloud_public_storage_project }}"

cloud_management_storage_accountname: "{{ cloud_public_storage_accountname }}"
cloud_management_storage_endpoint: "{{ cloud_public_storage_endpoint }}"
cloud_management_storage_region: "{{ cloud_public_storage_region }}"
cloud_management_storage_accountname: "{{ cloud_public_storage_accountname }}"
cloud_management_storage_endpoint: "{{ cloud_public_storage_endpoint }}"
cloud_management_storage_region: "{{ cloud_public_storage_region }}"
cloud_management_storage_project: "{{ cloud_public_storage_project }}"

cloud_artifact_storage_accountname: "{{ cloud_public_storage_accountname }}"
cloud_artifact_storage_endpoint: "{{ cloud_public_storage_endpoint }}"
cloud_artifact_storage_region: "{{ cloud_public_storage_region }}"
cloud_artifact_storage_accountname: "{{ cloud_public_storage_accountname }}"
cloud_artifact_storage_endpoint: "{{ cloud_public_storage_endpoint }}"
cloud_artifact_storage_region: "{{ cloud_public_storage_region }}"
cloud_artifact_storage_project: "{{ cloud_public_storage_project }}"

## Enable below vars to upload database backups in separate buckets
## Enable below vars to upload database backups in separate buckets
# cloud_storage_cassandrabackup_bucketname: ""
# cloud_storage_dpcassandrabackup_bucketname: ""
# cloud_storage_dppostgresbackup_bucketname: ""
Expand Down Expand Up @@ -301,4 +303,4 @@ processing_kafka_overriden_topics:
replication_factor: 1

# graylog
send_logs_to_graylog: true # filebeat agents will send logs to graylog instead of ES
send_logs_to_graylog: true # filebeat agents will send logs to graylog instead of ES