diff --git a/private_repo/ansible/inventory/dev/DataPipeline/common.yml b/private_repo/ansible/inventory/dev/DataPipeline/common.yml
index 40f9469f21..1108941180 100644
--- a/private_repo/ansible/inventory/dev/DataPipeline/common.yml
+++ b/private_repo/ansible/inventory/dev/DataPipeline/common.yml
@@ -1,11 +1,11 @@
 # ------------------------------------------------------------------------------------------------------------ #
 # Mandatory variables - DO NOT LEAVE ANYTHING BLANK #
-cloud_service_provider: "" # Your cloud service provider name. Supported values are aws, azure, gcloud
-domain_name: "" # your domain name like example.com
+cloud_service_provider: "" # Your cloud service provider name. Supported values are aws, azure, gcloud
+domain_name: "" # your domain name like example.com
 # docker hub details
-dockerhub: "change.docker.url" # docker hub username or url in case of private registry
-private_ingressgateway_ip: "" # your private kubernetes load balancer ip
-search_lb_ip: # search service Load balancer IP
+dockerhub: "change.docker.url" # docker hub username or url in case of private registry
+private_ingressgateway_ip: "" # your private kubernetes load balancer ip
+search_lb_ip: # search service Load balancer IP
 
 # Cloud Service Provider Variables
 # If cloud_service_provider is AWS then update with access key as value
@@ -16,7 +16,7 @@ search_lb_ip: # search service Load balancer IP
 # Example: cloud_public_storage_accountname: "cloudstorage-gcp-test.iam.gserviceaccount.com"
 # If cloud_service_provider is AZURE then update with storage account name
 # Example: cloud_public_storage_accountname: "azurestorageaccount"
-
+cloud_storage_telemetry_type: ""
 cloud_public_storage_accountname: ""
 # If cloud_service_provider is AWS/OCI then update with region
 # Example: cloud_public_storage_region: us-east-1
@@ -30,7 +30,7 @@ cloud_public_storage_namespace: ""
 # Create object storage for each below mentioned variables and update accordingly
 # If cloud_service_provider is AWS/OCI update with bucket name
 # If cloud_service_provider is gcloud(GCP) update with bucket name
-# If cloud_service_provider is AZURE update with container name
+# If cloud_service_provider is AZURE update with container name
 # Example: cloud_storage_certqr_bucketname: "certqr-storage"
 cloud_storage_certqr_bucketname: ""
 # This storage contains chatbot related data
@@ -87,7 +87,7 @@ cloud_storage_label_bucketname: ""
 cloud_storage_certservice_bucketname: ""
 # This storage contains UCI services data
 # Example: cloud_storage_uci_bucketname: "uci-storage"
-cloud_storage_uci_bucketname: ""
+cloud_storage_uci_bucketname: ""
 # This storage contains artifacts data
 # Example: cloud_storage_artifacts_bucketname: "artifact-storage"
 cloud_storage_artifacts_bucketname: ""
@@ -107,8 +107,8 @@ cloud_storage_url: "https://{{ cloud_public_storage_accountname }}.blob.core.win
 
 # ------------------------------------------------------------------------------------------------------------ #
 # Optional variables - Can be left blank if you don't plan to use the intended features
-env: dev # some name like dev, preprod etc
-proto: https # http or https, preferably https
+env: dev # some name like dev, preprod etc
+proto: https # http or https, preferably https
 
 # Azure media streaming service
 stream_base_url: "" # Media service streaming url
@@ -120,13 +120,13 @@ media_service_azure_token_client_key: ""
 media_service_azure_token_client_secret: ""
 
 # data exhaust alerts
-data_exhaust_webhook_url: "slack.com" # Slack webhook url
-data_exhaust_Channel: "slack.com" # Slack channel for data products alerts
"slack.com" # Slack channel for data products alerts -secor_alerts_slack_channel: "slack.com" # Slack channel name for secor alerts - Example #all_alerts_channel +data_exhaust_webhook_url: "slack.com" # Slack webhook url +data_exhaust_Channel: "slack.com" # Slack channel for data products alerts +secor_alerts_slack_channel: "slack.com" # Slack channel name for secor alerts - Example #all_alerts_channel # ------------------------------------------------------------------------------------------------------------ # # Sensible defaults which you need not change - But if you would like to change, you are free to do so -data_exhaust_name: "datapipeline-monitoring" # Slack notification name +data_exhaust_name: "datapipeline-monitoring" # Slack notification name postgres: db_url: "{{ groups['postgres'][0] }}" db_username: analytics @@ -138,9 +138,9 @@ postgres: db_admin_password: "{{dp_vault_pgdb_admin_password}}" druid_postgres_user: druid # Do not change this -imagepullsecrets: "{{env}}registrysecret" # kubernetes imagePullSecrets -kubeconfig_path: /var/lib/jenkins/secrets/k8s.yaml # kubeconfig file path on jenkins -core_kubeconfig_path: "{{ kubeconfig_path }}" # kubeconfig file path on jenkins for core kube cluster, change this if you use separate kube cluster for core and KP + DP +imagepullsecrets: "{{env}}registrysecret" # kubernetes imagePullSecrets +kubeconfig_path: /var/lib/jenkins/secrets/k8s.yaml # kubeconfig file path on jenkins +core_kubeconfig_path: "{{ kubeconfig_path }}" # kubeconfig file path on jenkins for core kube cluster, change this if you use separate kube cluster for core and KP + DP # provide the s3 compatible endpoint # for AWS @@ -151,22 +151,22 @@ core_kubeconfig_path: "{{ kubeconfig_path }}" # kubeconfig file pat cloud_public_storage_endpoint: "" # Update below vars if seperate object storage is required -cloud_private_storage_accountname: "{{ cloud_public_storage_accountname }}" -cloud_private_storage_endpoint: "{{ cloud_public_storage_endpoint }}" -cloud_private_storage_region: "{{ cloud_public_storage_region }}" +cloud_private_storage_accountname: "{{ cloud_public_storage_accountname }}" +cloud_private_storage_endpoint: "{{ cloud_public_storage_endpoint }}" +cloud_private_storage_region: "{{ cloud_public_storage_region }}" cloud_private_storage_project: "{{ cloud_public_storage_project }}" -cloud_management_storage_accountname: "{{ cloud_public_storage_accountname }}" -cloud_management_storage_endpoint: "{{ cloud_public_storage_endpoint }}" -cloud_management_storage_region: "{{ cloud_public_storage_region }}" +cloud_management_storage_accountname: "{{ cloud_public_storage_accountname }}" +cloud_management_storage_endpoint: "{{ cloud_public_storage_endpoint }}" +cloud_management_storage_region: "{{ cloud_public_storage_region }}" cloud_management_storage_project: "{{ cloud_public_storage_project }}" -cloud_artifact_storage_accountname: "{{ cloud_public_storage_accountname }}" -cloud_artifact_storage_endpoint: "{{ cloud_public_storage_endpoint }}" -cloud_artifact_storage_region: "{{ cloud_public_storage_region }}" +cloud_artifact_storage_accountname: "{{ cloud_public_storage_accountname }}" +cloud_artifact_storage_endpoint: "{{ cloud_public_storage_endpoint }}" +cloud_artifact_storage_region: "{{ cloud_public_storage_region }}" cloud_artifact_storage_project: "{{ cloud_public_storage_project }}" -## Enable below vars to upload database backups in seperate buckets +## Enable below vars to upload database backups in seperate buckets # 
 # cloud_storage_dpcassandrabackup_bucketname: ""
 # cloud_storage_dppostgresbackup_bucketname: ""
@@ -301,4 +301,4 @@ processing_kafka_overriden_topics:
     replication_factor: 1
 
 # graylog
-send_logs_to_graylog: true # filebeat agents will send logs to graylog instead of ES
+send_logs_to_graylog: true # filebeat agents will send logs to graylog instead of ES
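
For orientation, below is a minimal filled-in sketch of the mandatory block this patch touches, including the newly introduced cloud_storage_telemetry_type. Every value is a hypothetical placeholder (example.com names, private RFC 1918 IPs), and the assumption that cloud_storage_telemetry_type takes a provider-style value matching cloud_service_provider is ours, not the patch's; verify against your release documentation before use.

# Illustrative values only - do not copy verbatim
cloud_service_provider: "azure" # supported: aws, azure, gcloud
domain_name: "dev.example.com" # hypothetical domain
dockerhub: "registry.example.com" # private registry URL or docker hub username
private_ingressgateway_ip: "10.10.1.5" # placeholder private k8s LB IP
search_lb_ip: "10.10.1.6" # placeholder search service LB IP
cloud_storage_telemetry_type: "azure" # assumed to name the telemetry storage provider; confirm before use
cloud_public_storage_accountname: "devstorageaccount" # placeholder storage account name

Note that the cloud_private_storage_*, cloud_management_storage_* and cloud_artifact_storage_* variables default to their cloud_public_storage_* counterparts via Jinja templating, so they only need overriding when separate object storage is actually required.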