.pipeline.yml.azure.template
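
# CI/CD pipeline template for running the Spark Pi example on Azure with Banzai Cloud Pipeline.
# Replace the [[...]] placeholders with your own values before use; the {{ .PLUGIN_* }}
# variables are filled in from the secrets declared on each step.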
pipeline:
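  # Create the Kubernetes cluster on Azure in the given resource group, authenticating
  # against the Pipeline API with the plugin_endpoint / plugin_token secrets.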
  create_cluster:
    image: banzaicloud/plugin-pipeline-client:0.3.0
    cluster_name: "[[your-cluster-name]]"
    cluster_provider: "azure"
    azure_resource_group: "[[az_resource_group]]"
    secrets: [plugin_endpoint, plugin_token]
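
  # Deploy the cluster monitoring Helm chart (banzaicloud-stable/pipeline-cluster-monitor).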
  install_monitoring:
    image: banzaicloud/plugin-pipeline-client:0.3.0
    deployment_name: "banzaicloud-stable/pipeline-cluster-monitor"
    deployment_release_name: "monitor"
    secrets: [plugin_endpoint, plugin_token]
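
  # Deploy the Spark Helm chart with the History Server enabled; event logs are written
  # to and read from the wasb:// blob container configured below.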
  install_spark_resources:
    image: banzaicloud/plugin-pipeline-client:0.3.0
    deployment_name: "banzaicloud-stable/spark"
    deployment_release_name: "release-1"
    deployment_values:
      historyServer:
        enabled: true
      spark-hs:
        app:
          logDirectory: "wasb://[[your-blob-container]]@{{ .PLUGIN_AZURE_STORAGE_ACCOUNT }}.blob.core.windows.net/"
          azureStorageAccountName: "{{ .PLUGIN_AZURE_STORAGE_ACCOUNT }}"
          azureStorageAccessKey: "{{ .PLUGIN_AZURE_STORAGE_ACCOUNT_ACCESS_KEY }}"
    secrets: [plugin_endpoint, plugin_token, plugin_azure_storage_account, plugin_azure_storage_account_access_key]
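
  # Check out the application source on the remote cluster (plugins/git run through the k8s proxy).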
  remote_checkout:
    image: banzaicloud/plugin-k8s-proxy:0.3.0
    original_image: plugins/git
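
  # Build the Spark Pi application jar with Maven on the cluster.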
  remote_build:
    image: banzaicloud/plugin-k8s-proxy:0.3.0
    original_image: maven:3.5-jdk-8
    original_commands:
      - mvn clean package
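
  # Submit the SparkPi job to Kubernetes via spark-submit; dynamic allocation uses the
  # shuffle service, and event logs go to the same wasb:// directory the History Server reads.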
  run:
    image: banzaicloud/plugin-k8s-proxy:0.3.0
    original_image: banzaicloud/plugin-spark-submit-k8s:0.3.0
    proxy_service_account: spark
    spark_submit_options:
      class: banzaicloud.SparkPi
      kubernetes-namespace: default
    spark_submit_configs:
      spark.app.name: sparkpi
      spark.local.dir: /tmp/spark-locals
      spark.kubernetes.driver.docker.image: banzaicloud/spark-driver:v2.2.1-k8s-1.0.11
      spark.kubernetes.executor.docker.image: banzaicloud/spark-executor:v2.2.1-k8s-1.0.11
      spark.kubernetes.initcontainer.docker.image: banzaicloud/spark-init:v2.2.1-k8s-1.0.11
      spark.dynamicAllocation.enabled: "true"
      spark.kubernetes.resourceStagingServer.uri: http://spark-rss:10000
      spark.kubernetes.resourceStagingServer.internal.uri: http://spark-rss:10000
      spark.shuffle.service.enabled: "true"
      spark.kubernetes.shuffle.namespace: default
      spark.kubernetes.shuffle.labels: app=spark-shuffle-service,spark-version=2.2.0
      spark.kubernetes.authenticate.driver.serviceAccountName: spark
      spark.metrics.conf: /opt/spark/conf/metrics.properties
      spark.eventLog.enabled: "true"
      spark.eventLog.dir: "wasb://[[your-blob-container]]@{{ .PLUGIN_AZURE_STORAGE_ACCOUNT }}.blob.core.windows.net/"
      spark.hadoop.fs.azure.account.key.{{ .PLUGIN_AZURE_STORAGE_ACCOUNT }}.blob.core.windows.net: "{{ .PLUGIN_AZURE_STORAGE_ACCOUNT_ACCESS_KEY }}"
    spark_submit_app_args:
      - target/spark-pi-1.0-SNAPSHOT.jar
      - 1000
    secrets: [plugin_azure_storage_account, plugin_azure_storage_account_access_key]