Feature: Refactor Tiltfile for readability #293

nrawal47 opened this issue Aug 23, 2024 · 0 comments
Labels
enhancement New feature or request

Description

This feature refactors the Tiltfile for readability. The goal is to keep the file DRY and accessible to devs who are new to Tilt. FYI: I am an internal BAH employee submitting a PR to a public-facing repo.

DOD (Definition of Done)

Acceptance criteria required to realize the requested feature:

  • Tiltfile is as DRY as possible
  • Tiltfile retains original functionality
  • Tiltfile is more readable
  • In-line documentation helps new devs understand what is happening

Test Strategy/Script

How will this feature be verified? Run it and find out! Bring the full stack up with `tilt up` and tear it down with `tilt down`; everything should behave exactly as it did with the original Tiltfile.

References/Additional Context

The following is an example of a refactored Tiltfile that addresses the above:

allow_k8s_contexts('local')
docker_prune_settings(num_builds=1, keep_recent=1)

# Define the remote Docker Registry and aiSSEMBLE version
aissemble_version = '1.6.1'
docker_registry = '<your-docker-registry>'

# Define common base paths
deploy_apps_base_path = '<your-project>-deploy/src/main/resources/apps'
pipeline_values_base_path = '<your-project>-pipelines/<your-project>-object-data-delivery-pipeline/src/<project>_object_data_delivery_pipeline/resources/apps'

build_args = {
    'DOCKER_BASELINE_REPO_ID': docker_registry,
    'VERSION_AISSEMBLE': aissemble_version
}

# Tilt Extension for Deploying K8s Secrets
load('ext://secret', 'secret_from_dict')
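# NOTE: replace the CHANGEME placeholder values below with real AWS
# credentials before running `tilt up`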
k8s_yaml(secret_from_dict("aws-config", inputs={
    'AWS_ACCESS_KEY_ID': 'CHANGEME',
    'AWS_SECRET_ACCESS_KEY': 'CHANGEME',
    'AWS_SESSION_TOKEN': 'CHANGEME'
}))

# Kafka
kafka_chart = '%s/kafka-cluster' % deploy_apps_base_path
kafka_values = '%s/values.yaml' % kafka_chart
kafka_dev_values = '%s/values-dev.yaml' % kafka_chart

yaml = helm(
    kafka_chart,
    values=[kafka_values, kafka_dev_values]
)

k8s_yaml(yaml)

# Hive Metastore DB
hive_db_chart = '%s/hive-metastore-db' % deploy_apps_base_path
hive_db_values = '%s/values.yaml' % hive_db_chart
hive_db_dev_values = '%s/values-dev.yaml' % hive_db_chart

yaml = helm(
    hive_db_chart,
    name='hive-metastore-db',
    values=[hive_db_values, hive_db_dev_values]
)

k8s_yaml(yaml)

# Spark Operator
spark_op_chart = '%s/spark-operator' % deploy_apps_base_path
spark_op_values = '%s/values.yaml' % spark_op_chart
spark_op_dev_values = '%s/values-dev.yaml' % spark_op_chart

yaml = helm(
    spark_op_chart,
    name='spark-operator',
    values=[spark_op_values, spark_op_dev_values]
)

k8s_yaml(yaml)

# Metadata
meta_chart = '%s/metadata' % deploy_apps_base_path
meta_values = '%s/values.yaml' % meta_chart
meta_dev_values = '%s/values-dev.yaml' % meta_chart

yaml = helm(
    meta_chart,
    name='metadata',
    values=[meta_values, meta_dev_values]
)

k8s_yaml(yaml)

## Docker Build for Policy Decision Point ##
docker_build(
    ref='<your-registry>/<your-project>/aissemble/<your-project>-policy-decision-point-docker',
    context='<your-project>-docker/<your-project>-policy-decision-point-docker',
    build_args=build_args,
    dockerfile='<your-project>-docker/<your-project>-policy-decision-point-docker/src/main/resources/docker/Dockerfile'
)

# Policy Decision Point
pdp_chart = '%s/policy-decision-point' % deploy_apps_base_path
pdp_values = '%s/values.yaml' % pdp_chart
pdp_dev_values = '%s/values-dev.yaml' % pdp_chart

yaml = helm(
    pdp_chart,
    name='policy-decision-point',
    values=[pdp_values, pdp_dev_values]
)

k8s_yaml(yaml)

# Spark Infrastructure
spark_infra_chart = '%s/spark-infrastructure' % deploy_apps_base_path
spark_infra_values = '%s/values.yaml' % spark_infra_chart
spark_infra_dev_values = '%s/values-dev.yaml' % spark_infra_chart

yaml = helm(
    spark_infra_chart,
    name='spark-infrastructure',
    values=[spark_infra_values, spark_infra_dev_values]
)

k8s_yaml(yaml)

# Hive Metastore Service
hive_svc_chart = '%s/hive-metastore-service' % deploy_apps_base_path
hive_svc_values = '%s/values.yaml' % hive_svc_chart
hive_svc_dev_values = '%s/values-dev.yaml' % hive_svc_chart

yaml = helm(
    hive_svc_chart,
    name='hive-metastore-service',
    values=[hive_svc_values, hive_svc_dev_values]
)

k8s_yaml(yaml)

# The service depends on the DB to be up and running
k8s_resource('hive-metastore-service', resource_deps=['hive-metastore-db'])

# S3-Localstack
s3_chart = '%s/s3-local' % deploy_apps_base_path
s3_values = '%s/values.yaml' % s3_chart
s3_dev_values = '%s/values-dev.yaml' % s3_chart

yaml = helm(
    s3_chart,
    name='s3-local',
    values=[s3_values, s3_dev_values]
)

k8s_yaml(yaml)

# Pipeline Invocation Service
pis_chart = '%s/pipeline-invocation-service' % deploy_apps_base_path
pis_values = '%s/values.yaml' % pis_chart
pis_dev_values = '%s/values-dev.yaml' % pis_chart

yaml = helm(
    pis_chart,
    name='pipeline-invocation-service',
    values=[pis_values, pis_dev_values]
)

k8s_yaml(yaml)

# Add deployment resources here

## Docker Build for Spark Worker Image ##
docker_build(
    ref='aissemble/<your-project>-spark-worker-docker',
    context='<your-project>-docker/<your-project>-spark-worker-docker',
    build_args=build_args,
    extra_tag='aissemble/<your-project>-spark-worker-docker:latest',
    dockerfile='<your-project>-docker/<your-project>-spark-worker-docker/src/main/resources/docker/Dockerfile'
)

# Spark Worker
k8s_yaml('%s/spark-worker-image/spark-worker-image.yaml' % deploy_apps_base_path)

# PROJECT Object Data Delivery Pipeline Compiler
pipelines_path = '<your-project>-pipelines/<your-project>-object-data-delivery-pipeline'
spark_worker_docker_path = '<your-project>-docker/<your-project>-spark-worker-docker/target/dockerbuild'

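# Compile step: run the pipeline's behave tests, build the wheel with Poetry,
# and copy the built artifacts into the Spark worker Docker build context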
local_resource(
    name='compile-<your-project>-object-data-delivery-pipeline',
    cmd='cd %s && poetry run behave tests/features \
    && poetry build \
    && cd - \
    && cp -r %s/dist/* %s/<your-project>-object-data-delivery-pipeline \
    && cp %s/dist/requirements.txt %s/requirements/<your-project>-object-data-delivery-pipeline' \
    % (pipelines_path,pipelines_path,spark_worker_docker_path,pipelines_path,spark_worker_docker_path),
    deps=[pipelines_path],
    auto_init=True,
    ignore=['**/dist/']
)

# PROJECT Object Data Delivery Pipeline - `SparkApplication`
yaml = local('helm template aissemble-spark-application \
--version %s \
--values %s/<your-project>-object-data-delivery-pipeline-base-values.yaml,%s/<your-project>-object-data-delivery-pipeline-dev-values.yaml \
--repo <helm-registry>' \
% (aissemble_version,pipeline_values_base_path,pipeline_values_base_path))

k8s_yaml(yaml)

k8s_resource(
    '<your-project>-object-data-delivery-pipeline',
    port_forwards=[port_forward(4747, 4747, 'debug')],
    auto_init=False,
    trigger_mode=TRIGGER_MODE_MANUAL
)

# This tells Tilt where to find the image associated with any SparkApplications in our project.
# This is currently pointing to sparkApp.spec.image inside the base or dev values.yaml files.
k8s_kind('SparkApplication', image_json_path='{.spec.image}')
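
As a possible further step toward the DRY criterion, the repeated chart-path/values/helm/k8s_yaml pattern above could be collapsed into a small Starlark helper. The sketch below is illustrative only: deploy_helm_chart is a hypothetical name, not a Tilt built-in, and it simply composes the helm(), k8s_yaml(), and k8s_resource() calls already used in the example.

# Hypothetical helper: deploy one chart from the shared apps directory,
# layering values.yaml and values-dev.yaml, with optional resource deps.
def deploy_helm_chart(chart_name, release_name='', resource_deps=None):
    chart = '%s/%s' % (deploy_apps_base_path, chart_name)
    yaml = helm(
        chart,
        name=release_name,
        values=['%s/values.yaml' % chart, '%s/values-dev.yaml' % chart]
    )
    k8s_yaml(yaml)
    # Only register explicit dependencies when they are requested
    if resource_deps:
        k8s_resource(release_name, resource_deps=resource_deps)

# Example usage, replacing the per-chart blocks above:
deploy_helm_chart('kafka-cluster')
deploy_helm_chart('hive-metastore-db', release_name='hive-metastore-db')
deploy_helm_chart('hive-metastore-service', release_name='hive-metastore-service',
                  resource_deps=['hive-metastore-db'])

This keeps each deployment to a single call while preserving the per-chart values files, which should satisfy both the DRY and readability criteria without changing functionality.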
