Checkout custom actions from the event sha #73

Workflow file for this run

# DO NOT EDIT MANUALLY - This file is auto-generated from `/flowzone.yml`
name: Flowzone
on:
workflow_call:
secrets:
GH_APP_PRIVATE_KEY:
description: GitHub App to generate ephemeral access tokens
required: false
FLOWZONE_TOKEN:
description: ... or a Personal Access Token (PAT) with admin/owner permissions in the org.
required: false
NPM_TOKEN:
description: The npm auth token to use for publishing
required: false
DOCKERHUB_USER:
description: Username to publish to the Docker Hub container registry
required: false
DOCKER_REGISTRY_USER:
description: Deprecated, use DOCKERHUB_USER instead
required: false
DOCKERHUB_TOKEN:
description: A personal access token to publish to the Docker Hub container registry
required: false
DOCKER_REGISTRY_PASS:
description: Deprecated, use DOCKERHUB_TOKEN instead
required: false
BALENA_API_KEY:
description: API key for pushing releases to balena applications
required: false
BALENA_API_KEY_PUSH:
description: Deprecated, use BALENA_API_KEY instead
required: false
CARGO_REGISTRY_TOKEN:
description: A personal access token to publish to a cargo registry
required: false
COMPOSE_VARS:
description: Optional base64 encoded docker-compose `.env` file for testing Docker images
required: false
CF_ACCOUNT_ID:
description: Cloudflare account ID
required: false
CF_API_TOKEN:
description: Cloudflare API token with limited access for Pages projects
required: false
CUSTOM_JOB_SECRET_1:
description: Optional secret for use with custom jobs
required: false
CUSTOM_JOB_SECRET_2:
description: Optional secret for use with custom jobs
required: false
CUSTOM_JOB_SECRET_3:
description: Optional secret for use with custom jobs
required: false
OPENAI_API_KEY:
description: OpenAI API Key for GPT pull request reviews
required: false
OPENAI_ORG_KEY:
description: OpenAI Organization ID for GPT pull request reviews
required: false
inputs:
aws_region:
description: AWS region with GitHub OIDC provider IAM configuration
type: string
required: false
default: ${{ vars.AWS_REGION || '' }}
aws_iam_role:
description: AWS IAM role ARN to assume with GitHub OIDC provider
type: string
required: false
default: ${{ vars.AWS_IAM_ROLE || '' }}
cloudformation_templates:
description: |
AWS CloudFormation templates to deploy (e.g.)
```
{
"stacks": [
{
"name": "foo",
"template": "aws/bar.yaml",
"tags": [
"Name=foo",
"Environment=${FOO}"
],
"capabilities": [
"CAPABILITY_IAM",
"CAPABILITY_NAMED_IAM"
]
},
...
]
}
```
* assumes `aws/bar.yaml` exists.
* `${ENVVARS}` injected at runtime from `vars` and `secrets` contexts
type: string
required: false
default: ""
terraform_projects:
description: |
Terraform projects to deploy (e.g.)
```
{
"projects": [
{
"main_tf": "terraform/foo"
}
]
}
```
* assumes `terraform/foo/main.tf` exists
* GitHub `vars` and `secrets` contexts injected into TF plan/apply steps
type: string
required: false
default: ""
app_id:
description: GitHub App id to impersonate
type: string
required: false
default: ${{ vars.APP_ID || '291899' }}
installation_id:
description: GitHub App installation id
type: string
required: false
default: ${{ vars.INSTALLATION_ID || '34040165' }}
token_scope:
description: Ephemeral token scope(s)
type: string
required: false
default: |-
{
"administration": "write",
"contents": "write",
"metadata": "read",
"packages": "write",
"pages": "write",
"pull_requests": "read"
}
jobs_timeout_minutes:
description: Timeout for the job(s).
type: number
required: false
default: 360
working_directory:
description: GitHub actions working directory
type: string
required: false
default: .
docker_images:
description: Comma-delimited string of Docker images (without tags) to publish (skipped if empty)
type: string
required: false
default: ""
bake_targets:
description: Comma-delimited string of Docker buildx bake targets to publish (skipped if empty)
type: string
required: false
default: default
docker_invert_tags:
description: Invert the tags for the Docker images (e.g. `{tag}-{variant}` becomes `{variant}-{tag}`)
type: boolean
required: false
default: false
docker_publish_platform_tags:
description: Publish platform-specific tags in addition to multi-arch manifests (e.g. `product-os/flowzone:latest-amd64`)
type: boolean
required: false
default: false
balena_environment:
description: balenaCloud environment
type: string
required: false
default: balena-cloud.com
balena_slugs:
description: Comma-delimited string of balenaCloud apps, fleets, or blocks to deploy (skipped if empty)
type: string
required: false
default: ""
cargo_targets:
description: Comma-delimited string of Rust stable targets to publish (skipped if empty)
type: string
required: false
default: |
aarch64-unknown-linux-gnu,
armv7-unknown-linux-gnueabihf,
arm-unknown-linux-gnueabihf,
x86_64-unknown-linux-gnu,
i686-unknown-linux-gnu
rust_toolchain:
description: Version specifier (e.g. 1.65, stable, nightly) for the toolchain to use when building Rust sources
type: string
required: false
default: stable
rust_binaries:
description: Set to true to publish Rust binary release artifacts to GitHub
type: boolean
required: false
default: false
pseudo_terminal:
description: Set to true to enable terminal emulation for test steps
type: boolean
required: false
default: false
repo_config:
description: Set to true to standardise repository settings after a successful run
type: boolean
required: false
default: false
repo_allow_forking:
description: Allow forking of an organization repository
type: boolean
required: false
default: true
repo_default_branch:
description: Set the default branch name for the repository
type: string
required: false
default: master
repo_delete_branch_on_merge:
description: Delete head branch when pull requests are merged
type: boolean
required: false
default: true
repo_allow_update_branch:
description: Always suggest updating pull request branches
type: boolean
required: false
default: true
repo_description:
description: Description of the repository
type: string
required: false
default: ""
repo_homepage:
description: Repository home page URL
type: string
required: false
default: ""
repo_enable_auto_merge:
description: Enable auto-merge functionality
type: boolean
required: false
default: true
repo_enable_issues:
description: Enable issues in the repository
type: boolean
required: false
default: true
repo_enable_merge_commit:
description: Enable merging pull requests via merge commit
type: boolean
required: false
default: true
repo_enable_projects:
description: Enable projects in the repository
type: boolean
required: false
default: false
repo_enable_rebase_merge:
description: Enable merging pull requests via rebase
type: boolean
required: false
default: false
repo_enable_squash_merge:
description: Enable merging pull requests via squashed commit
type: boolean
required: false
default: false
repo_enable_wiki:
description: Enable wiki in the repository
type: boolean
required: false
default: false
repo_visibility:
description: Change the visibility of the repository to {public,private,internal}
type: string
required: false
default: default
disable_versioning:
description: Set to true to disable automatic versioning
type: boolean
required: false
default: false
job_name:
description: The name of the job, necessary for branch protection if not using the default of 'Flowzone'
type: string
required: false
default: Flowzone
checkout_fetch_depth:
description: Configures the depth of the actions/checkout git fetch.
type: number
required: false
default: 1
tests_run_on:
description: Deprecated, use 'custom_runs_on' input instead.
type: string
required: false
default: ""
runs_on:
description: JSON array of runner label strings for generic jobs.
type: string
required: false
default: |
[
"ubuntu-22.04"
]
custom_runs_on:
description: JSON 2-dimensional matrix of runner label strings for custom jobs.
type: string
required: false
default: |
[
["ubuntu-22.04"]
]
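# Illustrative only (labels below are placeholders, not part of this file): each inner array of
# custom_runs_on selects one runner for one matrix leg of the custom jobs, e.g.
#   custom_runs_on: '[["ubuntu-22.04"], ["self-hosted", "X64"]]'
# would run each custom action once on a hosted runner and once on a matching self-hosted runner.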
docker_runs_on:
description: JSON key-value pairs mapping platforms to arrays of runner labels. Unlisted platforms will use `runs_on`.
type: string
required: false
default: "{}"
cloudflare_website:
description: Setting this to your existing CF pages project name will generate and deploy a website. Skipped if empty.
type: string
required: false
default: ""
docusaurus_website:
description: Set to false to disable building a docusaurus website. If false, the script `npm run deploy-docs` will be run if it exists.
type: boolean
required: false
default: true
github_prerelease:
description: Finalize releases on merge.
type: boolean
required: false
default: false
restrict_custom_actions:
description: Do not execute custom actions for external contributors. Only remove this restriction if custom actions have been vetted as secure.
type: boolean
required: false
default: true
custom_test_matrix:
description: Comma-delimited string of values that will be passed to the custom test action.
type: string
required: false
default: ""
custom_publish_matrix:
description: Comma-delimited string of values that will be passed to the custom publish action.
type: string
required: false
default: ""
custom_finalize_matrix:
description: Comma-delimited string of values that will be passed to the custom finalize action.
type: string
required: false
default: ""
protect_branch:
description: Set to false to disable updating branch protection rules after a successful run.
type: boolean
required: false
default: true
required_approving_review_count:
description: Count of GitHub approved reviews required for Pull Requests to be merged. Set to 0 if using palantir/policy-bot for PR merge conditions.
type: string
required: false
default: "0"
required_status_checks:
description: JSON array of status checks that must pass before a Pull Request can be merged. Skipped if `protect_branch` is false.
type: string
required: false
default: |
[
"Flowzone / All tests",
"Flowzone / All jobs",
"policy-bot: ${{ github.event.repository.default_branch }}"
]
toggle_auto_merge:
description: Set to false to disable toggling auto-merge on PRs.
type: boolean
required: false
default: true
enable_gpt_review:
description: Set to false to disable GPT pull request review generation.
type: boolean
required: false
default: false
ok_to_test_label:
description: Require a label before running checks for external contributions (forks).
type: string
required: false
default: ok-to-test
concurrency:
group: ${{ github.workflow }}-${{ github.event.number || github.ref }}
cancel-in-progress: ${{ github.event.action == 'synchronize' }}
env:
NPM_REGISTRY: https://registry.npmjs.org
CARGO_REGISTRY: crates.io
jobs:
event_types:
name: Event Types
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
if: |
(
(
github.event_name == 'pull_request' ||
github.event_name == 'pull_request_target'
) && (
github.event.action == 'opened' ||
github.event.action == 'synchronize' ||
github.event.action == 'closed'
)
) || (
github.event_name == 'push' &&
startsWith(github.ref, 'refs/tags/')
)
strategy:
fail-fast: true
matrix:
include:
- event_name: ${{ github.event_name }}
event_action: ${{ github.event.action }}
steps:
- name: Reject external pull_request events on pull_request
if: |
github.event_name == 'pull_request' &&
github.event.pull_request.head.repo.full_name != github.repository
run: |
echo "::error::External workflows can not be used with `pull_request` events. \
Please contact a member of the organization for assistance."
exit 1
- name: Reject internal pull_request events on pull_request_target
if: |
github.event_name == 'pull_request_target' &&
github.event.pull_request.head.repo.full_name == github.repository
run: |
echo "::error::Internal workflows should not be used with `pull_request_target` events. \
Please consult the documentation for more information."
exit 1
- name: Reject unapproved pull_request_target events
if: |
inputs.ok_to_test_label != '' &&
github.event.pull_request.state == 'open' &&
github.event.pull_request.head.repo.full_name != github.repository
env:
GH_DEBUG: "true"
GH_PAGER: cat
GH_PROMPT_DISABLED: "true"
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
pr_labels="$(gh pr view ${{ github.event.pull_request.number }} --json labels -q .labels[].name)"
for label in "${pr_labels}"
do
if [[ "$label" =~ "${{ inputs.ok_to_test_label }}" ]]
then
gh pr edit ${{ github.event.pull_request.number }} --remove-label "${{ inputs.ok_to_test_label }}"
exit 0
fi
done
echo "::error::External contributions must be approved with the label '${{ inputs.ok_to_test_label }}'. \
Please contact a member of the organization for assistance."
exit 1
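# Maintainer hint (comment only; assumes the default label name): an external PR is approved for
# CI by adding the label, e.g.
#   gh pr edit <number> --add-label ok-to-test
# The step above then removes the label on success, so new pushes need the label re-applied.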
- name: Reject missing secrets
run: |
if [ -z '${{ secrets.FLOWZONE_TOKEN }}${{ secrets.GH_APP_PRIVATE_KEY }}' ]
then
echo '::error::Must specify either GH_APP_PRIVATE_KEY or FLOWZONE_TOKEN.'
false
fi
- name: Warn if GPT Review is skipped
if: |
github.event.pull_request.state == 'open' &&
github.event.repository.private &&
inputs.enable_gpt_review
run: echo "::warning::GPT Review is not supported for private repositories!"
- name: Log GitHub context
env:
GITHUB_CONTEXT: ${{ toJSON(github) }}
run: echo "${GITHUB_CONTEXT}" || true
versioned_source:
name: Versioned source
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- event_types
if: |
github.event.action != 'closed' || github.event.pull_request.merged == true
defaults:
run:
working-directory: .
shell: bash --noprofile --norc -eo pipefail -x {0}
outputs:
tag: ${{ steps.versionist.outputs.tag || steps.git_describe.outputs.tag }}
semver: ${{ steps.versionist.outputs.semver || steps.git_describe.outputs.semver }}
sha: ${{ steps.create_tag.outputs.sha || steps.git_describe.outputs.sha }}
commit_sha: ${{ steps.create_commit.outputs.sha }}
tag_sha: ${{ steps.create_tag.outputs.sha }}
env:
GH_DEBUG: "true"
GH_PAGER: cat
GH_PROMPT_DISABLED: "true"
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"administration": "write",
"contents": "write",
"metadata": "read",
"pull_requests": "read"
}
- name: Checkout merge ref
if: github.event.pull_request.state == 'open'
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: refs/pull/${{ github.event.number }}/merge
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Checkout event sha
if: github.event.pull_request.state != 'open'
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ github.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Reject merge commits
if: github.event.pull_request.state == 'open'
run: |
if [ "$(git cat-file -p ${{ github.event.pull_request.head.sha || github.event.head_commit.id }} | grep '^parent ' | wc -l)" -gt 1 ]
then
echo "::error::Latest commit appears to be a merge, which is currently unsupported. Try a rebase instead."
exit 1
fi
- name: Describe git state
id: git_describe
run: |
tag="$(git tag --points-at HEAD | tail -n1)"
echo "tag=${tag}" >> $GITHUB_OUTPUT
echo "semver=$(npx -q -y -- semver -c -l "${tag}")" >> $GITHUB_OUTPUT
echo "describe=$(git describe --tags --always --dirty | cat)" >> $GITHUB_OUTPUT
echo "sha=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
- name: Setup Node.js
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7
with:
node-version: 18.x
- name: Install versionist
if: inputs.disable_versioning != true
run: |
npm install -g balena-versionist@~0.14.13 versionist@^7.0.3
npm ls -g balena-versionist
npm ls -g versionist
- name: Generate changelog
if: inputs.disable_versioning != true
env:
GH_TOKEN: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
run: |
if [ ! -f .versionbot/CHANGELOG.yml ]
then
$(npm root -g)/versionist/scripts/generate-changelog.sh .
fi
- name: Run versionist
if: inputs.disable_versioning != true
id: versionist
env:
GITHUB_TOKEN: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
run: |
out="$(balena-versionist 2>&1)"
error="$(awk '/Error:/{getline; print}' <<< "${out}")"
case ${error} in
"") # no error
;;
'No such file or directory'*'/package.json')
echo "::error file=.versionbot/CHANGELOG.yml,line=1::Versionist expects a package.json if repo.yml does not provide a 'type' for the project"
;;
*)
echo "::error::${error}"
exit 1
;;
esac
git status --porcelain
versions=()
[ -f .versionbot/CHANGELOG.yml ] && versions+=($(yq e '.[0].version' .versionbot/CHANGELOG.yml))
semver="${versions[0]}"
echo "semver=${semver}" >> $GITHUB_OUTPUT
echo "tag=v${semver}" >> $GITHUB_OUTPUT
- name: Create blobs and tree objects
if: inputs.disable_versioning != true
id: create_tree
shell: bash
env:
GH_TOKEN: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
PARENT_COMMIT_SHA: ${{ steps.git_describe.outputs.sha }}
run: |
# Temporary array to hold our new tree objects
declare -a tree_array
# Use git status to check for new and modified files
modified_files=$(git diff --name-only ; git ls-files --others --exclude-standard)
# Extract changes
for file in $modified_files; do
echo "Creating blob of file $file..."
base64 -w0 "$file" > content.base64
response=$(gh api -X POST /repos/$GH_REPO/git/blobs \
-F '[email protected]' \
-F 'encoding=base64')
rm content.base64
echo "$response" | jq .
blob_sha=$(echo $response | jq -r .sha)
# Add blob to our tree
tree_array+=("{\"path\":\"$file\",\"mode\":\"100644\",\"type\":\"blob\",\"sha\":\"$blob_sha\"}")
done
# Get the SHA of the tree the parent commit points to
base_tree_sha=$(git show -s --format=%T $PARENT_COMMIT_SHA)
# Create JSON array for tree creation
tree_json=$(printf ",%s" "${tree_array[@]}")
tree_json=${tree_json:1}
tree_json="[$tree_json]"
tree_json="{\"tree\": $tree_json, \"base_tree\": \"$base_tree_sha\"}"
echo "Creating tree..."
echo "$tree_json" | jq .
response=$(echo $tree_json | gh api -X POST /repos/$GH_REPO/git/trees --input -)
echo "$response" | jq .
echo "sha=$(echo $response | jq -r .sha)" >> $GITHUB_OUTPUT
echo "json=$(echo $response | jq -c .)" >> $GITHUB_OUTPUT
- name: Create commit object
if: inputs.disable_versioning != true
id: create_commit
env:
GH_TOKEN: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
MESSAGE: ${{ steps.versionist.outputs.tag }}
PARENT_COMMIT_SHA: ${{ steps.git_describe.outputs.sha }}
run: |
response=$(gh api -X POST /repos/$GH_REPO/git/commits \
-F "message=$MESSAGE" \
-F "tree=${{ steps.create_tree.outputs.sha }}" \
-F "parents[]=$PARENT_COMMIT_SHA")
echo "$response" | jq .
echo "sha=$(echo $response | jq -r .sha)" >> $GITHUB_OUTPUT
echo "json=$(echo $response | jq -c .)" >> $GITHUB_OUTPUT
- name: Create tag object
if: inputs.disable_versioning != true
id: create_tag
env:
GH_TOKEN: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
TAG: ${{ steps.versionist.outputs.tag }}
MESSAGE: ${{ steps.versionist.outputs.tag }}
run: |
response=$(gh api -X POST repos/$GH_REPO/git/tags \
-F "tag=${TAG}" \
-F "message=${MESSAGE}" \
-F "object=${{ steps.create_commit.outputs.sha }}" \
-F "type=commit")
echo "$response" | jq .
echo "sha=$(echo $response | jq -r .sha)" >> $GITHUB_OUTPUT
echo "json=$(echo $response | jq -c .)" >> $GITHUB_OUTPUT
- name: Update branch reference
if: github.event.pull_request.merged == true && steps.create_commit.outputs.sha != ''
env:
GH_TOKEN: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
run: |
gh api \
-X PATCH \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
/repos/$GH_REPO/git/refs/heads/${{ github.base_ref }} \
-f sha='${{ steps.create_commit.outputs.sha }}' \
-F force=true \
--include
- name: Create tag reference
if: github.event.pull_request.merged == true && steps.create_tag.outputs.sha != ''
env:
GH_TOKEN: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
run: |
gh api \
-X POST \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
/repos/$GH_REPO/git/refs \
-f ref='refs/tags/${{ steps.versionist.outputs.tag }}' \
-f sha='${{ steps.create_tag.outputs.sha }}' \
--include
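# For orientation only (a local sketch, not executed here): the API steps above are the remote
# equivalent of standard git plumbing, roughly:
#   git hash-object -w <file>                      # blob
#   git mktree                                     # tree
#   git commit-tree <tree> -p <parent> -m <tag>    # commit
#   git mktag                                      # annotated tag object
#   git update-ref refs/heads/<branch> <commit>    # move the branch
# Going through the REST API lets the objects be created under the App/PAT token identity
# without pushing from the runner's checkout.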
is_npm:
name: Is npm
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- versioned_source
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
outputs:
npm: ${{ steps.npm.outputs.enabled }}
has_npm_lockfile: ${{ steps.npm_lock.outputs.has_npm_lockfile }}
npm_private: ${{ steps.npm.outputs.private }}
npm_docs: ${{ steps.npm.outputs.docs }}
node_versions: ${{ steps.node_versions.outputs.json }}
npm_access: ${{ steps.access.outputs.access }}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Check for package.json
id: npm
run: |
if test -f "package.json"
then
echo "found package.json"
echo "enabled=true" >> $GITHUB_OUTPUT
echo "private=$(jq -r '.private' package.json)" >> $GITHUB_OUTPUT
echo "docs=$(jq -r '.scripts | has("doc")' package.json)" >> $GITHUB_OUTPUT
echo "NODE_VERSIONS=[]" >> $GITHUB_ENV
else
echo "enabled=false" >> $GITHUB_OUTPUT
fi
- name: Check for package locks
id: npm_lock
run: |
has_npm_lockfile="$([ -e package-lock.json ] || [ -e npm-shrinkwrap.json ] && echo true || echo false)"
echo "has_npm_lockfile=${has_npm_lockfile}" >> $GITHUB_OUTPUT
- name: Set access
id: access
run: |
access="public"
if [ "${{ github.event.repository.private }}" = "true" ]
then
access="restricted"
fi
echo "access=${access}" >> $GITHUB_OUTPUT
- name: Setup Node.js
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7
with:
node-version: 12.x
if: steps.npm.outputs.enabled == 'true'
- name: Check engine
if: steps.npm.outputs.enabled == 'true'
run: |
if npx -q -y -- check-engine
then
echo "NODE_VERSIONS=$(echo "${NODE_VERSIONS}" | jq -c '. + ["12.x"]')" >> $GITHUB_ENV
fi
- name: Setup Node.js
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7
with:
node-version: 14.x
if: steps.npm.outputs.enabled == 'true'
- name: Check engine
if: steps.npm.outputs.enabled == 'true'
run: |
if npx -q -y -- check-engine
then
echo "NODE_VERSIONS=$(echo "${NODE_VERSIONS}" | jq -c '. + ["14.x"]')" >> $GITHUB_ENV
fi
- name: Setup Node.js
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7
with:
node-version: 16.x
if: steps.npm.outputs.enabled == 'true'
- name: Check engine
if: steps.npm.outputs.enabled == 'true'
run: |
if npx -q -y -- check-engine
then
echo "NODE_VERSIONS=$(echo "${NODE_VERSIONS}" | jq -c '. + ["16.x"]')" >> $GITHUB_ENV
fi
- name: Setup Node.js
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7
with:
node-version: 18.x
if: steps.npm.outputs.enabled == 'true'
- name: Check engine
if: steps.npm.outputs.enabled == 'true'
run: |
if npx -q -y -- check-engine
then
echo "NODE_VERSIONS=$(echo "${NODE_VERSIONS}" | jq -c '. + ["18.x"]')" >> $GITHUB_ENV
fi
- name: Set Node.js versions
if: steps.npm.outputs.enabled == 'true'
id: node_versions
run: |
echo "json=[\"16.x\"]" >> $GITHUB_OUTPUT
if [ "${NODE_VERSIONS}" != "[]" ]
then
echo "json=${NODE_VERSIONS}" >> $GITHUB_OUTPUT
fi
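# Note: the `check-engine` probes above keep a Node.js version in the test matrix only if it
# satisfies the `engines` range declared in package.json; e.g. (illustrative)
#   "engines": { "node": ">=16" }
# would keep 16.x and 18.x and drop 12.x/14.x.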
is_docker:
name: Is docker
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- versioned_source
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
outputs:
docker_images: ${{ steps.docker_images_json.outputs.build }}
docker_images_crlf: ${{ steps.docker_images_crlf.outputs.build }}
docker_compose_tests: ${{ steps.docker_compose_tests.outputs.found }}
bake_targets: ${{ steps.bake_targets_json.outputs.build }}
docker_bake_json: ${{ steps.docker_bake.outputs.json }}
docker_test_matrix: ${{ steps.docker_test.outputs.build }}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- id: docker_images_json
name: Build JSON array from comma-separated list
uses: kanga333/json-array-builder@c7cd9d3a8b17cd368e9c2210bc3c16b0e2714ce5
with:
cmd: bash -c "echo $INPUT | tr -d '[:space:]'"
separator: ","
env:
INPUT: ${{ inputs.docker_images }}
- id: docker_images_crlf
name: Build newline-separated list from JSON array
run: |
build="$(echo "${{ join(fromJSON(env.INPUT),' ') }}" | tr " " "\n")"
DELIMITER=$(echo $RANDOM | md5sum | head -c 32)
echo "build<<${DELIMITER}" >> $GITHUB_OUTPUT
echo "${build}" >> $GITHUB_OUTPUT
echo "${DELIMITER}" >> $GITHUB_OUTPUT
env:
INPUT: ${{ steps.docker_images_json.outputs.build }}
- id: bake_targets_json
name: Build JSON array from comma-separated list
uses: kanga333/json-array-builder@c7cd9d3a8b17cd368e9c2210bc3c16b0e2714ce5
with:
cmd: bash -c "echo $INPUT | tr -d '[:space:]'"
separator: ","
env:
INPUT: ${{ inputs.bake_targets }}
- name: Check for docker compose test files
id: docker_compose_tests
run: |
if [ -n "$(ls docker-compose.test.{yml,yaml} 2>/dev/null)" ]
then
echo "found=true" >> $GITHUB_OUTPUT
else
echo "found=false" >> $GITHUB_OUTPUT
fi
- name: Setup buildx
uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226
with:
version: v0.9.1
- name: Pre-process Docker bake files
id: docker_bake
if: |
join(fromJSON(steps.docker_images_json.outputs.build)) != '' ||
steps.docker_compose_tests.outputs.found == 'true'
env:
BAKE_FILE: /tmp/docker-bake.json
run: |
if [ -n "$(ls docker-bake{.override,}.{json,hcl} 2>/dev/null)" ]
then
files="$(echo $(ls -1 docker-bake{.override,}.{json,hcl} 2>/dev/null) | sed 's/ / -f /')"
else
echo '${{ steps.bake_targets_json.outputs.build }}' | jq -s '{target: (map({(.[]):{}}))}' > ${BAKE_FILE}
files="${BAKE_FILE}"
fi
# log merged files and targets
docker buildx bake --print ${{ join(fromJSON(steps.bake_targets_json.outputs.build),' ') }} -f ${files}
json="$(docker buildx bake --print ${{ join(fromJSON(steps.bake_targets_json.outputs.build),' ') }} -f ${files} \
| jq -cr '
.target |= map_values(."inherits" += ["docker-metadata-action"]) |
.target |= map_values(."platforms" //= ["linux/amd64"]) |
del(.group."default") |
if .group == {} then del(.group) else . end
')"
echo "json=${json}">> $GITHUB_OUTPUT
- name: Build docker test matrix
id: docker_test
if: steps.docker_bake.outputs.json != ''
env:
BAKE_JSON: ${{ steps.docker_bake.outputs.json }}
RUNS_ON: ${{ inputs.runs_on }}
DOCKER_RUNS_ON: ${{ inputs.docker_runs_on }}
run: |
matrix="$(jq -cr '.target | to_entries |
{include: map(.value.platforms[] as $p |
{target: .key, platform: $p}
)}' <<< "${BAKE_JSON}")"
matrix="$(jq -cr --argjson in "$DOCKER_RUNS_ON" --argjson default "$RUNS_ON" '.include |=
map(.platform as $p |
.runs_on = if ($in | has($p)) then $in[$p] else $default end)' <<< "${matrix}")"
echo "build=${matrix}">> $GITHUB_OUTPUT
is_python:
name: Is python
env:
SUPPORTED_VERSIONS: |
3.8
3.9
3.10
3.11
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- versioned_source
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
outputs:
python_poetry: ${{ steps.python_poetry.outputs.enabled }}
python_versions: ${{ steps.python_versions.outputs.json }}
pypi_publish: ${{ steps.python_poetry.outputs.pypi_publish }}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Identify Poetry project
id: python_poetry
run: |
if test -f "pyproject.toml"
then
echo "found pyproject.toml"
if grep 'build-backend.*poetry' pyproject.toml
then
echo "Poetry used"
echo "enabled=true" >> $GITHUB_OUTPUT
echo "PYTHON_VERSIONS=[]" >> $GITHUB_ENV
else
echo "Poetry not used"
echo "enabled=false" >> $GITHUB_OUTPUT
fi
has_package=$(awk -F "=" '/^packages/ {print $2}' pyproject.toml)
if [ -n "${has_package}" ] && [ "${{ github.event.repository.visibility }}" = "public" ]
then
echo "pypi_publish=true" >> $GITHUB_OUTPUT
else
echo "pypi_publish=false" >> $GITHUB_OUTPUT
fi
else
echo "enabled=false" >> $GITHUB_OUTPUT
echo "pypi_publish=false" >> $GITHUB_OUTPUT
fi
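# Context (illustrative, not from this file): the `packages` probe above matches a Poetry entry
# in pyproject.toml such as
#   [tool.poetry]
#   packages = [{ include = "my_module" }]
# so a public repo that declares packages is treated as publishable to PyPI.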
- name: Setup python
id: setup-python
uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: ${{ env.SUPPORTED_VERSIONS }}
- name: Setup poetry
if: steps.setup-python.outputs.python-version != ''
uses: abatilo/actions-poetry@192395c0d10c082a7c62294ab5d9a9de40e48974
with:
poetry-version: 1.5.1
- name: Validate project Python requirements
if: steps.python_poetry.outputs.enabled == 'true'
run: |
versions=()
while IFS= read -r version; do
echo "Setting up Python $version"
error_check=`(poetry env use $version 2>&1 || true)`
if ! grep -q "Please choose a compatible version" <<< $error_check; then
versions+=("\"$version\"")
else
echo "Python $version does not meet project requirements."
fi
done <<< "$(echo -n "${SUPPORTED_VERSIONS}")"
echo "PYTHON_VERSIONS=[$(IFS=,; echo "${versions[*]}")]" >> $GITHUB_ENV
- name: Output compatible Python versions
if: steps.python_poetry.outputs.enabled == 'true'
id: python_versions
run: |
echo "json=[\"3.x\"]" >> $GITHUB_OUTPUT
if [ "${PYTHON_VERSIONS}" != "[]" ]
then
echo "json=${PYTHON_VERSIONS}" >> $GITHUB_OUTPUT
fi
is_cargo:
name: Is rust
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- versioned_source
if: inputs.cargo_targets != ''
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
outputs:
cargo_targets: ${{ steps.cargo_targets.outputs.build }}
cargo: ${{ steps.cargo_yml.outputs.enabled }}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- id: cargo_targets
name: Build JSON array from comma-separated list
uses: kanga333/json-array-builder@c7cd9d3a8b17cd368e9c2210bc3c16b0e2714ce5
with:
cmd: bash -c "echo $INPUT | tr -d '[:space:]'"
separator: ","
env:
INPUT: ${{ inputs.cargo_targets }}
- name: Check Cargo.toml
id: cargo_yml
run: |
if test -f "Cargo.toml"
then
echo "found Cargo.toml"
echo "enabled=true" >> $GITHUB_OUTPUT
else
echo "enabled=false" >> $GITHUB_OUTPUT
fi
is_balena:
name: Is balena
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- versioned_source
if: inputs.balena_slugs != ''
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
outputs:
balena_slugs: ${{ steps.balena_slugs.outputs.build }}
balena_yml: ${{ steps.balena_yml.outputs.enabled }}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- id: balena_slugs
name: Build JSON array from comma-separated list
uses: kanga333/json-array-builder@c7cd9d3a8b17cd368e9c2210bc3c16b0e2714ce5
with:
cmd: bash -c "echo $INPUT | tr -d '[:space:]'"
separator: ","
env:
INPUT: ${{ inputs.balena_slugs }}
- name: Check for balena.yml
id: balena_yml
run: |
if test -f balena.yml
then
echo "found balena.yml"
echo "enabled=true" >> $GITHUB_OUTPUT
else
echo "enabled=false" >> $GITHUB_OUTPUT
fi
is_custom:
name: Is custom
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- versioned_source
defaults:
run:
working-directory: .
shell: bash --noprofile --norc -eo pipefail -x {0}
outputs:
custom_test: ${{ steps.custom.outputs.test }}
custom_publish: ${{ steps.custom.outputs.publish }}
custom_finalize: ${{ steps.custom.outputs.finalize }}
custom_clean: ${{ steps.custom.outputs.clean }}
custom_always: ${{ steps.custom.outputs.always }}
custom_test_matrix: ${{ steps.custom_test_matrix.outputs.build }}
custom_publish_matrix: ${{ steps.custom_publish_matrix.outputs.build }}
custom_finalize_matrix: ${{ steps.custom_finalize_matrix.outputs.build }}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout event sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 1
submodules: recursive
ref: ${{ github.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
path: /tmp/_target
- id: custom_test_matrix
name: Build JSON array from comma-separated list
uses: kanga333/json-array-builder@c7cd9d3a8b17cd368e9c2210bc3c16b0e2714ce5
with:
cmd: bash -c "echo $INPUT | tr -d '[:space:]'"
separator: ","
env:
INPUT: ${{ inputs.custom_test_matrix }}
- id: custom_publish_matrix
name: Build JSON array from comma-separated list
uses: kanga333/json-array-builder@c7cd9d3a8b17cd368e9c2210bc3c16b0e2714ce5
with:
cmd: bash -c "echo $INPUT | tr -d '[:space:]'"
separator: ","
env:
INPUT: ${{ inputs.custom_publish_matrix }}
- id: custom_finalize_matrix
name: Build JSON array from comma-separated list
uses: kanga333/json-array-builder@c7cd9d3a8b17cd368e9c2210bc3c16b0e2714ce5
with:
cmd: bash -c "echo $INPUT | tr -d '[:space:]'"
separator: ","
env:
INPUT: ${{ inputs.custom_finalize_matrix }}
- name: Check for custom actions
id: custom
working-directory: /tmp/_target
run: |
if [ -d .github/actions/test ]
then
echo "test=true" >> $GITHUB_OUTPUT
fi
if [ -d .github/actions/publish ]
then
echo "publish=true" >> $GITHUB_OUTPUT
fi
if [ -d .github/actions/finalize ]
then
echo "finalize=true" >> $GITHUB_OUTPUT
fi
if [ -d .github/actions/clean ]
then
echo "clean=true" >> $GITHUB_OUTPUT
fi
if [ -d .github/actions/always ]
then
echo "always=true" >> $GITHUB_OUTPUT
fi
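# Reference sketch (assumptions: a composite action; `make test` is a placeholder command):
# a repository opts into the custom test hook by committing an action at
# .github/actions/test/action.yml, for example:
#   name: test
#   runs:
#     using: composite
#     steps:
#       - shell: bash
#         run: make test
# The publish/finalize/clean/always hooks work the same way under their own directories.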
is_website:
name: Is website
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- versioned_source
if: inputs.cloudflare_website != ''
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
outputs:
has_readme: ${{ steps.has_readme.outputs.enabled }}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Check for README for building a website
id: has_readme
run: |
if test -e "README.md"
then
echo "found README.md"
echo "enabled=true" >> $GITHUB_OUTPUT
else
echo "enabled=false" >> $GITHUB_OUTPUT
fi
is_cloudformation:
name: Is CloudFormation
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- versioned_source
if: inputs.cloudformation_templates != ''
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
outputs:
cloudformation: ${{ steps.validate_json.outputs.enabled }}
stacks: ${{ steps.cloudformation_stacks.outputs.matrix }}
includes: ${{ steps.cloudformation_stacks.outputs.includes }}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Validate CloudFormation input
id: validate_json
run: |
if echo '${{ inputs.cloudformation_templates }}' | jq -r .stacks
then
if [[ '${{ github.event.pull_request.head.repo.full_name }}' != '${{ github.repository }}' ]]; then
echo '::warning:: CloudFormation stacks are skipped for external contributions.'
echo "enabled=false" >> $GITHUB_OUTPUT
exit 0
fi
echo "enabled=true" >> $GITHUB_OUTPUT
else
echo '::warning::invalid JSON or no CloudFormation stacks?'
echo "enabled=false" >> $GITHUB_OUTPUT
fi
- name: Generate stacks matrix
id: cloudformation_stacks
run: |
stacks="$(echo '${{ inputs.cloudformation_templates }}' \
| jq -r '.stacks[].name' | jq -Rcn '[inputs]')"
includes="$(echo '${{ inputs.cloudformation_templates }}' \
| jq -r '.stacks[] | with_entries(if .key == "name" then .key = "stack" else . end)' \
| jq -rsc)"
echo "matrix=${stacks}" >> $GITHUB_OUTPUT
echo "includes=${includes}" >> $GITHUB_OUTPUT
is_terraform:
name: Is Terraform
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- versioned_source
if: inputs.terraform_projects != ''
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
outputs:
terraform: ${{ steps.validate_json.outputs.enabled }}
projects: ${{ steps.terraform_projects.outputs.deploy }}
includes: ${{ steps.terraform_projects.outputs.includes }}
steps:
- name: Validate Terraform input
id: validate_json
run: |
if echo '${{ inputs.terraform_projects }}' | jq -r .projects
then
if [[ '${{ github.event.pull_request.head.repo.full_name }}' != '${{ github.repository }}' ]]; then
echo '::warning:: Terraform projects are skipped for external contributions.'
echo "enabled=false" >> $GITHUB_OUTPUT
exit 0
fi
echo "enabled=true" >> $GITHUB_OUTPUT
else
echo '::warning::invalid JSON or no Terraform projects?'
echo "enabled=false" >> $GITHUB_OUTPUT
fi
- name: Generate projects matrix
id: terraform_projects
run: |
projects="$(echo '${{ inputs.terraform_projects }}' \
| jq -r '.projects[].main_tf' | jq -Rcn '[inputs]')"
includes="$(echo '${{ inputs.terraform_projects }}' \
| jq -r '.projects[] | with_entries(if .key == "main_tf" then .key = "project" else . end)' \
| jq -rsc)"
echo "deploy=${projects}" >> $GITHUB_OUTPUT
echo "includes=${includes}" >> $GITHUB_OUTPUT
npm_test:
name: Test npm
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_npm
- versioned_source
if: |
github.event.pull_request.state == 'open' &&
needs.is_npm.outputs.npm == 'true'
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
strategy:
fail-fast: false
matrix:
node_version: ${{ fromJSON(needs.is_npm.outputs.node_versions) }}
outputs:
package: ${{ steps.meta.outputs.package }}
version: ${{ steps.meta.outputs.version }}
branch_tag: ${{ steps.meta.outputs.branch_tag }}
sha_tag: ${{ steps.meta.outputs.sha_tag }}
version_tag: ${{ steps.meta.outputs.version_tag }}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Create local refs
if: github.event.pull_request.state == 'open' && inputs.disable_versioning != true
run: |
git update-ref refs/tags/${{ needs.versioned_source.outputs.tag }} ${{ needs.versioned_source.outputs.tag_sha }}
- name: Sort node versions
id: node_versions
env:
VERSIONS: ${{ needs.is_npm.outputs.node_versions }}
run: |
echo "min=$(echo "${VERSIONS}" | jq -r '.[]' | sort --version-sort | head -n1)" >> $GITHUB_OUTPUT
echo "max=$(echo "${VERSIONS}" | jq -r '.[]' | sort --version-sort --reverse | head -n1)" >> $GITHUB_OUTPUT
- name: Setup Node.js
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7
with:
node-version: ${{ matrix.node_version }}
registry-url: ${{ env.NPM_REGISTRY }}
cache: npm
if: needs.is_npm.outputs.has_npm_lockfile == 'true'
- name: Setup Node.js
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7
with:
node-version: ${{ matrix.node_version }}
registry-url: ${{ env.NPM_REGISTRY }}
if: needs.is_npm.outputs.has_npm_lockfile != 'true'
- name: Generate metadata
id: meta
run: |
package="$(jq -r '.name' package.json)"
version="$(jq -r '.version' package.json)"
branch_tag="$(echo 'build-${{ github.event.pull_request.head.ref }}' | sed 's/[^[:alnum:]]/-/g')"
sha_tag="${branch_tag}-${{ github.event.pull_request.head.sha }}"
version_tag="${version}-${branch_tag}-${{ github.event.pull_request.head.sha }}"
echo "package=${package}" >> $GITHUB_OUTPUT
echo "version=${version}" >> $GITHUB_OUTPUT
echo "branch_tag=${branch_tag}" >> $GITHUB_OUTPUT
echo "sha_tag=${sha_tag}" >> $GITHUB_OUTPUT
echo "version_tag=${version_tag}" >> $GITHUB_OUTPUT
- name: Install native dependencies (if necessary)
run: |
npm run flowzone-preinstall --if-present
- name: Install dependencies
run: |
runner_os="$(echo "${RUNNER_OS}" | tr '[:upper:]' '[:lower:]')"
os_count="$(jq '.os | length' package.json)"
index="$(jq --arg os "${runner_os}" '.os | index($os) | select( . != null )' package.json)"
if [[ -n "$index" ]] || [[ "$os_count" -lt 1 ]]; then
if [ ${{ needs.is_npm.outputs.has_npm_lockfile }} == 'true' ]; then
npm ci
else
npm i
fi
else
echo "${runner_os} is not supported in package.json"
fi
- name: Run build
run: npm run build --if-present
- name: Run tests
if: inputs.pseudo_terminal != true
run: npm test
- name: Run tests (pseudo-tty)
if: inputs.pseudo_terminal == true
shell: script -q -e -c "bash --noprofile --norc -eo pipefail -x {0}" /tmp/test-session
run: npm test
- name: Run pack
if: needs.is_npm.outputs.npm_private != 'true' && steps.node_versions.outputs.max == matrix.node_version
run: |
mkdir ${{ runner.temp }}/npm-pack && npm pack --pack-destination=${{ runner.temp }}/npm-pack
# FIXME: workaround for npm 6.x, where `npm pack` dumps the tarball into the current directory because it has no `--pack-destination` flag
[[ "$(npm --version)" =~ ^6\..* ]] && find . -maxdepth 1 -name '*.tgz' -exec mv {} ${{ runner.temp }}/npm-pack \; || true
- name: Upload artifact
if: needs.is_npm.outputs.npm_private != 'true' && steps.node_versions.outputs.max == matrix.node_version
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
with:
name: npm-${{ github.event.pull_request.head.sha }}-${{ matrix.node_version }}
path: ${{ runner.temp }}/npm-pack/*.tgz
retention-days: 90
- name: Generate docs (if present)
if: needs.is_npm.outputs.npm_docs == 'true'
shell: bash
run: npm run doc
- name: Compress docs
if: needs.is_npm.outputs.npm_docs == 'true' && steps.node_versions.outputs.max == matrix.node_version
run: tar --auto-compress -cvf ${{ runner.temp }}/docs.tar.zst ./docs
- name: Upload artifact
if: needs.is_npm.outputs.npm_docs == 'true' && steps.node_versions.outputs.max == matrix.node_version
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
with:
name: docs-${{ github.event.pull_request.head.sha }}-${{ matrix.node_version }}
path: ${{ runner.temp }}/docs.tar.zst
retention-days: 90
npm_publish:
name: Publish npm
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_npm
- npm_test
- custom_test
- docker_test
- cargo_test
- python_test
if: |
!failure() && !cancelled() &&
needs.npm_test.result == 'success' &&
needs.is_npm.outputs.npm_private != 'true'
defaults:
run:
working-directory: .
shell: bash --noprofile --norc -eo pipefail -x {0}
steps:
- name: Sort node versions
id: node_versions
env:
VERSIONS: ${{ needs.is_npm.outputs.node_versions }}
run: |
echo "min=$(echo "${VERSIONS}" | jq -r '.[]' | sort --version-sort | head -n1)" >> $GITHUB_OUTPUT
echo "max=$(echo "${VERSIONS}" | jq -r '.[]' | sort --version-sort --reverse | head -n1)" >> $GITHUB_OUTPUT
- name: Download npm artifact
uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
with:
path: ${{ runner.temp }}
name: npm-${{ github.event.pull_request.head.sha }}-${{ steps.node_versions.outputs.max }}
- name: Setup Node.js
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7
with:
node-version: "18"
registry-url: ${{ env.NPM_REGISTRY }}
- name: Publish draft release
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
npm config set ignore-scripts true
pack="$(ls ${{ runner.temp }}/*.tgz | sort -t- -n -k3 | tail -n1)"
tar xvf "${pack}"
(cd package
npm --loglevel=verbose --logs-max=0 --no-git-tag-version version ${{ needs.npm_test.outputs.version_tag }}-${{ github.run_attempt }} --allow-same-version
)
tar czvf "${pack}" package
if [ ${{ github.run_attempt }} -gt 1 ]; then
npm --loglevel=verbose --logs-max=0 unpublish ${{ needs.npm_test.outputs.package }}@${{ needs.npm_test.outputs.version_tag }}-$((${{ github.run_attempt }} - 1)) || true
fi
npm --loglevel=verbose --logs-max=0 publish --tag=${{ needs.npm_test.outputs.sha_tag }} "${pack}" --access="${{ needs.is_npm.outputs.npm_access }}"
npm --loglevel=verbose --logs-max=0 dist-tag add ${{ needs.npm_test.outputs.package }}@${{ needs.npm_test.outputs.version_tag }}-${{ github.run_attempt }} ${{ needs.npm_test.outputs.branch_tag }}
npm_finalize:
name: Finalize npm
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_npm
if: |
(github.event.pull_request.merged == true || github.event_name == 'push') &&
needs.is_npm.outputs.npm == 'true' &&
needs.is_npm.outputs.npm_private != 'true'
defaults:
run:
working-directory: .
shell: bash --noprofile --norc -eo pipefail -x {0}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Sort node versions
id: node_versions
env:
VERSIONS: ${{ needs.is_npm.outputs.node_versions }}
run: |
echo "min=$(echo "${VERSIONS}" | jq -r '.[]' | sort --version-sort | head -n1)" >> $GITHUB_OUTPUT
echo "max=$(echo "${VERSIONS}" | jq -r '.[]' | sort --version-sort --reverse | head -n1)" >> $GITHUB_OUTPUT
- name: Download npm artifact from last run
uses: dawidd6/action-download-artifact@268677152d06ba59fcec7a7f0b5d961b6ccd7e1e
with:
github_token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
commit: ${{ github.event.pull_request.head.sha || github.event.head_commit.id }}
path: ${{ runner.temp }}
workflow_conclusion: success
name: npm-${{ github.event.pull_request.head.sha }}-${{ steps.node_versions.outputs.max }}
- name: Setup Node.js
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7
with:
node-version: "18"
registry-url: ${{ env.NPM_REGISTRY }}
- name: Publish final release
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
npm config set ignore-scripts true
pack="$(ls ${{ runner.temp }}/*.tgz | sort -t- -n -k3 | tail -n1)"
npm --loglevel=verbose --logs-max=0 publish --tag "latest" "${pack}" --access="${{ needs.is_npm.outputs.npm_access }}"
npm_docs_finalize:
name: Finalize npm docs
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_npm
if: |
(github.event.pull_request.merged == true || github.event_name == 'push') &&
needs.is_npm.outputs.npm_docs == 'true'
defaults:
run:
working-directory: .
shell: bash --noprofile --norc -eo pipefail -x {0}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"pages": "write",
"contents": "read",
"metadata": "read"
}
repositories: '[ "${{ github.event.pull_request.head.repo.name }}" ]'
- name: Sort node versions
id: node_versions
env:
VERSIONS: ${{ needs.is_npm.outputs.node_versions }}
run: |
echo "min=$(echo "${VERSIONS}" | jq -r '.[]' | sort --version-sort | head -n1)" >> $GITHUB_OUTPUT
echo "max=$(echo "${VERSIONS}" | jq -r '.[]' | sort --version-sort --reverse | head -n1)" >> $GITHUB_OUTPUT
- name: Download npm docs artifact from last run
uses: dawidd6/action-download-artifact@268677152d06ba59fcec7a7f0b5d961b6ccd7e1e
with:
github_token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
commit: ${{ github.event.pull_request.head.sha || github.event.head_commit.id }}
path: ${{ runner.temp }}
workflow_conclusion: success
name: docs-${{ github.event.pull_request.head.sha }}-${{ steps.node_versions.outputs.max }}
- name: Extract docs artifact
run: |
docs="$(ls ${{ runner.temp }}/*.tar.zst | sort -t- -n -k3 | tail -n1)"
tar -xvf "${docs}"
- name: Publish generated docs to GitHub Pages
uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847
with:
github_token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
publish_dir: docs
publish_branch: docs
docker_test:
name: Test docker
runs-on: ${{ matrix.runs_on }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_docker
- versioned_source
if: |
github.event.pull_request.state == 'open' &&
needs.is_docker.outputs.docker_bake_json != ''
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
strategy:
fail-fast: false
matrix: ${{ fromJSON(needs.is_docker.outputs.docker_test_matrix) }}
env:
DOCKER_BUILDKIT: "1"
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Create local refs
if: github.event.pull_request.state == 'open' && inputs.disable_versioning != true
run: |
git update-ref refs/tags/${{ needs.versioned_source.outputs.tag }} ${{ needs.versioned_source.outputs.tag_sha }}
- name: Sanitize docker strings
id: strings
env:
PLATFORM_SLUG_MAP: |
{
"linux/386": "i386",
"linux/amd64": "amd64",
"linux/arm64": "arm64v8",
"linux/arm/v7": "arm32v7",
"linux/arm/v6": "arm32v6",
"linux/arm/v5": "arm32v5",
"linux/s390x": "s390x",
"linux/mips64le": "mips64le",
"linux/ppc64le": "ppc64le",
"linux/riscv64": "riscv64",
"windows/amd64": "windows-amd64"
}
TARGET: ${{ matrix.target }}
PLATFORM: ${{ matrix.platform }}
IMAGE: ${{ matrix.image }}
run: |
target_slug="$(echo "${TARGET}" | sed 's/[^[:alnum:]]/-/g')"
if [ -n "${TARGET}" ] && [ -z "${target_slug}" ]
then
echo "::error::Unsupported platform: ${TARGET}"
fi
if [ "${TARGET}" != "default" ]
then
if [ "${{ inputs.docker_invert_tags }}" = "true" ]
then
prefix_slug="${target_slug}-"
else
suffix_slug="-${target_slug}"
fi
fi
platform_slug="$(jq -cr --arg platform "${PLATFORM}" '.[$platform] // ""' <<< "${PLATFORM_SLUG_MAP}")"
if [ -n "${PLATFORM}" ] && [ -z "${platform_slug}" ]
then
echo "::error::Unsupported platform: ${PLATFORM}"
fi
if [ -n "${IMAGE}" ]
then
if [[ "${IMAGE}" =~ ^(.*\/)?([^\/]+)\/([^\/\:]+)(\:.+)?$ ]]
then
image_slug="${IMAGE}"
else
image_slug="docker.io/${IMAGE}"
fi
fi
if [[ "${image_slug}" =~ ^docker\.io\/(.*)$ ]]
then
dockerhub_slug="${BASH_REMATCH[1]}"
fi
echo "image=${image_slug}" >> $GITHUB_OUTPUT
echo "target=${target_slug}" >> $GITHUB_OUTPUT
echo "platform=${platform_slug}" >> $GITHUB_OUTPUT
echo "prefix=${prefix_slug}" >> $GITHUB_OUTPUT
echo "suffix=${suffix_slug}" >> $GITHUB_OUTPUT
echo "dockerhub=${dockerhub_slug}" >> $GITHUB_OUTPUT
- name: Generate docker metadata
id: test_meta
uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934
with:
images: |
sut
localhost:5000/sut
${{ needs.is_docker.outputs.docker_images_crlf }}
labels: |
org.opencontainers.image.version=${{ needs.versioned_source.outputs.semver }}
org.opencontainers.image.ref.name=${{ matrix.target }}
tags: |
type=raw,value=${{ github.event.pull_request.head.sha }}
type=raw,value=build-${{ github.event.pull_request.head.sha }}
type=raw,value=build-${{ github.event.pull_request.head.ref }}
flavor: |
latest=true
prefix=${{ steps.strings.outputs.prefix }}
suffix=${{ steps.strings.outputs.suffix }}
- name: Setup QEMU
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
if: contains(matrix.runs_on, 'self-hosted') != true
with:
platforms: all
image: tonistiigi/binfmt:qemu-v6.2.0
- name: Setup buildx
uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226
with:
driver-opts: network=host
install: true
- name: Login to GitHub Container Registry
continue-on-error: true
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
if: github.event.repository.private
- name: Login to Docker Hub
continue-on-error: true
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
with:
registry: docker.io
username: ${{ secrets.DOCKERHUB_USER || secrets.DOCKER_REGISTRY_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN || secrets.DOCKER_REGISTRY_PASS }}
if: github.event.repository.private
- name: Export common env vars
run: |
echo "DOCKER_BAKE_FILE=${{ runner.temp }}/docker-bake.json" >> $GITHUB_ENV
echo "DOCKER_TAR=${{ runner.temp }}/docker.tar" >> $GITHUB_ENV
echo "COMPOSE_PROJECT_NAME=${{ github.run_id }}" >> $GITHUB_ENV
echo "COMPOSE_FILE=${{ runner.temp }}/docker-compose.yml" >> $GITHUB_ENV
echo "COMPOSE_ENV_FILE=${{ runner.temp }}/.env" >> $GITHUB_ENV
- name: Add COMPOSE_VARS to compose env file
if: github.event.pull_request.head.repo.full_name == github.repository
env:
COMPOSE_VARS: ${{ secrets.COMPOSE_VARS }}
shell: bash
run: |
if [ -n "${COMPOSE_VARS}" ]
then
echo "${COMPOSE_VARS}" | base64 --decode > ${COMPOSE_ENV_FILE}
while read -r line
do
secret="$(echo "${line}" | awk -F'=' '{print $2}')"
echo "::add-mask::${secret}"
done < ${COMPOSE_ENV_FILE}
fi
- name: Add automatic GITHUB_TOKEN to compose env file
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
if ! grep -q '^GH_TOKEN=' ${COMPOSE_ENV_FILE}
then
echo "GH_TOKEN=${GH_TOKEN}" >> ${COMPOSE_ENV_FILE}
fi
if ! grep -q '^GITHUB_TOKEN=' ${COMPOSE_ENV_FILE}
then
echo "GITHUB_TOKEN=${GITHUB_TOKEN}" >> ${COMPOSE_ENV_FILE}
fi
- name: Write docker bake file
run: |
echo '${{ needs.is_docker.outputs.docker_bake_json }}' > "${DOCKER_BAKE_FILE}"
jq . "${DOCKER_BAKE_FILE}"
- name: Write docker compose file
if: needs.is_docker.outputs.docker_compose_tests == 'true'
run: |
files="
docker-compose.yml
docker-compose.yaml
docker-compose.test.yml
docker-compose.test.yaml
"
args=""
for file in ${files}
do
test -f "${file}" || continue
args="${args} -f ${file}"
if [ ! -f .env ]
then
yq '.services.*.env_file |= map(with(select(. == ".env") ; . = "${{ env.COMPOSE_ENV_FILE }}"))' -i "${file}"
fi
done
docker compose --env-file="${COMPOSE_ENV_FILE}" --project-directory="$(pwd)" ${args} config > "${COMPOSE_FILE}"
yq '(.services.* | select(.build != null)).platform |= "${{ matrix.platform }}"' -i "${COMPOSE_FILE}"
yq . "${COMPOSE_FILE}"
- name: Docker bake
id: docker_bake
uses: docker/bake-action@511fde2517761e303af548ec9e0ea74a8a100112
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
workdir: ${{ inputs.working_directory }}
files: |
${{ env.DOCKER_BAKE_FILE }}
${{ steps.test_meta.outputs.bake-file }}
targets: ${{ matrix.target }}
set: |
*.platform=${{ matrix.platform }}
*.secrets=id=GITHUB_TOKEN
*.secrets=id=GH_TOKEN
*.cache-to=type=gha,mode=min,scope=${{ github.head_ref }}-${{ matrix.target }}-${{ matrix.platform }}
*.cache-from=type=gha,scope=${{ github.head_ref }}-${{ matrix.target }}-${{ matrix.platform }}
load: true
provenance: false
- name: Save image to file
if: join(fromJSON(needs.is_docker.outputs.docker_images)) != ''
run: |
docker save ${{ join(fromJSON(steps.test_meta.outputs.json).tags,' ') }} -o ${DOCKER_TAR}
zstd -v ${DOCKER_TAR}
- name: Run docker compose tests
if: needs.is_docker.outputs.docker_compose_tests == 'true'
run: |
docker compose run sut || { docker compose logs ; exit 1 ; }
docker compose logs
- name: Upload artifacts
if: join(fromJSON(needs.is_docker.outputs.docker_images)) != ''
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
with:
name: docker-${{ github.event.pull_request.head.sha }}-${{ steps.strings.outputs.target }}-${{ steps.strings.outputs.platform }}
path: ${{ env.DOCKER_TAR }}.zst
retention-days: 1
docker_publish:
name: Publish docker
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_docker
- npm_test
- custom_test
- docker_test
- cargo_test
- python_test
if: |
!failure() && !cancelled() &&
needs.docker_test.result == 'success' &&
join(fromJSON(needs.is_docker.outputs.docker_images)) != ''
defaults:
run:
working-directory: .
shell: bash --noprofile --norc -eo pipefail -x {0}
services:
registry:
image: registry:2.8.3
ports:
- 5000:5000
strategy:
fail-fast: false
matrix:
image: ${{ fromJSON(needs.is_docker.outputs.docker_images) }}
target: ${{ fromJSON(needs.is_docker.outputs.bake_targets) }}
env:
LOCAL_TAG: localhost:5000/sut:latest
steps:
- name: Sanitize docker strings
id: strings
env:
PLATFORM_SLUG_MAP: |
{
"linux/386": "i386",
"linux/amd64": "amd64",
"linux/arm64": "arm64v8",
"linux/arm/v7": "arm32v7",
"linux/arm/v6": "arm32v6",
"linux/arm/v5": "arm32v5",
"linux/s390x": "s390x",
"linux/mips64le": "mips64le",
"linux/ppc64le": "ppc64le",
"linux/riscv64": "riscv64",
"windows/amd64": "windows-amd64"
}
TARGET: ${{ matrix.target }}
PLATFORM: ${{ matrix.platform }}
IMAGE: ${{ matrix.image }}
run: |
target_slug="$(echo "${TARGET}" | sed 's/[^[:alnum:]]/-/g')"
if [ -n "${TARGET}" ] && [ -z "${target_slug}" ]
then
echo "::error::Unsupported platform: ${TARGET}"
fi
if [ "${TARGET}" != "default" ]
then
if [ "${{ inputs.docker_invert_tags }}" = "true" ]
then
prefix_slug="${target_slug}-"
else
suffix_slug="-${target_slug}"
fi
fi
platform_slug="$(jq -cr --arg platform "${PLATFORM}" '.[$platform] // ""' <<< "${PLATFORM_SLUG_MAP}")"
if [ -n "${PLATFORM}" ] && [ -z "${platform_slug}" ]
then
echo "::error::Unsupported platform: ${PLATFORM}"
fi
if [ -n "${IMAGE}" ]
then
if [[ "${IMAGE}" =~ ^(.*\/)?([^\/]+)\/([^\/\:]+)(\:.+)?$ ]]
then
image_slug="${IMAGE}"
else
image_slug="docker.io/${IMAGE}"
fi
fi
if [[ "${image_slug}" =~ ^docker\.io\/(.*)$ ]]
then
dockerhub_slug="${BASH_REMATCH[1]}"
fi
echo "image=${image_slug}" >> $GITHUB_OUTPUT
echo "target=${target_slug}" >> $GITHUB_OUTPUT
echo "platform=${platform_slug}" >> $GITHUB_OUTPUT
echo "prefix=${prefix_slug}" >> $GITHUB_OUTPUT
echo "suffix=${suffix_slug}" >> $GITHUB_OUTPUT
echo "dockerhub=${dockerhub_slug}" >> $GITHUB_OUTPUT
- name: Generate docker metadata
id: draft_meta
uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934
with:
images: |
${{ matrix.image }}
labels: |
org.opencontainers.image.version=${{ needs.versioned_source.outputs.semver }}
org.opencontainers.image.ref.name=${{ matrix.target }}
tags: |
type=raw,value=${{ github.event.pull_request.head.sha }}
type=raw,value=build-${{ github.event.pull_request.head.sha }}
type=raw,value=build-${{ github.event.pull_request.head.ref }}
flavor: |
latest=false
prefix=${{ steps.strings.outputs.prefix }}
suffix=${{ steps.strings.outputs.suffix }}
- name: Setup buildx
uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226
with:
driver-opts: network=host
install: true
- name: Setup crane
uses: imjasonh/setup-crane@00c9e93efa4e1138c9a7a5c594acd6c75a2fbf0c
with:
version: v0.14.0
- name: Warn if tests skipped
if: needs.is_docker.outputs.docker_compose_tests != 'true'
run: echo "::warning::Publishing Docker images without docker compose tests!"
- name: Download all artifacts
uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
with:
path: ${{ runner.temp }}
- name: Decompress artifacts
run: |
for zst in ${{ runner.temp }}/docker-${{ github.event.pull_request.head.sha }}-${{ steps.strings.outputs.target }}-*/docker.tar.zst
do
zstd -vd "${zst}"
done
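# Push each per-platform tarball into the local registry with skopeo, then
# append it to (or create) a multi-arch manifest list under LOCAL_TAG with
# `docker buildx imagetools create`.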
- name: Create local manifest
run: |
for tar in "${{ runner.temp }}"/*/docker.tar
do
artifact_name="$(basename "$(dirname "${tar}")")"
sha="$(echo "${artifact_name}" | awk -F- '{print $2}')"
target="$(echo "${artifact_name}" | awk -F- '{print $3}')"
platform="$(echo "${artifact_name}" | awk -F- '{print $4}')"
platform_tag="${LOCAL_TAG}-${platform}"
skopeo copy --all "docker-archive:${tar}" "docker://${platform_tag}" --dest-tls-verify=false
docker buildx imagetools create -t ${LOCAL_TAG} --append "${platform_tag}" || \
docker buildx imagetools create -t ${LOCAL_TAG} "${platform_tag}"
docker buildx imagetools inspect --raw "${LOCAL_TAG}" > "${{ runner.temp }}/manifest.json"
done
- name: Login to GitHub Container Registry
continue-on-error: true
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
continue-on-error: true
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
with:
registry: docker.io
username: ${{ secrets.DOCKERHUB_USER || secrets.DOCKER_REGISTRY_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN || secrets.DOCKER_REGISTRY_PASS }}
- name: Publish manifest to remote(s)
uses: akhilerm/tag-push-action@85bf542f43f5f2060ef76262a67ee3607cb6db37
with:
src: ${{ env.LOCAL_TAG }}
dst: |
${{ steps.draft_meta.outputs.tags }}
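# Optionally publish one extra tag per platform (e.g. "<tag>-amd64") by copying
# each platform-specific image out of the multi-arch manifest with crane.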
- name: Publish tags for each platform
if: inputs.docker_publish_platform_tags == true
env:
PLATFORM_SLUG_MAP: |
{
"linux/386": "i386",
"linux/amd64": "amd64",
"linux/arm64": "arm64v8",
"linux/arm/v7": "arm32v7",
"linux/arm/v6": "arm32v6",
"linux/arm/v5": "arm32v5",
"linux/s390x": "s390x",
"linux/mips64le": "mips64le",
"linux/ppc64le": "ppc64le",
"linux/riscv64": "riscv64",
"windows/amd64": "windows-amd64"
}
REMOTE_TAGS: ${{ steps.draft_meta.outputs.tags }}
run: |
for remote_tag in ${REMOTE_TAGS}
do
for b64 in $(jq -r '.manifests[].platform | @base64' <<< "$(docker buildx imagetools inspect --raw "${remote_tag}")")
do
json="$(echo "${b64}" | base64 --decode)"
os="$(echo "${json}" | jq -r '.os')"
arch="$(echo "${json}" | jq -r '.architecture')"
variant="$(echo "${json}" | jq -r '.variant // ""')"
if [ -z "${variant}" ]
then
platform="${os}/${arch}"
else
platform="${os}/${arch}/${variant}"
fi
platform_slug="$(jq -cr --arg platform "${platform}" '.[$platform] // ""' <<< "${PLATFORM_SLUG_MAP}")"
if [ -z "{platform_slug}" ]
then
echo "::error::Unsupported platform: ${PLATFORM}"
fi
crane copy "${remote_tag}" "${remote_tag}-${platform_slug}" --platform "${platform}"
done
done
docker_finalize:
name: Finalize docker
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_docker
- versioned_source
if: |
(github.event.pull_request.merged == true || github.event_name == 'push') &&
join(fromJSON(needs.is_docker.outputs.docker_images)) != ''
defaults:
run:
working-directory: .
shell: bash --noprofile --norc -eo pipefail -x {0}
strategy:
fail-fast: false
matrix:
image: ${{ fromJSON(needs.is_docker.outputs.docker_images) }}
target: ${{ fromJSON(needs.is_docker.outputs.bake_targets) }}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Sanitize docker strings
id: strings
env:
PLATFORM_SLUG_MAP: |
{
"linux/386": "i386",
"linux/amd64": "amd64",
"linux/arm64": "arm64v8",
"linux/arm/v7": "arm32v7",
"linux/arm/v6": "arm32v6",
"linux/arm/v5": "arm32v5",
"linux/s390x": "s390x",
"linux/mips64le": "mips64le",
"linux/ppc64le": "ppc64le",
"linux/riscv64": "riscv64",
"windows/amd64": "windows-amd64"
}
TARGET: ${{ matrix.target }}
PLATFORM: ${{ matrix.platform }}
IMAGE: ${{ matrix.image }}
run: |
target_slug="$(echo "${TARGET}" | sed 's/[^[:alnum:]]/-/g')"
if [ -n "${TARGET}" ] && [ -z "${target_slug}" ]
then
echo "::error::Unsupported platform: ${TARGET}"
fi
if [ "${TARGET}" != "default" ]
then
if [ "${{ inputs.docker_invert_tags }}" = "true" ]
then
prefix_slug="${target_slug}-"
else
suffix_slug="-${target_slug}"
fi
fi
platform_slug="$(jq -cr --arg platform "${PLATFORM}" '.[$platform] // ""' <<< "${PLATFORM_SLUG_MAP}")"
if [ -n "${PLATFORM}" ] && [ -z "${platform_slug}" ]
then
echo "::error::Unsupported platform: ${PLATFORM}"
fi
if [ -n "${IMAGE}" ]
then
if [[ "${IMAGE}" =~ ^(.*\/)?([^\/]+)\/([^\/\:]+)(\:.+)?$ ]]
then
image_slug="${IMAGE}"
else
image_slug="docker.io/${IMAGE}"
fi
fi
if [[ "${image_slug}" =~ ^docker\.io\/(.*)$ ]]
then
dockerhub_slug="${BASH_REMATCH[1]}"
fi
echo "image=${image_slug}" >> $GITHUB_OUTPUT
echo "target=${target_slug}" >> $GITHUB_OUTPUT
echo "platform=${platform_slug}" >> $GITHUB_OUTPUT
echo "prefix=${prefix_slug}" >> $GITHUB_OUTPUT
echo "suffix=${suffix_slug}" >> $GITHUB_OUTPUT
echo "dockerhub=${dockerhub_slug}" >> $GITHUB_OUTPUT
- name: Generate docker metadata
id: final_meta
uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934
with:
images: |
${{ matrix.image }}
labels: |
org.opencontainers.image.version=${{ needs.versioned_source.outputs.semver }}
org.opencontainers.image.ref.name=${{ matrix.target }}
tags: |
type=raw,value=${{ github.base_ref || github.ref_name }}
type=raw,value=${{ needs.versioned_source.outputs.tag }}
type=raw,value=${{ needs.versioned_source.outputs.semver }}
flavor: |
latest=${{ needs.versioned_source.outputs.semver != '' }}
prefix=${{ steps.strings.outputs.prefix }},onlatest=true
suffix=${{ steps.strings.outputs.suffix }},onlatest=true
- name: Setup crane
uses: imjasonh/setup-crane@00c9e93efa4e1138c9a7a5c594acd6c75a2fbf0c
with:
version: v0.14.0
- name: Login to GitHub Container Registry
continue-on-error: true
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
continue-on-error: true
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
with:
registry: docker.io
username: ${{ secrets.DOCKERHUB_USER || secrets.DOCKER_REGISTRY_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN || secrets.DOCKER_REGISTRY_PASS }}
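# Retag the build image pushed during the PR (prefix + "build-<sha>" + suffix)
# with the final branch, version tag, and semver tags generated above.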
- name: Publish final tags
uses: akhilerm/tag-push-action@85bf542f43f5f2060ef76262a67ee3607cb6db37
with:
src: ${{ matrix.image }}:${{ steps.strings.outputs.prefix }}build-${{ github.event.pull_request.head.sha || github.event.head_commit.id }}${{ steps.strings.outputs.suffix }}
dst: |
${{ steps.final_meta.outputs.tags }}
- name: Publish tags for each platform
if: inputs.docker_publish_platform_tags == true
env:
PLATFORM_SLUG_MAP: |
{
"linux/386": "i386",
"linux/amd64": "amd64",
"linux/arm64": "arm64v8",
"linux/arm/v7": "arm32v7",
"linux/arm/v6": "arm32v6",
"linux/arm/v5": "arm32v5",
"linux/s390x": "s390x",
"linux/mips64le": "mips64le",
"linux/ppc64le": "ppc64le",
"linux/riscv64": "riscv64",
"windows/amd64": "windows-amd64"
}
REMOTE_TAGS: ${{ steps.final_meta.outputs.tags }}
run: |
for remote_tag in ${REMOTE_TAGS}
do
for b64 in $(jq -r '.manifests[].platform | @base64' <<< "$(docker buildx imagetools inspect --raw "${remote_tag}")")
do
json="$(echo "${b64}" | base64 --decode)"
os="$(echo "${json}" | jq -r '.os')"
arch="$(echo "${json}" | jq -r '.architecture')"
variant="$(echo "${json}" | jq -r '.variant // ""')"
if [ -z "${variant}" ]
then
platform="${os}/${arch}"
else
platform="${os}/${arch}/${variant}"
fi
platform_slug="$(jq -cr --arg platform "${platform}" '.[$platform] // ""' <<< "${PLATFORM_SLUG_MAP}")"
if [ -z "{platform_slug}" ]
then
echo "::error::Unsupported platform: ${PLATFORM}"
fi
crane copy "${remote_tag}" "${remote_tag}-${platform_slug}" --platform "${platform}"
done
done
- name: Update DockerHub Description
if: steps.strings.outputs.dockerhub != '' && github.base_ref == github.event.repository.default_branch
continue-on-error: true
uses: peter-evans/dockerhub-description@dc67fad7001ef9e8e3c124cb7a64e16d0a63d864
with:
username: ${{ secrets.DOCKERHUB_USER || secrets.DOCKER_REGISTRY_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN || secrets.DOCKER_REGISTRY_PASS }}
repository: ${{ steps.strings.outputs.dockerhub }}
readme-filepath: ./README.md
balena_publish:
name: Publish balena
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_balena
- npm_test
- custom_test
- docker_test
- cargo_test
- python_test
- versioned_source
if: |
!failure() && !cancelled() &&
needs.is_balena.result == 'success' &&
github.event.pull_request.state == 'open'
strategy:
fail-fast: false
matrix:
slug: ${{ fromJSON(needs.is_balena.outputs.balena_slugs) }}
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Create local refs
if: github.event.pull_request.state == 'open' && inputs.disable_versioning != true
run: |
git update-ref refs/tags/${{ needs.versioned_source.outputs.tag }} ${{ needs.versioned_source.outputs.tag_sha }}
- uses: balena-io/deploy-to-balena-action@4495424a8247c08911413a57afbbc62815a39e56
id: balena_deploy
with:
balena_token: ${{ secrets.BALENA_API_KEY || secrets.BALENA_API_KEY_PUSH }}
environment: ${{ inputs.balena_environment }}
fleet: ${{ matrix.slug }}
source: ${{ inputs.working_directory }}
registry_secrets: |
{
"ghcr.io": {
"username": "${{ github.actor }}",
"password": "${{ secrets.GITHUB_TOKEN }}"
},
"docker.io": {
"username": "${{ secrets.DOCKERHUB_USER }}",
"password": "${{ secrets.DOCKERHUB_TOKEN }}"
}
}
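# Release notes are generated from the commits between the previous "v*.*.*"
# tag (the second-newest version tag merged into this history) and the current
# commit, using git's "reference" pretty format.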
- name: Generate release notes
id: release_notes
run: |
set -ea
# prevent git from exiting with 141
set +o pipefail
previous_tag="$(git --no-pager tag --list --sort=-version:refname "v*.*.*" --merged | head -n2 | tail -n1)"
release_notes_file="$(mktemp)"
git log ${previous_tag}..${{ github.event.pull_request.head.sha || github.event.head_commit.id }} --pretty=reference > "${release_notes_file}"
echo "file=${release_notes_file}" >> $GITHUB_OUTPUT
- name: Update balena release notes
run: |
set -ea
release_notes="$(cat < '${{ steps.release_notes.outputs.file }}' | jq -R -s .)"
app_id="$(curl --silent --retry 3 --fail \
"https://api.${{ inputs.balena_environment }}/v6/application?\$filter=slug%20eq%20%27${{ matrix.slug }}%27&\$select=id" \
-H 'Content-Type: application/json' \
-H 'Authorization: Bearer ${{ secrets.BALENA_API_KEY || secrets.BALENA_API_KEY_PUSH }}' \
| jq -r '.d[].id')"
release_id='${{ steps.balena_deploy.outputs.release_id }}'
if [[ -n $release_notes ]] && [[ -n $app_id ]] && [[ -n $release_id ]]; then
curl --silent --retry 3 --fail \
-X PATCH "https://api.${{ inputs.balena_environment }}/v6/release?\$filter=belongs_to__application%20eq%20${app_id}%20and%20id%20eq%20${release_id}" \
-H 'Content-Type: application/json' \
-H 'Authorization: Bearer ${{ secrets.BALENA_API_KEY || secrets.BALENA_API_KEY_PUSH }}' \
-d "{\"note\":${release_notes}}"
fi
balena_finalize:
name: Finalize balena
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_balena
- versioned_source
if: |
github.event.pull_request.merged == true || github.event_name == 'push'
strategy:
fail-fast: false
matrix:
slug: ${{ fromJSON(needs.is_balena.outputs.balena_slugs) }}
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- uses: balena-io/deploy-to-balena-action@4495424a8247c08911413a57afbbc62815a39e56
id: balena_deploy
with:
balena_token: ${{ secrets.BALENA_API_KEY || secrets.BALENA_API_KEY_PUSH }}
environment: ${{ inputs.balena_environment }}
fleet: ${{ matrix.slug }}
source: ${{ inputs.working_directory }}
registry_secrets: |
{
"ghcr.io": {
"username": "${{ github.actor }}",
"password": "${{ secrets.GITHUB_TOKEN }}"
},
"docker.io": {
"username": "${{ secrets.DOCKERHUB_USER }}",
"password": "${{ secrets.DOCKERHUB_TOKEN }}"
}
}
- name: Generate release notes
id: release_notes
run: |
set -ea
# prevent git from exiting with 141
set +o pipefail
previous_tag="$(git --no-pager tag --list --sort=-version:refname "v*.*.*" --merged | head -n2 | tail -n1)"
release_notes_file="$(mktemp)"
git log ${previous_tag}..${{ github.event.pull_request.head.sha || github.event.head_commit.id }} --pretty=reference > "${release_notes_file}"
echo "file=${release_notes_file}" >> $GITHUB_OUTPUT
- name: Update balena release notes
run: |
set -ea
release_notes="$(cat < '${{ steps.release_notes.outputs.file }}' | jq -R -s .)"
app_id="$(curl --silent --retry 3 --fail \
"https://api.${{ inputs.balena_environment }}/v6/application?\$filter=slug%20eq%20%27${{ matrix.slug }}%27&\$select=id" \
-H 'Content-Type: application/json' \
-H 'Authorization: Bearer ${{ secrets.BALENA_API_KEY || secrets.BALENA_API_KEY_PUSH }}' \
| jq -r '.d[].id')"
release_id='${{ steps.balena_deploy.outputs.release_id }}'
if [[ -n $release_notes ]] && [[ -n $app_id ]] && [[ -n $release_id ]]; then
curl --silent --retry 3 --fail \
-X PATCH "https://api.${{ inputs.balena_environment }}/v6/release?\$filter=belongs_to__application%20eq%20${app_id}%20and%20id%20eq%20${release_id}" \
-H 'Content-Type: application/json' \
-H 'Authorization: Bearer ${{ secrets.BALENA_API_KEY || secrets.BALENA_API_KEY_PUSH }}' \
-d "{\"note\":${release_notes}}"
fi
python_test:
name: Test python poetry
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_python
- versioned_source
if: |
github.event.pull_request.state == 'open' &&
needs.is_python.outputs.python_poetry == 'true'
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
strategy:
fail-fast: false
matrix:
python-version: ${{ fromJSON(needs.is_python.outputs.python_versions) }}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Create local refs
if: github.event.pull_request.state == 'open' && inputs.disable_versioning != true
run: |
git update-ref refs/tags/${{ needs.versioned_source.outputs.tag }} ${{ needs.versioned_source.outputs.tag_sha }}
- name: Setup python
id: setup-python
uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: ${{ matrix.python-version }}
- name: Setup poetry
if: steps.setup-python.outputs.python-version != ''
uses: abatilo/actions-poetry@192395c0d10c082a7c62294ab5d9a9de40e48974
with:
poetry-version: 1.5.1
- name: Run poetry install
run: |
poetry install
- name: Add linters and pytest to poetry
run: |
dep_list=`poetry show`
if (grep -wq ^flake8 <<< "$dep_list") && \
(grep -wq ^pydocstyle <<< "$dep_list") && \
(grep -wq ^pytest <<< "$dep_list")
then
echo "Dev dependencies already installed"
else
poetry add --group dev flake8@latest pydocstyle@latest pytest@latest
fi
- name: Lint with flake8
run: |
poetry run flake8 --max-line-length=120 --benchmark
- name: Lint with pydocstyle
run: |
poetry run pydocstyle
- name: Test with pytest
if: inputs.pseudo_terminal != true
run: |
poetry run pytest tests/
- name: Test with pytest (pseudo-tty)
if: inputs.pseudo_terminal == true
shell: script -q -e -c "bash --noprofile --norc -eo pipefail -x {0}" /tmp/test-session
run: |
poetry run pytest tests/
python_publish:
name: Publish to test PyPI
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_python
- npm_test
- custom_test
- docker_test
- cargo_test
- python_test
- versioned_source
if: |
!failure() && !cancelled() &&
needs.python_test.result == 'success' &&
needs.is_python.outputs.python_poetry == 'true' &&
needs.is_python.outputs.pypi_publish == 'true'
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Create local refs
if: github.event.pull_request.state == 'open' && inputs.disable_versioning != true
run: |
git update-ref refs/tags/${{ needs.versioned_source.outputs.tag }} ${{ needs.versioned_source.outputs.tag_sha }}
- name: Setup python
id: setup-python
uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: "3.9"
- name: Setup poetry
if: steps.setup-python.outputs.python-version != ''
uses: abatilo/actions-poetry@192395c0d10c082a7c62294ab5d9a9de40e48974
with:
poetry-version: 1.5.1
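# Derive a unique test-PyPI version: the head commit sha is converted from hex
# to decimal with bc and appended to the package version as a "-dev<N>" suffix.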
- name: Generate Python metadata
id: python_meta
run: |
package=$(poetry version --no-ansi | awk '{print $1}')
version=$(poetry version --no-ansi | awk '{print $2}')
commit_sha="$(echo ${{ github.event.pull_request.head.sha }} | tr "a-z" "A-Z")"
decimal_sha=$(echo "ibase=16; $commit_sha" | bc)
version_tag="${version}-dev${decimal_sha}"
echo "package=${package}" >> $GITHUB_OUTPUT
echo "version=${version}" >> $GITHUB_OUTPUT
echo "version_tag=${version_tag}" >> $GITHUB_OUTPUT
- name: Run poetry install
run: |
poetry install
- name: Publish draft release
env:
PYPI_TOKEN: ${{ secrets.PYPI_TEST_TOKEN }}
run: |
poetry version ${{ steps.python_meta.outputs.version_tag }}
poetry config repositories.test-pypi https://test.pypi.org/legacy/
poetry config pypi-token.test-pypi $PYPI_TOKEN
poetry publish --build -r test-pypi
python_finalize:
name: Finalize python
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_python
- versioned_source
if: |
(github.event.pull_request.merged == true || github.event_name == 'push') &&
needs.is_python.outputs.python_poetry == 'true' &&
needs.is_python.outputs.pypi_publish == 'true'
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Setup python
id: setup-python
uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: "3.9"
- name: Setup poetry
if: steps.setup-python.outputs.python-version != ''
uses: abatilo/actions-poetry@192395c0d10c082a7c62294ab5d9a9de40e48974
with:
poetry-version: 1.5.1
- name: Publish release
env:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
run: |
poetry config pypi-token.pypi $PYPI_TOKEN
poetry publish --build
website_publish:
name: Publish website
runs-on: ${{fromJSON(inputs.runs_on)}}
env:
CF_BRANCH: ${{ github.event.pull_request.head.ref || github.event.repository.default_branch }}
needs:
- is_website
- npm_test
- custom_test
- docker_test
- cargo_test
- python_test
- versioned_source
if: |
!failure() && !cancelled() &&
(github.event.action != 'closed' || github.event.pull_request.merged == true) &&
needs.is_website.result == 'success'
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Create local refs
if: github.event.pull_request.state == 'open' && inputs.disable_versioning != true
run: |
git update-ref refs/tags/${{ needs.versioned_source.outputs.tag }} ${{ needs.versioned_source.outputs.tag_sha }}
- name: Setup Node.js
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7
with:
node-version: 18
- name: Docusaurus Builder
if: |
needs.is_website.outputs.has_readme == 'true' &&
inputs.docusaurus_website != false
uses: product-os/docusaurus-builder@3c2a832a54574094e54ea8c02074681d90295ab0
with:
repo: ${{ github.event.repository.name }}
org: ${{ github.repository_owner }}
default_branch: ${{ github.event.repository.default_branch }}
url: https://${{ inputs.cloudflare_website }}.pages.dev/
- name: Custom Website Builder
if: |
inputs.docusaurus_website == false
run: npm run deploy-docs --if-present
- name: Update deploy branch for merged PRs
if: github.event.pull_request.state != 'open'
run: |
echo "CF_BRANCH=${{ github.event.repository.default_branch }}" >> $GITHUB_ENV
- name: Deploy to Cloudflare Pages
uses: cloudflare/wrangler-action@4b3eae832ab5113c67958be31ca062ad46c593b6
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
wranglerVersion: 3.5.1
preCommands: wrangler --version
command: pages deploy --branch ${{ env.CF_BRANCH }} --project-name=${{ inputs.cloudflare_website }} build/ | tee -a $GITHUB_STEP_SUMMARY
packageManager: npm
github_clean:
name: Clean GitHub release
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- event_types
if: |
github.event.action == 'closed' &&
github.event.pull_request.merged == false
defaults:
run:
working-directory: .
shell: bash --noprofile --norc -eo pipefail -x {0}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "write",
"metadata": "read"
}
repositories: '[ "${{ github.event.pull_request.head.repo.name }}" ]'
- name: Delete draft GitHub release
run: gh release delete --yes '${{ github.event.pull_request.head.ref }}' || true
env:
GH_DEBUG: "true"
GH_PAGER: cat
GH_PROMPT_DISABLED: "true"
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
github_publish:
name: Publish Github release
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- npm_publish
- python_publish
- cargo_publish
- custom_publish
if: |
!failure() && !cancelled() &&
github.event.pull_request.state == 'open'
defaults:
run:
working-directory: .
shell: bash --noprofile --norc -eo pipefail -x {0}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "write",
"metadata": "read"
}
repositories: '[ "${{ github.event.pull_request.head.repo.name }}" ]'
- name: Delete draft GitHub release
run: gh release delete --yes '${{ github.event.pull_request.head.ref }}' || true
env:
GH_DEBUG: "true"
GH_PAGER: cat
GH_PROMPT_DISABLED: "true"
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Download all artifacts
uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
with:
path: ${{ runner.temp }}
- name: Check if any release artifacts exist
id: gh_artifacts
env:
GH_ARTIFACTS: ${{ runner.temp }}/gh-release-${{ github.event.pull_request.head.sha || github.event.head_commit.id }}
run: |
set -ea
artifact_count=0
[ -d "$GH_ARTIFACTS" ] && \
artifact_count=$(ls "$GH_ARTIFACTS" | wc -l | sed 's/^ *//;s/ *$//')
echo "count=$artifact_count" >> $GITHUB_OUTPUT
- name: Publish artifacts
if: steps.gh_artifacts.outputs.count != '0'
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844
with:
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
name: ${{ github.event.pull_request.head.ref }}
tag_name: ${{ github.event.pull_request.head.ref }}
draft: true
prerelease: true
files: ${{ runner.temp }}/gh-release-${{ github.event.pull_request.head.sha || github.event.head_commit.id }}/*
github_finalize:
name: Finalize GitHub release
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- versioned_source
if: |
(
(
github.event.pull_request.merged == true &&
inputs.disable_versioning == false
) || (
github.event_name == 'push' &&
inputs.disable_versioning == true
)
)
defaults:
run:
working-directory: .
shell: bash --noprofile --norc -eo pipefail -x {0}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "write",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Generate release notes
id: release_notes
run: |
set -ea
# prevent git from exiting with 141
set +o pipefail
previous_tag="$(git --no-pager tag --list --sort=-version:refname "v*.*.*" --merged | head -n2 | tail -n1)"
release_notes_file="$(mktemp)"
git log ${previous_tag}..${{ github.event.pull_request.head.sha || github.event.head_commit.id }} --pretty=reference > "${release_notes_file}"
echo "file=${release_notes_file}" >> $GITHUB_OUTPUT
- name: Finalize GitHub release (if any)
env:
GH_DEBUG: "true"
GH_PAGER: cat
GH_PROMPT_DISABLED: "true"
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
run: |
set -ea
if gh release view '${{ github.event.pull_request.head.ref }}'; then
gh release edit '${{ github.event.pull_request.head.ref }}' \
--notes-file '${{ steps.release_notes.outputs.file }}' \
--title '${{ needs.versioned_source.outputs.tag }}' \
--tag '${{ needs.versioned_source.outputs.tag }}' \
--prerelease='${{ inputs.github_prerelease }}' \
--draft=false
if [[ ${{ inputs.github_prerelease }} =~ false ]]; then
release_id="$(gh api "/repos/${{ github.repository }}/releases/tags/${{ needs.versioned_source.outputs.tag }}" \
-H 'Accept: application/vnd.github+json' | jq -r .id)"
gh api --method PATCH "/repos/${{ github.repository }}/releases/${release_id}" \
-H 'Accept: application/vnd.github+json' \
-F make_latest=true
fi
else
echo "No release found for the current PR"
fi
cargo_test:
name: Test rust
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_cargo
- versioned_source
if: |
github.event.pull_request.state == 'open' &&
needs.is_cargo.outputs.cargo == 'true'
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
strategy:
fail-fast: false
matrix:
target: ${{ fromJSON(needs.is_cargo.outputs.cargo_targets) }}
outputs:
package: ${{ steps.meta.outputs.package }}
version: ${{ steps.meta.outputs.version }}
branch_tag: ${{ steps.meta.outputs.branch_tag }}
sha_tag: ${{ steps.meta.outputs.sha_tag }}
version_tag: ${{ steps.meta.outputs.version_tag }}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Create local refs
if: github.event.pull_request.state == 'open' && inputs.disable_versioning != true
run: |
git update-ref refs/tags/${{ needs.versioned_source.outputs.tag }} ${{ needs.versioned_source.outputs.tag_sha }}
- name: Set up toolchain ${{ matrix.target }}
uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ inputs.rust_toolchain }}
targets: ${{ matrix.target }}
components: rustfmt
- name: Check formatting
run: cargo fmt --check
- name: Install cross
run: cargo install cross --locked
- name: Lint with clippy
run: cross -v clippy --all-targets --all-features -- -D warnings
- name: Run tests for toolchain ${{ matrix.target }}
run: cross -v test --target ${{ matrix.target }}
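# Expose the crate name, semver, and branch/sha-derived tags as job outputs
# for downstream jobs such as cargo_publish.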
- name: Generate metadata
id: meta
run: |
package="$(grep '^name = \"' Cargo.toml | awk -F[\"\"] '{print $2}')"
version="${{ needs.versioned_source.outputs.semver }}"
branch_tag="$(echo '${{ github.event.pull_request.head.ref }}' | sed 's/[^[:alnum:]]/-/g')"
sha_tag="${branch_tag}-${{ github.event.pull_request.head.sha }}"
version_tag="${version}-${branch_tag}-${{ github.event.pull_request.head.sha }}"
echo "package=${package}" >> $GITHUB_OUTPUT
echo "version=${version}" >> $GITHUB_OUTPUT
echo "branch_tag=${branch_tag}" >> $GITHUB_OUTPUT
echo "sha_tag=${sha_tag}" >> $GITHUB_OUTPUT
echo "version_tag=${version_tag}" >> $GITHUB_OUTPUT
cargo_publish:
name: Publish rust
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_cargo
- npm_test
- custom_test
- docker_test
- cargo_test
- python_test
- versioned_source
if: |
!failure() && !cancelled() &&
needs.cargo_test.result == 'success' &&
inputs.rust_binaries == true
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
strategy:
fail-fast: false
matrix:
target: ${{ fromJSON(needs.is_cargo.outputs.cargo_targets) }}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Create local refs
if: github.event.pull_request.state == 'open' && inputs.disable_versioning != true
run: |
git update-ref refs/tags/${{ needs.versioned_source.outputs.tag }} ${{ needs.versioned_source.outputs.tag_sha }}
- name: Set up toolchain ${{ matrix.target }}
uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ inputs.rust_toolchain }}
targets: ${{ matrix.target }}
- name: Install cross
run: cargo install cross --locked
- name: Build release for toolchain ${{ matrix.target }}
run: cross -v build --release --target ${{ matrix.target }}
- name: Install LLVM
run: sudo apt-get install -y llvm
- name: LLVM strip
run: llvm-strip target/${{ matrix.target }}/release/${{ needs.cargo_test.outputs.package }}
- name: Compress
run: |
tar --auto-compress -cvf ${{ needs.cargo_test.outputs.package }}-${{ matrix.target }}.tar.zst -C target/${{ matrix.target }}/release ${{ needs.cargo_test.outputs.package }}
- name: Upload artifact
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
with:
name: gh-release-${{ github.event.pull_request.head.sha || github.event.head_commit.id }}
path: ${{ needs.cargo_test.outputs.package }}-${{ matrix.target }}.tar.zst
retention-days: 1
cargo_finalize:
name: Finalize rust
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_cargo
- versioned_source
if: |
(github.event.pull_request.merged == true || github.event_name == 'push') &&
needs.is_cargo.outputs.cargo == 'true'
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Set up toolchain
uses: dtolnay/rust-toolchain@stable
- name: Publish crate to ${{ env.CARGO_REGISTRY }}
env:
CARGO_REGISTRY_DEFAULT: ${{ env.CARGO_REGISTRY }}
CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
run: |
if [ -n "$CARGO_REGISTRY_TOKEN" ]; then
cargo publish
fi
custom_test:
name: Test custom
runs-on: ${{ matrix.os }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_custom
- versioned_source
if: |
github.event.pull_request.state == 'open' &&
needs.is_custom.outputs.custom_test == 'true'
strategy:
fail-fast: false
matrix:
value: ${{ fromJSON(needs.is_custom.outputs.custom_test_matrix) }}
os: ${{ fromJSON(inputs.tests_run_on || inputs.custom_runs_on) }}
steps:
- name: Reject external custom actions
if: |
github.event.pull_request.state == 'open' &&
github.event.pull_request.head.repo.full_name != github.repository &&
inputs.restrict_custom_actions == true
run: |
echo "::error::Custom actions are disabled for external contributors and will be skipped. \
Please contact a member of the organization for assistance."
exit 1
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: ${{ inputs.token_scope }}
- name: Checkout event sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 1
submodules: recursive
ref: ${{ github.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
path: /tmp/_target
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Create local refs
if: github.event.pull_request.state == 'open' && inputs.disable_versioning != true
run: |
git update-ref refs/tags/${{ needs.versioned_source.outputs.tag }} ${{ needs.versioned_source.outputs.tag_sha }}
- name: Set the matrix value env var
run: |
echo "matrix_value=${{ matrix.value }}" >> $GITHUB_ENV
echo "os_value=${{ matrix.os }}" >> $GITHUB_ENV
- uses: /tmp/_target/.github/actions/test
with:
json: ${{ toJSON(inputs) }}
secrets: ${{ toJSON(secrets) }}
variables: ${{ toJSON(vars) }}
custom_publish:
name: Publish custom
runs-on: ${{ matrix.os }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_custom
- npm_test
- custom_test
- docker_test
- cargo_test
- python_test
- versioned_source
if: |
!failure() && !cancelled() &&
github.event.pull_request.state == 'open' &&
needs.is_custom.outputs.custom_publish == 'true'
strategy:
fail-fast: false
matrix:
value: ${{ fromJSON(needs.is_custom.outputs.custom_publish_matrix) }}
os: ${{ fromJSON(inputs.tests_run_on || inputs.custom_runs_on) }}
steps:
- name: Reject external custom actions
if: |
github.event.pull_request.state == 'open' &&
github.event.pull_request.head.repo.full_name != github.repository &&
inputs.restrict_custom_actions == true
run: |
echo "::error::Custom actions are disabled for external contributors and will be skipped. \
Please contact a member of the organization for assistance."
exit 1
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: ${{ inputs.token_scope }}
- name: Checkout event sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 1
submodules: recursive
ref: ${{ github.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
path: /tmp/_target
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Create local refs
if: github.event.pull_request.state == 'open' && inputs.disable_versioning != true
run: |
git update-ref refs/tags/${{ needs.versioned_source.outputs.tag }} ${{ needs.versioned_source.outputs.tag_sha }}
- name: Set the matrix value env var
run: |
echo "matrix_value=${{ matrix.value }}" >> $GITHUB_ENV
echo "os_value=${{ matrix.os }}" >> $GITHUB_ENV
- uses: /tmp/_target/.github/actions/publish
with:
json: ${{ toJSON(inputs) }}
secrets: ${{ toJSON(secrets) }}
variables: ${{ toJSON(vars) }}
custom_finalize:
name: Finalize custom
runs-on: ${{ matrix.os }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_custom
- versioned_source
if: |
(github.event.pull_request.merged == true || github.event_name == 'push') &&
needs.is_custom.outputs.custom_finalize == 'true'
strategy:
fail-fast: false
matrix:
value: ${{ fromJSON(needs.is_custom.outputs.custom_finalize_matrix) }}
os: ${{ fromJSON(inputs.tests_run_on || inputs.custom_runs_on) }}
steps:
- name: Reject external custom actions
if: |
github.event.pull_request.state == 'open' &&
github.event.pull_request.head.repo.full_name != github.repository &&
inputs.restrict_custom_actions == true
run: |
echo "::error::Custom actions are disabled for external contributors and will be skipped. \
Please contact a member of the organization for assistance."
exit 1
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: ${{ inputs.token_scope }}
- name: Checkout event sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 1
submodules: recursive
ref: ${{ github.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
path: /tmp/_target
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Set the matrix value env var
run: |
echo "matrix_value=${{ matrix.value }}" >> $GITHUB_ENV
echo "os_value=${{ matrix.os }}" >> $GITHUB_ENV
- uses: /tmp/_target/.github/actions/finalize
with:
json: ${{ toJSON(inputs) }}
secrets: ${{ toJSON(secrets) }}
variables: ${{ toJSON(vars) }}
custom_clean:
name: Clean custom
runs-on: ${{ matrix.os }}
strategy:
fail-fast: true
matrix:
os: ${{ fromJSON(inputs.tests_run_on || inputs.custom_runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_custom
- versioned_source
if: |
github.event.action == 'closed' &&
github.event.pull_request.merged == false &&
needs.is_custom.outputs.custom_clean == 'true'
steps:
- name: Reject external custom actions
if: |
github.event.pull_request.state == 'open' &&
github.event.pull_request.head.repo.full_name != github.repository &&
inputs.restrict_custom_actions == true
run: |
echo "::error::Custom actions are disabled for external contributors and will be skipped. \
Please contact a member of the organization for assistance."
exit 1
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: ${{ inputs.token_scope }}
- name: Checkout event sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 1
submodules: recursive
ref: ${{ github.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
path: /tmp/_target
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- uses: /tmp/_target/.github/actions/clean
with:
json: ${{ toJSON(inputs) }}
secrets: ${{ toJSON(secrets) }}
variables: ${{ toJSON(vars) }}
custom_always:
name: Always custom
runs-on: ${{ matrix.os }}
strategy:
fail-fast: true
matrix:
os: ${{ fromJSON(inputs.tests_run_on || inputs.custom_runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- custom_test
- custom_publish
- custom_finalize
- custom_clean
- is_custom
- versioned_source
if: |
always() &&
needs.is_custom.outputs.custom_always == 'true'
steps:
- name: Reject external custom actions
if: |
github.event.pull_request.state == 'open' &&
github.event.pull_request.head.repo.full_name != github.repository &&
inputs.restrict_custom_actions == true
run: |
echo "::error::Custom actions are disabled for external contributors and will be skipped. \
Please contact a member of the organization for assistance."
exit 1
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: ${{ inputs.token_scope }}
- name: Checkout event sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 1
submodules: recursive
ref: ${{ github.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
path: /tmp/_target
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- uses: /tmp/_target/.github/actions/always
with:
json: ${{ toJSON(inputs) }}
secrets: ${{ toJSON(secrets) }}
variables: ${{ toJSON(vars) }}
cloudformation_test:
name: Test CloudFormation
runs-on: ${{ fromJSON(inputs.runs_on) }}
strategy:
fail-fast: true
matrix:
stack: ${{ fromJSON(needs.is_cloudformation.outputs.stacks) }}
include: ${{ fromJSON(needs.is_cloudformation.outputs.includes) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_cloudformation
- versioned_source
if: |
github.event.pull_request.state == 'open' &&
needs.is_cloudformation.outputs.cloudformation == 'true'
defaults:
run:
working-directory: ${{ inputs.working_directory }}
shell: bash --noprofile --norc -eo pipefail -x {0}
env:
AWS_RETRY_MODE: adaptive
AWS_MAX_ATTEMPTS: 10
AWS_REGION: ${{ matrix.region || inputs.aws_region }}
AWS_DEFAULT_REGION: ${{ matrix.region || inputs.aws_region }}
ATTEMPTS: 5
TIMEOUT: 3
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
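# Sleep for a random 1..DELAY seconds (default 5), presumably to stagger
# concurrent matrix jobs against the AWS APIs.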
- name: Random delay
run: |
DELAY=${DELAY-5}
random=$(((RANDOM % DELAY) + 1))
echo "sleeping for ${random}s"
sleep "${random}s"
- uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a
if: github.event.pull_request.head.repo.full_name == github.repository
continue-on-error: true
with:
role-to-assume: ${{ matrix.role || inputs.aws_iam_role }}
role-session-name: github-${{ github.job }}-${{ github.run_id }}-${{ github.run_attempt }}
aws-region: ${{ matrix.region || inputs.aws_region }}
mask-aws-account-id: false
- name: Get caller identity (AWS/whoami)
run: aws sts get-caller-identity
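# Write a small retry helper to a temp file so later steps can source it.
# with_backoff retries the wrapped command up to ATTEMPTS times, doubling the
# timeout after each failure, e.g. (illustrative): with_backoff aws s3 ls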
- name: Convenience functions
id: functions
run: |
EOF="$(openssl rand -hex 16)"
# https://sre.google/sre-book/addressing-cascading-failures/
with_backoff="$(mktemp)"
cat << $EOF > "${with_backoff}"
function with_backoff() {
local max_attempts=\${ATTEMPTS-3}
local timeout=\${TIMEOUT-2}
local attempt=0
local exitCode=0
set +e
while [[ \$attempt -lt \$max_attempts ]]; do
"\$@"
exitCode=\$?
[[ \$exitCode == 0 ]] && break
echo "Failure! Retrying in \$timeout.." 1>&2
sleep "\$timeout"
attempt=\$(( attempt + 1 ))
timeout=\$(( timeout * 2 ))
done
[[ \$exitCode != 0 ]] && echo "You've failed me for the last time! (\$*)" 1>&2
set -e
return \$exitCode
}
$EOF
echo "with_backoff=${with_backoff}" >> $GITHUB_OUTPUT
- name: Create templates bucket
id: make_bucket
run: |
# If at first you don't succeed, back off exponentially.
source '${{ steps.functions.outputs.with_backoff }}'
bucket="$(with_backoff aws s3api list-buckets | jq -r '.Buckets[] | select(.Name | (startswith("cfn-") and endswith("-${{ matrix.region || inputs.aws_region }}"))).Name' | head -n 1)"
if [[ -z "$bucket" ]]; then
result="$(with_backoff aws s3 mb "s3://cfn-$(uuidgen)-${{ matrix.region || inputs.aws_region }}" \
--region '${{ matrix.region || inputs.aws_region }}')"
bucket="${result#*:}"
bucket="${bucket//[[:space:]]/}"
fi
echo "s3_bucket=${bucket}" >> $GITHUB_OUTPUT
- name: Wait for resources
run: |
stack_status="$(aws cloudformation describe-stacks \
--stack-name '${{ matrix.stack }}' --output text --query Stacks[*].StackStatus || true)"
if [[ -n "$stack_status" ]]; then
aws cloudformation wait stack-exists --stack-name '${{ matrix.stack }}'
if [[ "$stack_status" =~ CREATE_IN_PROGRESS ]]; then
aws cloudformation wait stack-create-complete --stack-name '${{ matrix.stack }}'
fi
if [[ "$stack_status" =~ UPDATE_IN_PROGRESS ]]; then
aws cloudformation wait stack-update-complete --stack-name '${{ matrix.stack }}'
fi
if [[ "$stack_status" =~ ROLLBACK_IN_PROGRESS ]]; then
aws cloudformation wait stack-rollback-complete --stack-name '${{ matrix.stack }}'
fi
aws cloudformation describe-stacks --stack-name '${{ matrix.stack }}'
fi
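# Resolve this matrix stack's template, tags, params and capabilities from the cloudformation_templates input; secrets and vars are exported so envsubst can inject their values into params.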
- name: Generate shared outputs
id: shared
env:
SECRETS_CONTEXT: ${{ toJson(secrets) }}
VARS_CONTEXT: ${{ toJson(vars) }}
run: |
set -a
trap 'rm -f .env' EXIT
to_envs() { jq -r "to_entries[] | \"\(.key)="\'"\(.value)"\'"\""; }
printf "matrix: params='%s' tags='%s' caps='%s'" \
'${{ toJSON(matrix.params) }}' \
'${{ toJSON(matrix.tags) }}' \
'${{ toJSON(matrix.capabilities) }}'
stack="$(echo '${{ inputs.cloudformation_templates }}' | jq -r '.stacks[] | select(.name=="${{ matrix.stack }}")')"
template_file="$(echo "${stack}" | jq -rc .template)"
tags="$(echo "${stack}" | jq -rc .tags[] | paste -sd' ' -) github_pull_request=${{ github.event.pull_request.number }} github_sha=${{ github.event.pull_request.head.sha || github.event.head_commit.id }}"
params="$(echo "${stack}" | jq -rc .params[] | paste -sd' ' - || echo '')"
kvparams="$(echo "${stack}" | jq -rc .params | jq -r 'map(split("=") as [$ParameterKey, $ParameterValue] | {$ParameterKey, $ParameterValue})[] | "ParameterKey=" + .ParameterKey + ",ParameterValue=" + .ParameterValue' | paste -sd' ' - || echo '')"
caps="$(echo "${stack}" | jq -rc .capabilities[] | paste -sd' ' -)"
echo "${SECRETS_CONTEXT}" | to_envs > .env
echo "${VARS_CONTEXT}" | to_envs >> .env
source .env && rm -f .env
echo "stack_name=${{ matrix.stack }}" >> $GITHUB_OUTPUT
echo "template_file=${template_file}" >> $GITHUB_OUTPUT
echo "tags=${tags}" >> $GITHUB_OUTPUT
EOF="$(openssl rand -hex 16)"
if [[ -n "$params" ]]; then
params="$(echo "${params}" | envsubst)"
echo "params<<$EOF" >> $GITHUB_OUTPUT
echo --parameter-overrides ${params} >> $GITHUB_OUTPUT
echo $EOF >> $GITHUB_OUTPUT
fi
if [[ -n "$kvparams" ]]; then
kvparams="$(echo "${kvparams}" | envsubst)"
echo "kvparams<<$EOF" >> $GITHUB_OUTPUT
echo --parameters ${kvparams} >> $GITHUB_OUTPUT
echo $EOF >> $GITHUB_OUTPUT
fi
echo "caps=${caps}" >> $GITHUB_OUTPUT
- name: Validate template
run: |
source '${{ steps.functions.outputs.with_backoff }}'
tmpvalid="$(openssl rand -hex 16)"
trap 'aws s3 rm s3://${{ steps.make_bucket.outputs.s3_bucket }}/${tmpvalid}' EXIT
with_backoff aws s3 cp '${{ steps.shared.outputs.template_file }}' \
"s3://${{ steps.make_bucket.outputs.s3_bucket }}/${tmpvalid}"
with_backoff aws cloudformation validate-template \
--template-url "https://s3.amazonaws.com/${{ steps.make_bucket.outputs.s3_bucket }}/${tmpvalid}"
- name: Package template
run: |
source '${{ steps.functions.outputs.with_backoff }}'
mkdir -p "package/$(dirname '${{ steps.shared.outputs.template_file }}')"
with_backoff aws cloudformation package \
--template-file '${{ steps.shared.outputs.template_file }}' \
--s3-bucket '${{ steps.make_bucket.outputs.s3_bucket }}' \
--output-template-file 'package/${{ steps.shared.outputs.template_file }}'
- name: Estimate costs
continue-on-error: true
run: |
aws cloudformation estimate-template-cost \
--template-body 'file://package/${{ steps.shared.outputs.template_file }}' \
${{ steps.shared.outputs.kvparams || '' }}
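# Delete any change sets previously created for this pull request (matched via the github_pull_request tag) so a fresh one can be generated.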
- name: Delete existing change set
continue-on-error: true
run: |
source '${{ steps.functions.outputs.with_backoff }}'
change_set_ids="$(with_backoff aws cloudformation list-change-sets \
--stack-name '${{ matrix.stack }}' | jq -r '.Summaries[].ChangeSetId')"
for id in ${change_set_ids}; do
pr_tag="$(with_backoff aws cloudformation describe-change-set \
--change-set-name "${id}" \
--query Tags | jq -r '.[] | select(.Key=="github_pull_request").Value')"
if [[ -n "$pr_tag" ]] && [[ "$pr_tag" == '${{ github.event.pull_request.number }}' ]]; then
with_backoff aws cloudformation delete-change-set --change-set-name "${id}"
fi
done
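# cloudformation deploy with --no-execute-changeset prints a describe-change-set command when a change set is created; capture the text after the first ':' (newlines stripped) for the next step.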
- name: Generate change set
id: change_set
run: |
source '${{ steps.functions.outputs.with_backoff }}'
result="$(with_backoff aws cloudformation deploy \
--stack-name '${{ steps.shared.outputs.stack_name }}' \
--template-file 'package/${{ steps.shared.outputs.template_file }}' \
--s3-bucket '${{ steps.make_bucket.outputs.s3_bucket }}' \
--capabilities ${{ steps.shared.outputs.caps }} \
--tags ${{ steps.shared.outputs.tags }} \
--no-fail-on-empty-changeset \
--no-execute-changeset \
${{ steps.shared.outputs.params || '' }})"
if ! [[ "$result" =~ 'No changes to deploy' ]]; then
cmd="${result#*:}"
cmd=${cmd//$'\n'/}
echo "command=${cmd}" >> $GITHUB_OUTPUT
else
echo '::notice::no changes'
fi
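# Run the captured describe-change-set command and warn when the change set would replace or remove existing resources.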
- name: Describe change set(s)
if: steps.change_set.outputs.command != ''
run: |
result="$(${{ steps.change_set.outputs.command }})"
if [[ -n "$result" ]]; then
replace="$(echo "${result}" | jq -r '.Changes[].ResourceChange | select(.Replacement=="True")' | jq -rs '. | length')"
destroy="$(echo "${result}" | jq -r '.Changes[].ResourceChange | select(.Action=="Remove")' | jq -rs '. | length')"
if [[ $replace -gt 0 ]] || [[ $destroy -gt 0 ]]; then
echo '::warning::change set may destroy and/or replace existing resources'
else
echo '::notice::change set may add or update resources'
fi
echo "${result}" | jq -r
fi
cloudformation_finalize:
name: Finalize CloudFormation
runs-on: ${{ fromJSON(inputs.runs_on) }}
strategy:
fail-fast: false
matrix:
stack: ${{ fromJSON(needs.is_cloudformation.outputs.stacks) }}
include: ${{ fromJSON(needs.is_cloudformation.outputs.includes) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_cloudformation
if: |
(github.event.pull_request.merged == true || github.event_name == 'push') &&
needs.is_cloudformation.outputs.cloudformation == 'true'
env:
AWS_RETRY_MODE: adaptive
AWS_MAX_ATTEMPTS: 10
AWS_REGION: ${{ matrix.region || inputs.aws_region }}
AWS_DEFAULT_REGION: ${{ matrix.region || inputs.aws_region }}
ATTEMPTS: 5
TIMEOUT: 3
steps:
- name: Random delay
run: |
DELAY=${DELAY-5}
random=$(((RANDOM % DELAY) + 1))
echo "sleeping for ${random}s"
sleep "${random}s"
- uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a
if: github.event.pull_request.head.repo.full_name == github.repository
continue-on-error: true
with:
role-to-assume: ${{ matrix.role || inputs.aws_iam_role }}
role-session-name: github-${{ github.job }}-${{ github.run_id }}-${{ github.run_attempt }}
aws-region: ${{ matrix.region || inputs.aws_region }}
mask-aws-account-id: false
- name: Get caller identity (AWS/whoami)
run: aws sts get-caller-identity
- name: Wait for resources
run: |
stack_status="$(aws cloudformation describe-stacks \
--stack-name '${{ matrix.stack }}' --output text --query Stacks[*].StackStatus || true)"
if [[ -n "$stack_status" ]]; then
aws cloudformation wait stack-exists --stack-name '${{ matrix.stack }}'
if [[ "$stack_status" =~ CREATE_IN_PROGRESS ]]; then
aws cloudformation wait stack-create-complete --stack-name '${{ matrix.stack }}'
fi
if [[ "$stack_status" =~ UPDATE_IN_PROGRESS ]]; then
aws cloudformation wait stack-update-complete --stack-name '${{ matrix.stack }}'
fi
if [[ "$stack_status" =~ ROLLBACK_IN_PROGRESS ]]; then
aws cloudformation wait stack-rollback-complete --stack-name '${{ matrix.stack }}'
fi
aws cloudformation describe-stacks --stack-name '${{ matrix.stack }}'
fi
- name: Convenience functions
id: functions
run: |
EOF="$(openssl rand -hex 16)"
# https://sre.google/sre-book/addressing-cascading-failures/
with_backoff="$(mktemp)"
cat << $EOF > "${with_backoff}"
function with_backoff() {
local max_attempts=\${ATTEMPTS-3}
local timeout=\${TIMEOUT-2}
local attempt=0
local exitCode=0
set +e
while [[ \$attempt -lt \$max_attempts ]]; do
"\$@"
exitCode=\$?
[[ \$exitCode == 0 ]] && break
echo "Failure! Retrying in \$timeout.." 1>&2
sleep "\$timeout"
attempt=\$(( attempt + 1 ))
timeout=\$(( timeout * 2 ))
done
[[ \$exitCode != 0 ]] && echo "You've failed me for the last time! (\$*)" 1>&2
set -e
return \$exitCode
}
$EOF
echo "with_backoff=${with_backoff}" >> $GITHUB_OUTPUT
- name: Execute change set
run: |
source '${{ steps.functions.outputs.with_backoff }}'
change_set_ids="$(with_backoff aws cloudformation list-change-sets \
--stack-name '${{ matrix.stack }}' \
| jq -r '.Summaries[] | select(.ExecutionStatus=="AVAILABLE").ChangeSetId')"
for id in ${change_set_ids}; do
pr_tag="$(with_backoff aws cloudformation describe-change-set \
--change-set-name "${id}" \
--query Tags | jq -r '.[] | select(.Key=="github_pull_request").Value')"
if [[ -n "$pr_tag" ]] && [[ "$pr_tag" == '${{ github.event.pull_request.number }}' ]]; then
with_backoff aws cloudformation execute-change-set --change-set-name "${id}"
fi
done
- name: Wait for resources
run: |
stack_status="$(aws cloudformation describe-stacks \
--stack-name '${{ matrix.stack }}' --output text --query Stacks[*].StackStatus || true)"
if [[ -n "$stack_status" ]]; then
aws cloudformation wait stack-exists --stack-name '${{ matrix.stack }}'
if [[ "$stack_status" =~ CREATE_IN_PROGRESS ]]; then
aws cloudformation wait stack-create-complete --stack-name '${{ matrix.stack }}'
fi
if [[ "$stack_status" =~ UPDATE_IN_PROGRESS ]]; then
aws cloudformation wait stack-update-complete --stack-name '${{ matrix.stack }}'
fi
if [[ "$stack_status" =~ ROLLBACK_IN_PROGRESS ]]; then
aws cloudformation wait stack-rollback-complete --stack-name '${{ matrix.stack }}'
fi
aws cloudformation describe-stacks --stack-name '${{ matrix.stack }}'
fi
terraform_test:
name: Test Terraform
runs-on: ${{ fromJSON(inputs.runs_on) }}
strategy:
fail-fast: true
matrix:
project: ${{ fromJSON(needs.is_terraform.outputs.projects) }}
include: ${{ fromJSON(needs.is_terraform.outputs.includes) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_terraform
- versioned_source
if: |
github.event.pull_request.state == 'open' &&
needs.is_terraform.outputs.terraform == 'true'
defaults:
run:
working-directory: ${{ matrix.project }}
env:
KUBE_CTX: ${{ vars.KUBECTL_CONTEXT }}
KUBE_NAMESPACE: ${{ vars.KUBE_NAMESPACE }}
LOCK_TIMEOUT: 300s
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Random delay
run: |
DELAY=${DELAY-5}
random=$(((RANDOM % DELAY) + 1))
echo "sleeping for ${random}s"
sleep "${random}s"
- uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a
if: github.event.pull_request.head.repo.full_name == github.repository
continue-on-error: true
with:
role-to-assume: ${{ matrix.role || inputs.aws_iam_role }}
role-session-name: github-${{ github.job }}-${{ github.run_id }}-${{ github.run_attempt }}
aws-region: ${{ matrix.region || inputs.aws_region }}
mask-aws-account-id: false
- name: Get caller identity (AWS/whoami)
run: aws sts get-caller-identity
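# Derive the EKS cluster name from the second '/'-separated field of KUBECTL_CONTEXT (e.g. a context of the form <prefix>/<cluster-name>).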
- name: Update kubeconfig
run: |
aws eks update-kubeconfig --name "$(echo "${KUBE_CTX}" | awk -F'/' '{print $2}')"
- uses: hashicorp/setup-terraform@633666f66e0061ca3b725c73b2ec20cd13a8fdd1
with:
terraform_wrapper: false
terraform_version: 1.3.9
- name: Terraform init
run: terraform init
- name: Terraform show
run: terraform show
- name: Terraform validate
run: terraform validate
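# terraform plan -detailed-exitcode returns 0 (no changes), 1 (error) or 2 (changes pending); only an error fails this step, and the JSON plan summary is used to warn when resources would be removed.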
- name: Terraform plan
env:
SECRETS_CONTEXT: ${{ toJson(secrets) }}
VARS_CONTEXT: ${{ toJson(vars) }}
run: |
set -a
trap 'rm -f .env "${json_plan}"' EXIT
to_envs() { jq -r "to_entries[] | \"\(.key)="\'"\(.value)"\'"\""; }
echo "${SECRETS_CONTEXT}" | to_envs > .env
echo "${VARS_CONTEXT}" | to_envs >> .env
source .env && rm -f .env
rc=0
terraform plan -lock-timeout=${LOCK_TIMEOUT} -detailed-exitcode || rc=$?
if [[ $rc -eq 1 ]]; then
false
fi
json_plan="$(mktemp)"
terraform plan -lock-timeout=${LOCK_TIMEOUT} -json > "${json_plan}"
msg=$(cat < "${json_plan}" | jq -rs '.[] | select(.type=="change_summary")."@message"')
add=$(cat < "${json_plan}" | jq -rs '.[] | select(.type=="change_summary").changes.add')
change=$(cat < "${json_plan}" | jq -rs '.[] | select(.type=="change_summary").changes.change')
remove=$(cat < "${json_plan}" | jq -rs '.[] | select(.type=="change_summary").changes.remove')
if [[ -n "$remove" ]] && [[ "$remove" -gt 0 ]]; then
echo "::warning::${msg}"
else
echo "::notice::${msg}"
fi
terraform_finalize:
name: Finalize Terraform
runs-on: ${{ fromJSON(inputs.runs_on) }}
strategy:
fail-fast: false
matrix:
project: ${{ fromJSON(needs.is_terraform.outputs.projects) }}
include: ${{ fromJSON(needs.is_terraform.outputs.includes) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- is_terraform
if: |
(github.event.pull_request.merged == true || github.event_name == 'push') &&
needs.is_terraform.outputs.terraform == 'true'
defaults:
run:
working-directory: ${{ matrix.project }}
env:
KUBE_CTX: ${{ vars.KUBECTL_CONTEXT }}
KUBE_NAMESPACE: ${{ vars.KUBE_NAMESPACE }}
LOCK_TIMEOUT: 300s
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"contents": "read",
"metadata": "read"
}
- name: Checkout versioned sha
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
submodules: recursive
ref: ${{ needs.versioned_source.outputs.sha }}
token: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
- name: Random delay
run: |
DELAY=${DELAY-5}
random=$(((RANDOM % DELAY) + 1))
echo "sleeping for ${random}s"
sleep "${random}s"
- uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a
if: github.event.pull_request.head.repo.full_name == github.repository
continue-on-error: true
with:
role-to-assume: ${{ matrix.role || inputs.aws_iam_role }}
role-session-name: github-${{ github.job }}-${{ github.run_id }}-${{ github.run_attempt }}
aws-region: ${{ matrix.region || inputs.aws_region }}
mask-aws-account-id: false
- name: Get caller identity (AWS/whoami)
run: aws sts get-caller-identity
- name: Update kubeconfig
run: |
aws eks update-kubeconfig --name "$(echo "${KUBE_CTX}" | awk -F'/' '{print $2}')"
- uses: hashicorp/setup-terraform@633666f66e0061ca3b725c73b2ec20cd13a8fdd1
with:
terraform_wrapper: false
terraform_version: 1.3.9
- name: Terraform init
run: terraform init
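# Apply only when the plan reports pending changes (exit code 2); exit code 0 means nothing to do, any other exit code fails the step.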
- name: Terraform apply
env:
SECRETS_CONTEXT: ${{ toJson(secrets) }}
VARS_CONTEXT: ${{ toJson(vars) }}
run: |
set -a
trap 'rm -f .env' EXIT
to_envs() { jq -r "to_entries[] | \"\(.key)="\'"\(.value)"\'"\""; }
echo "${SECRETS_CONTEXT}" | to_envs > .env
echo "${VARS_CONTEXT}" | to_envs >> .env
source .env && rm -f .env
rc=0
terraform plan -lock-timeout=${LOCK_TIMEOUT} -detailed-exitcode || rc=$?
if [[ $rc -eq 2 ]]; then
terraform apply -lock-timeout=${LOCK_TIMEOUT} -auto-approve
elif [[ $rc -eq 0 ]]; then
true
else
false
fi
protect_branch:
name: Protect branch
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- versioned_source
if: |
github.event.pull_request.state == 'open' &&
inputs.protect_branch == true &&
github.event.pull_request.head.repo.full_name == github.repository &&
github.event.repository.default_branch == github.event.pull_request.base.ref
defaults:
run:
working-directory: .
shell: bash --noprofile --norc -eo pipefail -x {0}
env:
BRANCH_PROTECTION_URI: repos/${{ github.repository }}/branches/${{ github.event.repository.default_branch }}/protection
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"administration": "write",
"contents": "read",
"metadata": "read"
}
repositories: '[ "${{ github.event.pull_request.head.repo.name }}" ]'
- name: Get branch protection rules
id: branch_protection
env:
GH_DEBUG: "true"
GH_PAGER: cat
GH_PROMPT_DISABLED: "true"
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
run: |
result="$(gh api "${BRANCH_PROTECTION_URI}" --jq '.' ; exit=$?)"
if [[ "${exit}" -gt 0 ]]
then
message="$(echo "${result}" | jq -r '.message // ""')"
case "${message}" in
"Branch not Found"|"Branch not protected")
echo "::warning::Failed to get branch protection rules ${message} ${result}"
exit 0
;;
*)
echo "::error::Failed to get branch protection rules ${message} ${result}"
exit 1
;;
esac
fi
echo "json=${result}" >> $GITHUB_OUTPUT
echo "required_status_checks__strict=$(jq -cr '.required_status_checks.strict // true' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_status_checks__contexts=$(jq -cr '.required_status_checks.contexts // []' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_pull_request_reviews__required_approving_review_count=$(jq -cr '.required_pull_request_reviews.required_approving_review_count // 0' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_pull_request_reviews__dismiss_stale_reviews=$(jq -cr '.required_pull_request_reviews.dismiss_stale_reviews // false' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_pull_request_reviews__require_code_owner_reviews=$(jq -cr '.required_pull_request_reviews.require_code_owner_reviews // false' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_pull_request_reviews__dismissal_restrictions__users=$(jq -cr '.required_pull_request_reviews.dismissal_restrictions.users // []' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_pull_request_reviews__dismissal_restrictions__teams=$(jq -cr '.required_pull_request_reviews.dismissal_restrictions.teams // []' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_pull_request_reviews__dismissal_restrictions__apps=$(jq -cr '.required_pull_request_reviews.dismissal_restrictions.apps // []' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_linear_history__enabled=$(jq -cr '.required_linear_history.enabled // false' <<< "${result}")" >> $GITHUB_OUTPUT
echo "allow_force_pushes__enabled=$(jq -cr '.allow_force_pushes.enabled // false' <<< "${result}")" >> $GITHUB_OUTPUT
echo "allow_deletions__enabled=$(jq -cr '.allow_deletions.enabled // false' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_conversation_resolution__enabled=$(jq -cr '.required_conversation_resolution.enabled // false' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_signatures__enabled=$(jq -cr '.required_signatures.enabled // false' <<< "${result}")" >> $GITHUB_OUTPUT
echo "enforce_admins__enabled=$(jq -cr '.enforce_admins.enabled // false' <<< "${result}")" >> $GITHUB_OUTPUT
echo "block_creations__enabled=$(jq -cr '.block_creations.enabled // false' <<< "${result}")" >> $GITHUB_OUTPUT
- name: Check if PR is draft
id: is_draft_pr
env:
GH_DEBUG: "true"
GH_PAGER: cat
GH_PROMPT_DISABLED: "true"
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
if gh pr view ${{ github.event.pull_request.number }} --json isDraft | jq -e '.isDraft == true'
then
echo "result=true" >> $GITHUB_OUTPUT
else
echo "result=false" >> $GITHUB_OUTPUT
fi
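# Merge the required_status_checks input with the existing contexts (dropping this workflow's own '<job_name> /' checks) and skip the update when nothing would change.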
- name: Prepare protection rules
id: prepare_protection_rules
if: steps.branch_protection.outputs.json != ''
env:
REQUIRED_STATUS_CHECKS_INPUT: ${{ inputs.required_status_checks }}
REQUIRED_APPROVING_REVIEW_COUNT_INPUT: ${{ inputs.required_approving_review_count }}
required_status_checks__strict: ${{ steps.branch_protection.outputs.required_status_checks__strict }}
required_status_checks__contexts: ${{ steps.branch_protection.outputs.required_status_checks__contexts }}
required_pull_request_reviews__required_approving_review_count: ${{ steps.branch_protection.outputs.required_pull_request_reviews__required_approving_review_count }}
required_pull_request_reviews__dismissal_restrictions__users: ${{ steps.branch_protection.outputs.required_pull_request_reviews__dismissal_restrictions__users }}
required_pull_request_reviews__dismissal_restrictions__teams: ${{ steps.branch_protection.outputs.required_pull_request_reviews__dismissal_restrictions__teams }}
required_pull_request_reviews__dismissal_restrictions__apps: ${{ steps.branch_protection.outputs.required_pull_request_reviews__dismissal_restrictions__apps }}
required_pull_request_reviews__dismiss_stale_reviews: ${{ steps.branch_protection.outputs.required_pull_request_reviews__dismiss_stale_reviews }}
required_pull_request_reviews__require_code_owner_reviews: ${{ steps.branch_protection.outputs.required_pull_request_reviews__require_code_owner_reviews }}
enforce_admins__enabled: ${{ steps.branch_protection.outputs.enforce_admins__enabled }}
required_linear_history__enabled: ${{ steps.branch_protection.outputs.required_linear_history__enabled }}
required_conversation_resolution__enabled: ${{ steps.branch_protection.outputs.required_conversation_resolution__enabled }}
allow_force_pushes__enabled: ${{ steps.branch_protection.outputs.allow_force_pushes__enabled }}
allow_deletions__enabled: ${{ steps.branch_protection.outputs.allow_deletions__enabled }}
required_signatures__enabled: ${{ steps.branch_protection.outputs.required_signatures__enabled }}
block_creations__enabled: ${{ steps.branch_protection.outputs.block_creations__enabled }}
run: |
new_required_status_checks__contexts="$(echo $required_status_checks__contexts | \
jq -r "map(
select(
test(\"^${{ inputs.job_name }} /\";\"i\") | not
)
) | . + ${REQUIRED_STATUS_CHECKS_INPUT} | unique"
)"
if [ $(echo ${new_required_status_checks__contexts} | jq 'length') -lt 1 ]
then
echo "::error::Not applying empty list of status checks"
exit 1
fi
newjson=$(cat <<-END
{
"required_status_checks": {
"strict": ${required_status_checks__strict},
"contexts": ${new_required_status_checks__contexts}
},
"required_pull_request_reviews": {
"dismissal_restrictions": {
"users": ${required_pull_request_reviews__dismissal_restrictions__users},
"teams": ${required_pull_request_reviews__dismissal_restrictions__teams},
"apps": ${required_pull_request_reviews__dismissal_restrictions__apps}
},
"dismiss_stale_reviews": ${required_pull_request_reviews__dismiss_stale_reviews},
"require_code_owner_reviews": ${required_pull_request_reviews__require_code_owner_reviews},
"required_approving_review_count": ${REQUIRED_APROVING_REVIEW_COUNT_INPUT},
"bypass_pull_request_allowances": {
"users": [],
"teams": []
}
},
"enforce_admins": ${enforce_admins__enabled},
"required_signatures": ${required_signatures__enabled},
"restrictions": null,
"required_linear_history": ${required_linear_history__enabled},
"allow_force_pushes": ${allow_force_pushes__enabled},
"allow_deletions": ${allow_deletions__enabled},
"block_creations": ${block_creations__enabled},
"required_conversation_resolution": ${required_conversation_resolution__enabled}
}
END
)
# dismissal restrictions and bypass allowances are not supported outside of GitHub organisations
if [[ -z '${{ github.event.organization }}' ]]; then
newjson="$(echo "${newjson}" | jq -r 'del(.required_pull_request_reviews.dismissal_restrictions, .required_pull_request_reviews.bypass_pull_request_allowances)')"
fi
diff_exit_code="$(diff -Z <(echo ${new_required_status_checks__contexts} | jq 'sort_by(.)') <(echo ${required_status_checks__contexts} | jq 'sort_by(.)') 1>&2; echo $?)"
if [ ${diff_exit_code} -eq 0 ] && [ ${required_pull_request_reviews__required_approving_review_count} -eq ${REQUIRED_APPROVING_REVIEW_COUNT_INPUT} ]
then
echo "::debug::Branch protection rules are unchanged, skipping update"
exit 0
fi
result=$(echo "${newjson}" | jq -c '. | @json' )
echo "result=${result}" >> $GITHUB_OUTPUT
- name: Apply branch protection rules
id: apply_branch_protection_rules
if: |
steps.is_draft_pr.outputs.result == 'false' &&
steps.prepare_protection_rules.outputs.result != ''
env:
GH_DEBUG: "true"
GH_PAGER: cat
GH_PROMPT_DISABLED: "true"
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
run: |
result="$(echo '${{ fromJSON(steps.prepare_protection_rules.outputs.result) }}' \
| gh api --method PUT ${{ env.BRANCH_PROTECTION_URI }} --input -)"
message="$(echo "${result}" | jq -r .message)"
if ! [[ $message =~ null ]]
then
echo "::error::Failed to apply branch protection rules with ${message} ${result} "
exit 1
fi
echo "::notice::Branch protection rules have been updated"
repo_config:
name: Apply repo settings
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- versioned_source
if: |
github.event.pull_request.merged == true &&
inputs.repo_config == true
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"administration": "write",
"contents": "read",
"metadata": "read"
}
repositories: '[ "${{ github.event.pull_request.head.repo.name }}" ]'
- name: Configure repository
env:
GH_DEBUG: "true"
GH_PAGER: cat
GH_PROMPT_DISABLED: "true"
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
run: |
# only change repository visibility if explicitly set to one of the permitted values (private, public or internal)
if [[ '${{ inputs.repo_visibility }}' =~ private|public|internal ]]; then
gh repo edit '${{ github.repository }}' \
--visibility '${{ inputs.repo_visibility }}' || true
fi
if [[ -n '${{ inputs.repo_description }}' ]]; then
gh repo edit '${{ github.repository }}' \
--description '${{ inputs.repo_description }}'
fi
if [[ -n '${{ inputs.repo_homepage }}' ]]; then
homepage='${{ inputs.repo_homepage }}'
elif [[ -n '${{ inputs.cloudflare_website }}' ]]; then
homepage='https://${{ inputs.cloudflare_website }}.pages.dev'
else
homepage=''
fi
[[ -n "${homepage}" ]] && gh repo edit '${{ github.repository }}' \
--homepage "${homepage}"
# may fail with "HTTP 422: This organization does not allow private repository forking"
if ! gh repo edit '${{ github.repository }}' \
--allow-forking=${{ inputs.repo_allow_forking }}; then
echo '::warning::Failed to configure some repository settings.'
fi
# FIXME: https://github.com/cli/cli/issues/6652#issuecomment-1323908232
gh repo edit '${{ github.repository }}' \
--default-branch=${{ inputs.repo_default_branch }} \
--delete-branch-on-merge=${{ inputs.repo_delete_branch_on_merge }} \
--enable-auto-merge=${{ inputs.repo_enable_auto_merge }} \
--enable-issues=${{ inputs.repo_enable_issues }} \
--enable-merge-commit=${{ inputs.repo_enable_merge_commit }} \
--enable-projects=${{ inputs.repo_enable_projects }} \
--enable-rebase-merge=${{ inputs.repo_enable_rebase_merge }} \
--enable-squash-merge=${{ inputs.repo_enable_squash_merge }} \
--enable-wiki=${{ inputs.repo_enable_wiki }}
# allow_update_branch not currently available for update via gh-cli
gh api --method PATCH '/repos/${{ github.repository }}' \
-H "Accept: application/vnd.github+json" \
-F allow_update_branch='${{ inputs.repo_allow_update_branch }}'
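# Aggregate gate intended for use as a single required status check: fails if any of the needed test jobs failed or was cancelled.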
all_tests:
name: All tests
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- npm_test
- docker_test
- python_test
- cargo_test
- custom_test
- cloudformation_test
- terraform_test
if: |
always() &&
github.event.pull_request.state == 'open'
steps:
- name: Reject failed jobs
run: |
if [ "${{ contains(needs.*.result, 'failure') }}" = "true" ]
then
echo "One or more jobs have failed"
exit 1
fi
- name: Reject cancelled jobs
run: |
if [ "${{ contains(needs.*.result, 'cancelled') }}" = "true" ]
then
echo "One or more jobs were cancelled"
exit 1
fi
all_jobs:
name: All jobs
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- event_types
- versioned_source
- is_npm
- is_docker
- is_python
- is_cargo
- is_balena
- is_custom
- is_website
- all_tests
- npm_publish
- docker_publish
- balena_publish
- python_publish
- website_publish
- github_publish
- cargo_publish
- custom_publish
- custom_always
- protect_branch
- gpt-review
if: |
always() &&
github.event.pull_request.state == 'open'
steps:
- name: Reject failed jobs
run: |
if [ "${{ contains(needs.*.result, 'failure') }}" = "true" ]
then
echo "One or more jobs have failed"
exit 1
fi
- name: Reject cancelled jobs
run: |
if [ "${{ contains(needs.*.result, 'cancelled') }}" = "true" ]
then
echo "One or more jobs were cancelled"
exit 1
fi
auto-merge:
name: Auto-merge
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- protect_branch
if: |
github.event.pull_request.state == 'open' &&
inputs.toggle_auto_merge == true &&
github.event.pull_request.user.type != 'Bot'
env:
BRANCH_PROTECTION_URI: repos/${{ github.repository }}/branches/${{ github.event.pull_request.base.ref }}/protection
steps:
- name: Generate GitHub App installation token
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
continue-on-error: true
id: gh_app_token
with:
app_id: ${{ inputs.app_id }}
installation_retrieval_mode: id
installation_retrieval_payload: ${{ inputs.installation_id }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
permissions: |-
{
"administration": "read",
"contents": "write",
"metadata": "read",
"pull_requests": "write"
}
repositories: '[ "${{ github.event.pull_request.head.repo.name }}" ]'
- name: Get branch protection rules
id: branch_protection
env:
GH_DEBUG: "true"
GH_PAGER: cat
GH_PROMPT_DISABLED: "true"
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
run: |
result="$(gh api "${BRANCH_PROTECTION_URI}" --jq '.' ; exit=$?)"
if [[ "${exit}" -gt 0 ]]
then
message="$(echo "${result}" | jq -r '.message // ""')"
case "${message}" in
"Branch not Found"|"Branch not protected")
echo "::warning::Failed to get branch protection rules ${message} ${result}"
exit 0
;;
*)
echo "::error::Failed to get branch protection rules ${message} ${result}"
exit 1
;;
esac
fi
echo "json=${result}" >> $GITHUB_OUTPUT
echo "required_status_checks__strict=$(jq -cr '.required_status_checks.strict // true' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_status_checks__contexts=$(jq -cr '.required_status_checks.contexts // []' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_pull_request_reviews__required_approving_review_count=$(jq -cr '.required_pull_request_reviews.required_approving_review_count // 0' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_pull_request_reviews__dismiss_stale_reviews=$(jq -cr '.required_pull_request_reviews.dismiss_stale_reviews // false' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_pull_request_reviews__require_code_owner_reviews=$(jq -cr '.required_pull_request_reviews.require_code_owner_reviews // false' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_pull_request_reviews__dismissal_restrictions__users=$(jq -cr '.required_pull_request_reviews.dismissal_restrictions.users // []' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_pull_request_reviews__dismissal_restrictions__teams=$(jq -cr '.required_pull_request_reviews.dismissal_restrictions.teams // []' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_pull_request_reviews__dismissal_restrictions__apps=$(jq -cr '.required_pull_request_reviews.dismissal_restrictions.apps // []' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_linear_history__enabled=$(jq -cr '.required_linear_history.enabled // false' <<< "${result}")" >> $GITHUB_OUTPUT
echo "allow_force_pushes__enabled=$(jq -cr '.allow_force_pushes.enabled // false' <<< "${result}")" >> $GITHUB_OUTPUT
echo "allow_deletions__enabled=$(jq -cr '.allow_deletions.enabled // false' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_conversation_resolution__enabled=$(jq -cr '.required_conversation_resolution.enabled // false' <<< "${result}")" >> $GITHUB_OUTPUT
echo "required_signatures__enabled=$(jq -cr '.required_signatures.enabled // false' <<< "${result}")" >> $GITHUB_OUTPUT
echo "enforce_admins__enabled=$(jq -cr '.enforce_admins.enabled // false' <<< "${result}")" >> $GITHUB_OUTPUT
echo "block_creations__enabled=$(jq -cr '.block_creations.enabled // false' <<< "${result}")" >> $GITHUB_OUTPUT
- name: Check if PR is draft
id: is_draft_pr
env:
GH_DEBUG: "true"
GH_PAGER: cat
GH_PROMPT_DISABLED: "true"
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
if gh pr view ${{ github.event.pull_request.number }} --json isDraft | jq -e '.isDraft == true'
then
echo "result=true" >> $GITHUB_OUTPUT
else
echo "result=false" >> $GITHUB_OUTPUT
fi
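# Enable GitHub auto-merge (merge commit) for the PR; only attempted for non-draft PRs on branches that have required status checks configured.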
- name: Toggle auto-merge
if: |
steps.is_draft_pr.outputs.result == 'false' &&
steps.branch_protection.outputs.json != '' &&
steps.branch_protection.outputs.required_status_checks__contexts != '[]'
env:
GH_DEBUG: "true"
GH_PAGER: cat
GH_PROMPT_DISABLED: "true"
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ steps.gh_app_token.outputs.token || secrets.FLOWZONE_TOKEN }}
run: |
gh pr merge ${{ github.event.pull_request.number }} --merge --auto || true
gpt-review:
name: GPT Review
runs-on: ${{ fromJSON(inputs.runs_on) }}
timeout-minutes: ${{ fromJSON(inputs.jobs_timeout_minutes) }}
needs:
- versioned_source
if: |
github.event.pull_request.state == 'open' &&
inputs.enable_gpt_review == true &&
github.event.repository.private != true
steps:
- name: Setup python
id: setup-python
uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: 3.11
- name: Install gpt-review
run: |
python3 -m venv .env
source .env/bin/activate
python -m pip install --upgrade pip
python -m pip install gpt-review==0.9.4
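# Use a repo-provided azure.yaml for gpt-review if present, otherwise generate a default model map under runner.temp.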
- name: Configure models
run: |
if [ -f azure.yaml ]; then
echo "CONTEXT_FILE=azure.yaml" >> $GITHUB_ENV
exit 0
fi
yq e -n '.azure_model_map.turbo_llm_model_deployment_id = "gpt-3.5-turbo" |
.azure_model_map.smart_llm_model_deployment_id = "gpt-3.5-turbo-16k" |
.azure_model_map.large_llm_model_deployment_id = "gpt-3.5-turbo-16k" |
.azure_model_map.embedding_model_deployment_id = "text-embedding-ada-002"' > ${{ runner.temp }}/azure.yaml
echo "CONTEXT_FILE=${{ runner.temp }}/azure.yaml" >> $GITHUB_ENV
- name: Review PR and make comment
continue-on-error: true
run: |
source .env/bin/activate
gpt github review \
--access-token $GITHUB_TOKEN \
--pull-request $PR_NUMBER \
--repository $REPOSITORY_NAME
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
OPENAI_ORG_KEY: ${{ secrets.OPENAI_ORG_KEY }}
GIT_COMMIT_HASH: ${{ github.event.pull_request.head.sha }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_NUMBER: ${{ github.event.pull_request.number }}
PR_TITLE: ${{ github.event.pull_request.title }}
REPOSITORY_NAME: ${{ github.repository }}
LINK: https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number }}
FULL_SUMMARY: true
FILE_SUMMARY: false
TEST_SUMMARY: false
BUG_SUMMARY: false
RISK_SUMMARY: false
RISK_BREAKING: false