# feat: expose statuses to compile torch (#707)
name: CML builds (weekly or not)
on:
  pull_request:
  push:
    branches:
      - main
      - 'release/*'
    tags:
      - "v*"
  # Workflow dispatch and schedule refer to the weekly build
  workflow_dispatch:
    inputs:
      event_name:
        description: "Event that triggered the workflow"
        required: true
        type: choice
        default: weekly
        options:
          - weekly
          - pr
          - release
          - push_to_main
          - push_to_release
      linux_python_versions:
        description: "Space separated list of python versions (3.8, 3.9, 3.10 are supported) to launch on linux"
        required: false
        type: string
        default: "3.8 3.9"
      macos_python_versions:
        description: "Space separated list of python versions (3.8, 3.9, 3.10 are supported) to launch on macos"
        required: false
        type: string
        default: "3.8 3.9"
  schedule:
    # * is a special character in YAML so you have to quote this string
    # At 22:00 on Sunday
    # Timezone is UTC, so Paris time is +2 during the summer and +1 during winter
    - cron: '0 22 * * 0'
concurrency:
  # Add event_name in the group as workflow dispatch means we could run this in addition to other
  # workflows already running on a PR or a merge e.g.
  group: "${{ github.ref }}-${{ github.event_name }}-${{ github.workflow }}"
  # Cancel the previous build, except on main
  cancel-in-progress: ${{ github.event_name != 'push' || github.ref != 'refs/heads/main' }}
env:
  ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
  # Build-type flags: each is the string "true"/"false"; consumers must use fromJSON()
  IS_PR: ${{ github.event_name == 'pull_request' }}
  IS_WEEKLY: ${{ github.event_name == 'schedule' || ((github.event_name == 'workflow_dispatch') && (github.event.inputs.event_name == 'weekly')) }}
  IS_RELEASE: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }}
  IS_PUSH_TO_MAIN: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
  IS_PUSH_TO_RELEASE: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/heads/release/') }}
  IS_WORKFLOW_DISPATCH: ${{ github.event_name == 'workflow_dispatch' }}
  AGENT_TOOLSDIRECTORY: /opt/hostedtoolcache
  RUNNER_TOOL_CACHE: /opt/hostedtoolcache
  GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
  # For reasons explained in the issue, we need to manually list the files that we want to download
  # from LFS. This could be removed if tests stop to use files stored in LFS
  # FIXME: https://github.com/zama-ai/concrete-ml-internal/issues/3600
  LFS_TEST_FILES: "\
    tests/data/mnist_2b_s1_1.zip, \
    tests/data/mnist_test_batch.zip, \
    tests/parameter_search/FashionMNIST_quant_state_dict.pt, \
    tests/parameter_search/custom_data_fp32_state_dict.pt, \
    tests/parameter_search/custom_data_quant_state_dict.pt"
jobs:
  matrix-preparation:
    # We skip the CI in cases of pushing to internal main (because all pushes to main internal are now from the bot)
    if: ${{ !( github.repository == 'zama-ai/concrete-ml-internal' && github.event_name == 'push' && github.ref == 'refs/heads/main' ) }}
    runs-on: ubuntu-20.04
    timeout-minutes: 5
    outputs:
      linux-matrix: ${{ steps.set-matrix.outputs.linux-matrix }}
      macos-matrix: ${{ steps.set-matrix.outputs.macos-matrix }}
      needs-38-linux-runner: ${{ steps.set-matrix.outputs.needs-38-linux-runner }}
      needs-39-linux-runner: ${{ steps.set-matrix.outputs.needs-39-linux-runner }}
      needs-310-linux-runner: ${{ steps.set-matrix.outputs.needs-310-linux-runner }}
      instance-type: ${{ steps.set-matrix.outputs.instance-type }}
    steps:
      - name: Checkout code
        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
      - name: Set matrix
        id: set-matrix
        run: |
          echo "${{ github.event_name }}"

          # Manage build type that will condition the rest of the CI
          if [[ "${IS_PR}" == "true" ]]; then
            BUILD_TYPE="pr"
          elif [[ "${IS_WEEKLY}" == "true" ]]; then
            BUILD_TYPE="weekly"
          elif [[ "${IS_RELEASE}" == "true" ]]; then
            BUILD_TYPE="release"
          elif [[ "${IS_PUSH_TO_MAIN}" == "true" ]]; then
            BUILD_TYPE="push_to_main"
          elif [[ "${IS_PUSH_TO_RELEASE}" == "true" ]]; then
            BUILD_TYPE="push_to_release"
          elif [[ "${IS_WORKFLOW_DISPATCH}" == "true" ]]; then
            BUILD_TYPE="${{ github.event.inputs.event_name }}"
          else
            echo "Unknown BUILD_TYPE! Aborting"
            exit 1
          fi

          # Manage instance type
          INSTANCE_TYPE="c5.4xlarge"
          if [[ "${BUILD_TYPE}" == "weekly" ]]; then
            INSTANCE_TYPE="m6i.metal"
          elif [[ "${BUILD_TYPE}" == "release" ]]; then
            INSTANCE_TYPE="c6i.16xlarge"
          fi

          # Manage python versions
          if [[ "${IS_WORKFLOW_DISPATCH}" == "true" ]]; then
            LINUX_PYTHON_VERSIONS="${{ github.event.inputs.linux_python_versions }}"
            MACOS_PYTHON_VERSIONS="${{ github.event.inputs.macos_python_versions }}"
          elif [[ "${BUILD_TYPE}" == "pr" ]]; then
            LINUX_PYTHON_VERSIONS="3.8"
            MACOS_PYTHON_VERSIONS=""
          elif [[ "${BUILD_TYPE}" == "weekly" ]]; then
            LINUX_PYTHON_VERSIONS="3.8 3.9 3.10"
            MACOS_PYTHON_VERSIONS="3.9"
          elif [[ "${BUILD_TYPE}" == "release" ]]; then
            LINUX_PYTHON_VERSIONS="3.8 3.9 3.10"
            MACOS_PYTHON_VERSIONS=""
          elif [[ "${BUILD_TYPE}" == "push_to_main" ]]; then
            LINUX_PYTHON_VERSIONS="3.8"
            MACOS_PYTHON_VERSIONS=""
          elif [[ "${BUILD_TYPE}" == "push_to_release" ]]; then
            LINUX_PYTHON_VERSIONS="3.8"
            MACOS_PYTHON_VERSIONS=""
          else
            echo "Unknown BUILD_TYPE! Aborting"
            exit 1
          fi

          echo "LINUX_PYTHON_VERSIONS: ${LINUX_PYTHON_VERSIONS}"
          echo "MACOS_PYTHON_VERSIONS: ${MACOS_PYTHON_VERSIONS}"
          echo "BUILD_TYPE: ${BUILD_TYPE}"
          echo "INSTANCE_TYPE: ${INSTANCE_TYPE}"

          MATRIX_JSON=$(mktemp --suffix=.json)
          echo "Prepared build matrix:"
          # Versions variables are intentionally unquoted: word splitting feeds
          # one CLI argument per space-separated version
          python3 ./script/actions_utils/generate_test_matrix.py \
            --output-json "${MATRIX_JSON}" \
            --linux-python-versions ${LINUX_PYTHON_VERSIONS} \
            --macos-python-versions ${MACOS_PYTHON_VERSIONS}

          LINUX_MATRIX=$(jq -rc '. | map(select(.os_kind=="linux"))' "${MATRIX_JSON}")
          MACOS_MATRIX=$(jq -rc '. | map(select(.os_kind=="macos"))' "${MATRIX_JSON}")

          echo "Linux Matrix:"
          echo "${LINUX_MATRIX}" | jq '.'
          echo "macOS Matrix:"
          echo "${MACOS_MATRIX}" | jq '.'

          echo "linux-matrix=${LINUX_MATRIX}" >> $GITHUB_OUTPUT
          echo "macos-matrix=${MACOS_MATRIX}" >> $GITHUB_OUTPUT

          NEEDS_LINUX_38_RUNNER=$(echo "${LINUX_MATRIX}" | \
            jq -rc '. | map(select(.os_kind=="linux" and .python_version=="3.8")) | length > 0')
          NEEDS_LINUX_39_RUNNER=$(echo "${LINUX_MATRIX}" | \
            jq -rc '. | map(select(.os_kind=="linux" and .python_version=="3.9")) | length > 0')
          NEEDS_LINUX_310_RUNNER=$(echo "${LINUX_MATRIX}" | \
            jq -rc '. | map(select(.os_kind=="linux" and .python_version=="3.10")) | length > 0')

          echo "Needs Linux 3.8 runner:"
          echo "${NEEDS_LINUX_38_RUNNER}"
          echo "Needs Linux 3.9 runner:"
          echo "${NEEDS_LINUX_39_RUNNER}"
          echo "Needs Linux 3.10 runner:"
          echo "${NEEDS_LINUX_310_RUNNER}"

          echo "needs-38-linux-runner=${NEEDS_LINUX_38_RUNNER}" >> $GITHUB_OUTPUT
          echo "needs-39-linux-runner=${NEEDS_LINUX_39_RUNNER}" >> $GITHUB_OUTPUT
          echo "needs-310-linux-runner=${NEEDS_LINUX_310_RUNNER}" >> $GITHUB_OUTPUT
          echo "instance-type=${INSTANCE_TYPE}" >> $GITHUB_OUTPUT
start-runner-linux:
needs: [matrix-preparation]
name: Start EC2 runner
runs-on: ubuntu-20.04
timeout-minutes: 15
outputs:
label-38: ${{ steps.start-ec2-runner-38.outputs.label }}
ec2-instance-id-38: ${{ steps.start-ec2-runner-38.outputs.ec2-instance-id || '' }}
label-39: ${{ steps.start-ec2-runner-39.outputs.label }}
ec2-instance-id-39: ${{ steps.start-ec2-runner-39.outputs.ec2-instance-id || '' }}
label-310: ${{ steps.start-ec2-runner-310.outputs.label }}
ec2-instance-id-310: ${{ steps.start-ec2-runner-310.outputs.ec2-instance-id || '' }}
matrix: ${{ steps.update-linux-matrix.outputs.linux-matrix }}
steps:
- name: Checkout Code
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@5727f247b64f324ec403ac56ae05e220fd02b65f
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ secrets.AWS_REGION }}
- name: Start EC2 runner python 38
id: start-ec2-runner-38
if: ${{ !cancelled() && fromJSON(needs.matrix-preparation.outputs.needs-38-linux-runner) }}
uses: machulav/ec2-github-runner@4e0303de215db88e1c489e07a15ca4d867f488ea
with:
mode: start
github-token: ${{ secrets.EC2_RUNNER_BOT_TOKEN }}
ec2-image-id: ${{ secrets.AWS_EC2_AMI }}
ec2-instance-type: ${{ needs.matrix-preparation.outputs.instance-type }}
subnet-id: ${{ secrets.AWS_EC2_SUBNET_ID }}
security-group-id: ${{ secrets.AWS_EC2_SECURITY_GROUP_ID }}
aws-resource-tags: >
[
{"Key": "Name", "Value": "cml-ci-ec2-github-runner-py38"},
{"Key": "GitHubRepository", "Value": "${{ github.repository }}"},
{"Key": "Python version", "Value": "3.8"},
{"Key": "Actor", "Value": "${{ github.actor }}"},
{"Key": "Action", "Value": "${{ github.action }}"},
{"Key": "GitHash", "Value": "${{ github.sha }}"},
{"Key": "RefName", "Value": "${{ github.ref_name }}"},
{"Key": "RunId", "Value": "${{ github.run_id }}"},
{"Key": "Team", "Value": "CML"}
]
- name: Start EC2 runner python 39
id: start-ec2-runner-39
if: ${{ !cancelled() && fromJSON(needs.matrix-preparation.outputs.needs-39-linux-runner) }}
uses: machulav/ec2-github-runner@4e0303de215db88e1c489e07a15ca4d867f488ea
with:
mode: start
github-token: ${{ secrets.EC2_RUNNER_BOT_TOKEN }}
ec2-image-id: ${{ secrets.AWS_EC2_AMI }}
ec2-instance-type: ${{ needs.matrix-preparation.outputs.instance-type }}
subnet-id: ${{ secrets.AWS_EC2_SUBNET_ID }}
security-group-id: ${{ secrets.AWS_EC2_SECURITY_GROUP_ID }}
aws-resource-tags: >
[
{"Key": "Name", "Value": "cml-ci-ec2-github-runner-py39"},
{"Key": "GitHubRepository", "Value": "${{ github.repository }}"},
{"Key": "Python version", "Value": "3.9"},
{"Key": "Actor", "Value": "${{ github.actor }}"},
{"Key": "Action", "Value": "${{ github.action }}"},
{"Key": "GitHash", "Value": "${{ github.sha }}"},
{"Key": "RefName", "Value": "${{ github.ref_name }}"},
{"Key": "RunId", "Value": "${{ github.run_id }}"},
{"Key": "Team", "Value": "CML"}
]
- name: Start EC2 runner python 310
id: start-ec2-runner-310
if: ${{ !cancelled() && fromJSON(needs.matrix-preparation.outputs.needs-310-linux-runner) }}
uses: machulav/ec2-github-runner@4e0303de215db88e1c489e07a15ca4d867f488ea
with:
mode: start
github-token: ${{ secrets.EC2_RUNNER_BOT_TOKEN }}
ec2-image-id: ${{ secrets.AWS_EC2_AMI }}
ec2-instance-type: ${{ needs.matrix-preparation.outputs.instance-type }}
subnet-id: ${{ secrets.AWS_EC2_SUBNET_ID }}
security-group-id: ${{ secrets.AWS_EC2_SECURITY_GROUP_ID }}
aws-resource-tags: >
[
{"Key": "Name", "Value": "cml-ci-ec2-github-runner-py310"},
{"Key": "GitHubRepository", "Value": "${{ github.repository }}"},
{"Key": "Python version", "Value": "3.10"},
{"Key": "Actor", "Value": "${{ github.actor }}"},
{"Key": "Action", "Value": "${{ github.action }}"},
{"Key": "GitHash", "Value": "${{ github.sha }}"},
{"Key": "RefName", "Value": "${{ github.ref_name }}"},
{"Key": "RunId", "Value": "${{ github.run_id }}"},
{"Key": "Team", "Value": "CML"}
]
- name: Update Linux runs_on Matrix
id: update-linux-matrix
env:
MATRIX: ${{ needs.matrix-preparation.outputs.linux-matrix }}
run: |
MATRIX=$(echo "${MATRIX}" | jq -rc \
'(. | map(select(.os_kind=="linux" and .python_version=="3.8") |= . + {"runs_on": "${{ steps.start-ec2-runner-38.outputs.label }}"}) )')
MATRIX=$(echo "${MATRIX}" | jq -rc \
'(. | map(select(.os_kind=="linux" and .python_version=="3.9") |= . + {"runs_on": "${{ steps.start-ec2-runner-39.outputs.label }}"}) )')
MATRIX=$(echo "${MATRIX}" | jq -rc \
'(. | map(select(.os_kind=="linux" and .python_version=="3.10") |= . + {"runs_on": "${{ steps.start-ec2-runner-310.outputs.label }}"}) )')
echo "Updated matrix:"
echo "${MATRIX}"
echo "linux-matrix=${MATRIX}" >> $GITHUB_OUTPUT
build-linux:
needs: [start-runner-linux]
runs-on: ${{ matrix.runs_on }}
# Run in a clean container
container:
image: ubuntu:20.04
defaults:
run:
shell: bash
strategy:
fail-fast: false
matrix: ${{ fromJSON(format('{{"include":{0}}}', needs.start-runner-linux.outputs.matrix)) }}
env:
IS_REF_BUILD: ${{ matrix.python_version == '3.8' }}
PIP_INDEX_URL: ${{ secrets.PIP_INDEX_URL }}
PIP_EXTRA_INDEX_URL: ${{ secrets.PIP_EXTRA_INDEX_URL }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
steps:
- name: Add masks
run: |
echo "::add-mask::${{ secrets.INTERNAL_PYPI_URL_FOR_MASK }}"
echo "::add-mask::${{ secrets.INTERNAL_REPO_URL_FOR_MASK }}"
# Replace default archive.ubuntu.com from docker image with fr mirror
# original archive showed performance issues and is farther away
- name: Docker container related setup and git installation
run: |
TZ=Europe/Paris
echo "TZ=${TZ}" >> "$GITHUB_ENV"
ln -snf /usr/share/zoneinfo/${TZ} /etc/localtime && echo ${TZ} > /etc/timezone
sed -i 's|^deb http://archive|deb http://fr.archive|g' /etc/apt/sources.list
apt update && apt install git git-lfs -y
# By default, `git clone` downloads all LFS files, which we want to avoid in regular CI
- name: Disable LFS download by default
if: ${{ !fromJSON(env.IS_WEEKLY) && !fromJSON(env.IS_RELEASE) }}
run: |
git lfs install --skip-smudge
# Checkout the code
- name: Checkout Code
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
# Pull the only LFS files needed for regular tests, avoiding to download files stored for
# benchmarks and use cases
- name: Pull LFS test files
run: |
git lfs pull --include "${{ env.LFS_TEST_FILES }}" --exclude ""
- name: Set up Python ${{ matrix.python_version }}
uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
id: setup-python
with:
python-version: ${{ matrix.python_version }}
- name: Check python3 version
env:
SYSTEM_VERSION_COMPAT: 0
run: |
which python3
which pip3
- name: Install dependencies
id: install-deps
run: |
./script/make_utils/setup_os_deps.sh
mkdir -p ~/.aws
echo "[default]\nregion=eu-west-3\noutput=json\n" >> ~/.aws/config
# Needed to include Python.h
export C_INCLUDE_PATH="${C_INCLUDE_PATH}:/__w/_tool/Python/$(python -c 'import platform; print(platform.python_version())')/x64/include"
echo
echo "Using these tools:"
which python3
which pip3
echo
make setup_env
- name: Check commits first line format
id: commit-first-line
if: ${{ fromJSON(env.IS_PR) && steps.install-deps.outcome == 'success' && !cancelled() }}
uses: gsactions/commit-message-checker@16fa2d5de096ae0d35626443bcd24f1e756cafee
with:
pattern: '^((feat|fix|chore|refactor|style|test|docs)(\((bounds|helpers|data_types|debugging|extensions|fhe_circuit|mlir|graph|optimization|representation|tracing|values|benchmarks|ci|scripts|compilation|execution|deps)\))?(\!)?\:) .+$'
flags: 'gs'
error: "Your first line has to contain a commit type and scope like \"feat(my_feature): msg\".\
Pattern: '^((feat|fix|chore|refactor|style|test|docs)(\\((bounds|helpers|data_types|debugging|extensions|fhe_circuit|mlir|graph|optimization|representation|tracing|values|benchmarks|ci|scripts|compilation|execution|deps)\\))(\\!)??\\:)'"
excludeDescription: 'true' # optional: this excludes the description body of a pull request
excludeTitle: 'true' # optional: this excludes the title of a pull request
checkAllCommitMessages: 'true' # optional: this checks all commits associated with a pull request
accessToken: ${{ secrets.GITHUB_TOKEN }} # github access token is only required if checkAllCommitMessages is true
- name: Check commits line length
id: commit-line-length
if: ${{ fromJSON(env.IS_PR) && steps.install-deps.outcome == 'success' && !cancelled() }}
uses: gsactions/commit-message-checker@16fa2d5de096ae0d35626443bcd24f1e756cafee
with:
pattern: '(^.{0,74}$\r?\n?){0,20}'
flags: 'gm'
error: 'The maximum line length of 74 characters is exceeded.'
excludeDescription: 'true' # optional: this excludes the description body of a pull request
excludeTitle: 'true' # optional: this excludes the title of a pull request
checkAllCommitMessages: 'true' # optional: this checks all commits associated with a pull request
accessToken: ${{ secrets.GITHUB_TOKEN }} # github access token is only required if checkAllCommitMessages is true
- name: Commit conformance
id: commit-conformance
if: ${{ steps.install-deps.outcome == 'success' && !cancelled() }}
env:
FIRST_LINE_OK: ${{ (fromJSON(env.IS_PR) && steps.commit-first-line.outcome == 'success') || steps.commit-first-line.outcome == 'skipped' }}
LINE_LENGTH_OK: ${{ (fromJSON(env.IS_PR) && steps.commit-line-length.outcome == 'success') || steps.commit-line-length.outcome == 'skipped' }}
run: |
if [[ "${FIRST_LINE_OK}" != "true" || "${LINE_LENGTH_OK}" != "true" ]]; then
echo "Issues with commits. First line ok: ${FIRST_LINE_OK}. Line length ok: ${LINE_LENGTH_OK}."
exit 1
fi
- name: Check actionlint
run:
make actionlint
- name: Source code conformance
id: make-pcc
if: ${{ steps.install-deps.outcome == 'success' && !cancelled() }}
# pcc launches an internal target with proper flags
run: |
make pcc
# Checked for changes between main and the current branch in a PR. More specifically,
# this is used in regular CIs to avoid launching Pytest, checking codeblocks, building docs
# or other steps if the associated files were not touched. For most, we also check that the
# linux MD5 has not changed, which means that no libraries got updated. This is done in order
# to handle PRs which only upgrades dependencies
# Following the 'files_yaml' section, we define what files should trigger a defined acronym
# (src, codeblocks, ...) when some changes are detected in them. For example, if some
# dependencies were changed, 'tests', 'determinism', 'codeblocks' and 'determinism' acronyms
# will be affected. We use the license MD5 file for that because it is built on the
# poetry.lock as well as the Concrete Python version, which can be installed manually in the
# makefile.
# For codeblocks, 'make pytest_codeblocks' runs the `make_utils/pytest_codeblocks.sh` script,
# which executes a find and grep command to find them. In the following section, we manually
# re-define what this command does by looking at all markdown files that are neither in hidden
# directories nor in docs/_apidocs or similar paths. Additionally, as for others, we check for
# changes in the source directory or in installed dependencies.
- name: Get all changed files from main in PR
id: changed-files-in-pr
if: |
env.IS_PR
&& !fromJSON(env.IS_WEEKLY)
&& !fromJSON(env.IS_RELEASE)
&& steps.install-deps.outcome == 'success'
&& steps.make-pcc.outcome == 'success'
&& !cancelled()
uses: tj-actions/[email protected]
with:
files_yaml: |
src:
- src/**
tests:
- tests/**
- src/concrete/ml/pytest/**
determinism:
- tests/seeding/test_seeding.py
docs:
- docs/**
codeblocks:
- '**.md'
- '!.*/**'
- '!docs/_*/**'
dependencies:
- deps_licenses/licenses_linux_user.txt.md5
conftest:
- conftest.py
# Run determinism test. The only case where this is not run is for a PR that does not modify
# anything in the source code or the determinism test file. This step is also triggered
# if any dependency has been updated or the conftest.py file has changed
- name: Determinism
id: determinism
if: |
!(
steps.changed-files-in-pr.outcome == 'success'
&& steps.changed-files-in-pr.outputs.determinism_any_changed == 'false'
&& steps.changed-files-in-pr.outputs.src_any_changed == 'false'
&& steps.changed-files-in-pr.outputs.dependencies_any_changed == 'false'
&& steps.changed-files-in-pr.outputs.conftest_any_changed == 'false'
)
&& steps.install-deps.outcome == 'success'
&& steps.make-pcc.outcome == 'success'
&& !cancelled()
run: |
make determinism
# Build the documentation if anything changed in our documentation files or in the source code
- name: Build docs
id: build-docs
if: |
!(
steps.changed-files-in-pr.outcome == 'success'
&& steps.changed-files-in-pr.outputs.docs_any_changed == 'false'
&& steps.changed-files-in-pr.outputs.src_any_changed == 'false'
)
&& steps.install-deps.outcome == 'success'
&& steps.make-pcc.outcome == 'success'
&& steps.determinism.outcome != 'failure'
&& !cancelled()
run: |
make docs
- name: Generate release changelog
id: changelog
if: ${{ fromJSON(env.IS_RELEASE) && steps.install-deps.outcome == 'success' && !cancelled() }}
run: |
GIT_TAG=$(echo "${{ github.ref }}" | sed 's/refs\/tags\///g')
CHANGELOG_FILE="CHANGELOG_${GIT_TAG}.md"
echo "changelog-file=${CHANGELOG_FILE}" >> $GITHUB_OUTPUT
poetry run python ./script/make_utils/changelog_helper.py \
--to-ref "${GIT_TAG}" \
--to-ref-must-have-tag > "${CHANGELOG_FILE}"
# Make sure all necessary steps passed. For build-docs and determinism steps, we only check for
# non-failures as the 'changed-files-in-pr' step might skip them
- name: Stop if previous steps failed
id: conformance
if: ${{ always() && !cancelled() }}
env:
CONFORMANCE_STATUS: >-
${{
steps.commit-conformance.outcome == 'success'
&& steps.make-pcc.outcome == 'success'
&& steps.build-docs.outcome != 'failure'
&& steps.determinism.outcome != 'failure'
}}
run: |
if [[ "${CONFORMANCE_STATUS}" != "true" ]]; then
echo "Conformance failed, got:"
echo "Commit conformance success step: ${{ steps.commit-conformance.outcome }}"
echo "Make conformance step: ${{ steps.make-pcc.outcome }}"
echo "Build docs step: ${{ steps.build-docs.outcome }}"
echo "Determinism step: ${{ steps.determinism.outcome }}"
exit 1
fi
# Taring the docs allows for much faster upload speed (from ~3min worst case to ~2s best case)
- name: Tar docs artifacts
id: tar-docs
if: ${{ steps.conformance.outcome == 'success' && steps.build-docs.outcome == 'success' && !cancelled() }}
run: |
cd docs/_build/html
tar -cvf docs.tar ./*
# Only upload docs once from reference build
- name: Archive docs artifacts
if: ${{ fromJSON(env.IS_REF_BUILD) && steps.tar-docs.outcome == 'success' && !cancelled() }}
uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
with:
name: html-docs
path: docs/_build/html/docs.tar
- name: Upload changelog artifacts
if: ${{ fromJSON(env.IS_REF_BUILD) && steps.changelog.outcome == 'success' && !cancelled() }}
uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
with:
name: changelog
path: ${{ steps.changelog.outputs.changelog-file }}
# Create packages before tests, to be able to get them if some unexpected test failure happens
# Build the package only once, as we don't have binary dependency this can be used on Linux
# and macOS as long as the dependencies are available
- name: Build wheel
id: build-wheel
if: ${{ fromJSON(env.IS_REF_BUILD) && steps.conformance.outcome == 'success' && !cancelled() }}
run: |
rm -rf dist
poetry build -f wheel
- name: Upload wheel artifacts
if: ${{ fromJSON(env.IS_REF_BUILD) && steps.build-wheel.outcome == 'success' }}
uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
with:
name: py3-wheel
path: dist/*.whl
# Run Pytest on a subset of our tests if the source code or our tests has changed, and if the
# current workflow does no take place in a weekly or release CI. This step is also triggered
# if any dependency has been updated or the conftest.py files has changed
- name: PyTest Source Code (regular)
id: pytest_regular
if: |
steps.changed-files-in-pr.outcome == 'success'
&& (
steps.changed-files-in-pr.outputs.src_any_changed == 'true'
|| steps.changed-files-in-pr.outputs.tests_any_changed == 'true'
|| steps.changed-files-in-pr.outputs.dependencies_any_changed == 'true'
|| steps.changed-files-in-pr.outputs.conftest_any_changed == 'true'
)
&& steps.conformance.outcome == 'success'
&& !cancelled()
run: |
make pytest
# Run Pytest on all of our tests on a weekly basis
- name: PyTest Source Code (weekly)
id: pytest_weekly
if: ${{ fromJSON(env.IS_WEEKLY) && steps.conformance.outcome == 'success' && !cancelled() }}
run: |
make pytest PYTEST_OPTIONS=--weekly
# Run Pytest on codeblocks if the source code or any markdown has changed, or if this is
# part of the weekly or release CI. This step is also triggered if any dependency has been
# updated
- name: PyTest CodeBlocks
if: |
(
!(
steps.changed-files-in-pr.outcome == 'success'
&& steps.changed-files-in-pr.outputs.codeblocks_any_changed == 'false'
&& steps.changed-files-in-pr.outputs.src_any_changed == 'false'
&& steps.changed-files-in-pr.outputs.dependencies_any_changed == 'false'
)
|| fromJSON(env.IS_WEEKLY)
|| fromJSON(env.IS_RELEASE)
)
&& steps.conformance.outcome == 'success'
&& !cancelled()
run: |
make pytest_codeblocks
# Run Pytest on all of our tests on a weekly basis or while releasing
- name: PyTest CodeBlocks with PyPI local wheel of Concrete-ML
if: |
(fromJSON(env.IS_WEEKLY) || fromJSON(env.IS_RELEASE))
&& steps.conformance.outcome == 'success'
&& !cancelled()
run: |
make pytest_codeblocks_pypi_wheel_cml
# Run Pytest on all of our tests on a weekly basis
- name: PyTest Notebooks
if: |
fromJSON(env.IS_WEEKLY)
&& steps.conformance.outcome == 'success'
&& !cancelled()
run: |
make pytest_nb
# Run Pytest on all of our tests on a weekly basis or while releasing
- name: PyTest with PyPI local wheel of Concrete-ML
if: |
(fromJSON(env.IS_WEEKLY) || fromJSON(env.IS_RELEASE))
&& steps.conformance.outcome == 'success' && !cancelled()
run: |
make pytest_pypi_wheel_cml
- name: Fast sanity check
if: ${{ steps.conformance.outcome == 'success' && !cancelled() }}
run: |
make fast_sanity_check
# Compute coverage only on reference build
- name: Test coverage
id: coverage
if: |
fromJSON(env.IS_REF_BUILD)
&& (
steps.pytest_regular.outcome != 'skipped'
|| steps.pytest_weekly.outcome != 'skipped'
)
&& !cancelled()
run: |
./script/actions_utils/coverage.sh global-coverage-infos.json
- name: Comment with coverage
uses: marocchino/sticky-pull-request-comment@f6a2580ed520ae15da6076e7410b088d1c5dddd9
if: ${{ steps.coverage.outcome != 'skipped' && !cancelled() }}
continue-on-error: true
with:
path: diff-coverage.txt
recreate: true
# Check installation with sync_env if the source code or some dependency versions have been
# changed while running a PR's CI
- name: Check installation with sync_env and python ${{ matrix.python_version }}
if: |
(fromJSON(env.IS_WEEKLY) || fromJSON(env.IS_RELEASE))
&& steps.conformance.outcome == 'success'
&& !cancelled()
run: |
./script/make_utils/check_installation_with_all_python.sh --version ${{ matrix.python_version }} --sync_env
# Check installation with pip if the source code or some dependency versions have been c
# hanged while running a PR's CI
- name: Check installation with pip and python ${{ matrix.python_version }}
if: |
(fromJSON(env.IS_WEEKLY) || fromJSON(env.IS_RELEASE))
&& steps.conformance.outcome == 'success'
&& !cancelled()
run: |
./script/make_utils/check_installation_with_all_python.sh --version ${{ matrix.python_version }} --pip
# Check installation with wheel if the source code or some dependency versions have been
# changed while running a PR's CI
- name: Check installation with wheel and python ${{ matrix.python_version }}
if: |
(fromJSON(env.IS_WEEKLY) || fromJSON(env.IS_RELEASE))
&& steps.conformance.outcome == 'success'
&& !cancelled()
run: |
./script/make_utils/check_installation_with_all_python.sh --version ${{ matrix.python_version }} --wheel
# Check installation with git clone if the source code or some dependency versions have been
# changed while running a PR's CI
- name: Check installation with clone and python ${{ matrix.python_version }}
if: |
(fromJSON(env.IS_WEEKLY) || fromJSON(env.IS_RELEASE))
&& steps.conformance.outcome == 'success'
&& !cancelled()
run: |
./script/make_utils/check_installation_with_all_python.sh --version ${{ matrix.python_version }} --clone
# This is to manage build matrices and have a single status point for PRs
# This can be updated to take macOS into account but is impractical for private repos because of
# long builds and therefore expensive macOS testing
linux-build-status:
name: Linux build status
needs: [build-linux]
runs-on: ubuntu-20.04
timeout-minutes: 2
if: ${{ always() }}
steps:
- name: Fail on unsuccessful Linux build
shell: bash
run: |
# success always if wasn't launched due to CI not supposed to be launched
if ${{ github.repository == 'zama-ai/concrete-ml-internal' && github.event_name == 'push' && github.ref == 'refs/heads/main' }}
then
exit 0
fi
if [[ ${{ needs.build-linux.result }} != "success" ]]; then
exit 1
fi
- name: Slack Notification
if: ${{ always() && !success() }}
continue-on-error: true
uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7
env:
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
SLACK_COLOR: ${{ needs.build-linux.result }}
SLACK_MESSAGE: "Build finished with status ${{ needs.build-linux.result }}. (${{ env.ACTION_RUN_URL }})"
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
stop-runner-linux:
name: Stop EC2 runner
needs: [build-linux, start-runner-linux]
runs-on: ubuntu-20.04
timeout-minutes: 2
if: ${{ always() && (needs.start-runner-linux.result != 'skipped') }}
steps:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@5727f247b64f324ec403ac56ae05e220fd02b65f
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ secrets.AWS_REGION }}
- name: Stop EC2 runner python 38
uses: machulav/ec2-github-runner@4e0303de215db88e1c489e07a15ca4d867f488ea
if: ${{ always() && needs.start-runner-linux.outputs.ec2-instance-id-38 }}
with:
github-token: ${{ secrets.EC2_RUNNER_BOT_TOKEN }}
label: ${{ needs.start-runner-linux.outputs.label-38 }}
ec2-instance-id: ${{ needs.start-runner-linux.outputs.ec2-instance-id-38 }}
mode: stop
- name: Stop EC2 runner python 39
uses: machulav/ec2-github-runner@4e0303de215db88e1c489e07a15ca4d867f488ea
if: ${{ always() && needs.start-runner-linux.outputs.ec2-instance-id-39 }}
with:
github-token: ${{ secrets.EC2_RUNNER_BOT_TOKEN }}
label: ${{ needs.start-runner-linux.outputs.label-39 }}
ec2-instance-id: ${{ needs.start-runner-linux.outputs.ec2-instance-id-39 }}
mode: stop
- name: Stop EC2 runner python 310
uses: machulav/ec2-github-runner@4e0303de215db88e1c489e07a15ca4d867f488ea
if: ${{ always() && needs.start-runner-linux.outputs.ec2-instance-id-310 }}
with:
github-token: ${{ secrets.EC2_RUNNER_BOT_TOKEN }}
label: ${{ needs.start-runner-linux.outputs.label-310 }}
ec2-instance-id: ${{ needs.start-runner-linux.outputs.ec2-instance-id-310 }}
mode: stop
build-macos:
needs: [matrix-preparation]
if: ${{ needs.matrix-preparation.outputs.macos-matrix != '[]' }}
runs-on: ${{ matrix.runs_on }}
defaults:
run:
shell: bash
strategy:
fail-fast: false
matrix: ${{ fromJSON(format('{{"include":{0}}}', needs.matrix-preparation.outputs.macos-matrix)) }}
env:
PIP_INDEX_URL: ${{ secrets.PIP_INDEX_URL }}
PIP_EXTRA_INDEX_URL: ${{ secrets.PIP_EXTRA_INDEX_URL }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
steps:
- name: Add masks
run: |
echo "::add-mask::${{ secrets.INTERNAL_PYPI_URL_FOR_MASK }}"
echo "::add-mask::${{ secrets.INTERNAL_REPO_URL_FOR_MASK }}"
# By default, `git clone` downloads all LFS files, which we want to avoid in regular CI
- name: Disable LFS download by default
if: ${{ !fromJSON(env.IS_WEEKLY) && !fromJSON(env.IS_RELEASE) }}
run: |
git lfs install --skip-smudge
# Checkout the code
- name: Checkout Code
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
# Pull the only LFS files needed for regular tests, avoiding to download files stored for
# benchmarks and use cases
- name: Pull LFS test files
run: |
git lfs pull --include "${{ env.LFS_TEST_FILES }}" --exclude ""
- name: Set up Python ${{ matrix.python_version }}
uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
with:
python-version: ${{ matrix.python_version }}
- name: Check python3 version
env:
SYSTEM_VERSION_COMPAT: 0
run: |
which python3
which pip3
sw_vers
- name: Install dependencies
id: install-deps
env:
SYSTEM_VERSION_COMPAT: 0
run: |
./script/make_utils/setup_os_deps.sh
mkdir -p ~/.aws
echo "[default]\nregion=eu-west-3\noutput=json\n" >> ~/.aws/config
which python3
which pip3
PATH="/usr/local/opt/make/libexec/gnubin:$PATH"
echo "PATH=${PATH}" >> "$GITHUB_ENV"
echo
echo "Using these tools:"
which python3
which pip3
echo
make setup_env
      # macOS builds are already long, so we decide not to use --weekly on them; it could be
      # changed. Remark also that, for mac, due to unexpected issues with GitHub, we have a
      # slightly different way to launch pytest
      - name: PyTest Source Code
        run: |
          make pytest_macOS_for_GitHub
  # Publish the documentation built by build-linux to the preprod S3 bucket.
  # Only runs on pushes to main.
  publish-docs:
    needs: [build-linux]
    timeout-minutes: 10
    outputs:
      # Human-readable status line consumed by the send-report job
      report: ${{ steps.report.outputs.report || 'Did not run.' }}
    runs-on: ubuntu-20.04
    if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
- name: Prepare docs push
id: docs-push-infos
run: |
if [[ ${{ secrets.AWS_REPO_PREPROD_DOCUMENTATION_BUCKET_NAME }} != "" ]] && \
[[ ${{ secrets.AWS_REPO_PREPROD_DOCUMENTATION_DISTRIBUTION_ID }} != "" ]]; then
REF_NAME=$(echo "${{ github.ref }}" | sed 's/refs\/heads\///g')
echo "has-preprod=true" >> $GITHUB_OUTPUT
echo "aws-bucket=${{ secrets.AWS_REPO_PREPROD_DOCUMENTATION_BUCKET_NAME }}" >> $GITHUB_OUTPUT
echo "aws-distribution=${{ secrets.AWS_REPO_PREPROD_DOCUMENTATION_DISTRIBUTION_ID }}" >> $GITHUB_OUTPUT
echo "dest-dir=concrete-ml/${REF_NAME}" >> $GITHUB_OUTPUT
else
echo "has-preprod=false" >> $GITHUB_OUTPUT
fi
      # Fetch the docs artifact produced earlier in the pipeline
      - name: Download Documentation
        if: ${{ fromJSON(steps.docs-push-infos.outputs.has-preprod) }}
        id: download
        uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
        with:
          name: html-docs
      - name: Untar docs artifacts
        id: untar
        if: ${{ fromJSON(steps.docs-push-infos.outputs.has-preprod) }}
        run: |
          tar -xvf docs.tar
          rm docs.tar
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@5727f247b64f324ec403ac56ae05e220fd02b65f
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_REGION }}
      # Sync the extracted docs to the preprod bucket under the branch directory
      - name: Publish Documentation to S3
        id: publish
        # Only push when the untar step actually ran and succeeded
        if: ${{ steps.untar.outcome == 'success' && !cancelled() }}
        env:
          AWS_S3_BUCKET: ${{ steps.docs-push-infos.outputs.aws-bucket }}
          SOURCE_DIR: '.'
          DEST_DIR: ${{ steps.docs-push-infos.outputs.dest-dir }}
        run: |
          aws s3 sync "${SOURCE_DIR}" s3://"${AWS_S3_BUCKET}/${DEST_DIR}" --delete --acl public-read
      # Invalidate CloudFront so the freshly pushed docs are served immediately
      - name: Invalidate CloudFront Cache
        if: ${{ steps.publish.outcome == 'success' }}
        env:
          SOURCE_PATH: "/${{ steps.docs-push-infos.outputs.dest-dir }}/*"
          DISTRIBUTION_ID: ${{ steps.docs-push-infos.outputs.aws-distribution }}
        run: |
          aws cloudfront create-invalidation \
            --distribution-id "${DISTRIBUTION_ID}" \
            --paths "${SOURCE_PATH}"
- name: Set notification report
id: report
if: ${{ always() }}
run: |
REPORT="Publishing documentation finished with status ${{ job.status }}. \
Pushed to preprod: ${{ steps.docs-push-infos.outputs.has-preprod }}"
echo "${REPORT}"
echo "report=${REPORT}" >> $GITHUB_ENV
echo "REPORT=${REPORT}" >> "$GITHUB_ENV"
      # Notify Slack only when something went wrong (job not fully successful)
      - name: Slack Notification
        if: ${{ always() && !success() }}
        continue-on-error: true
        uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7
        env:
          SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
          SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
          SLACK_COLOR: ${{ job.status }}
          SLACK_MESSAGE: "${{ env.REPORT }} (${{ env.ACTION_RUN_URL }})"
          SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
          SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
  # This action is launched automatically when a tag is pushed, which happens under the hood
  # when we do a `make release`. Here, we set an AWS EC2 instance, as GitHub runners had
  # broken configurations
  start-package-release-runner:
    needs: [matrix-preparation, build-linux]
    name: Start package-release runner
    runs-on: ubuntu-20.04
    # Only for tag pushes, i.e. actual releases
    if: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }}
    outputs:
      # Runner label and instance id, consumed by package-release and
      # stop-package-release-runner
      label: ${{ steps.start-ec2-runner.outputs.label }}
      ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id || '' }}
    steps:
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@5727f247b64f324ec403ac56ae05e220fd02b65f
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_REGION }}
      - name: Start EC2 runner python
        id: start-ec2-runner
        if: ${{ !cancelled() }}
        uses: machulav/ec2-github-runner@4e0303de215db88e1c489e07a15ca4d867f488ea
        with:
          mode: start
          github-token: ${{ secrets.EC2_RUNNER_BOT_TOKEN }}
          ec2-image-id: ${{ secrets.AWS_EC2_AMI }}
          ec2-instance-type: ${{ needs.matrix-preparation.outputs.instance-type }}
          subnet-id: ${{ secrets.AWS_EC2_SUBNET_ID }}
          security-group-id: ${{ secrets.AWS_EC2_SECURITY_GROUP_ID }}
          # Tags applied to the EC2 instance for traceability and cost tracking
          aws-resource-tags: >
            [
              {"Key": "Name", "Value": "cml-package-release-ec2-github-runner"},
              {"Key": "GitHubRepository", "Value": "${{ github.repository }}"},
              {"Key": "Actor", "Value": "${{ github.actor }}"},
              {"Key": "Action", "Value": "${{ github.action }}"},
              {"Key": "GitHash", "Value": "${{ github.sha }}"},
              {"Key": "RefName", "Value": "${{ github.ref_name }}"},
              {"Key": "RunId", "Value": "${{ github.run_id }}"},
              {"Key": "Team", "Value": "CML"}
            ]
  # This action is launched automatically when a tag is pushed, which happens under the hood
  # when we do a `make release` locally. This notably creates docker and pypi images. Most of
  # the things are done in this action (as opposed to `make release` which just creates a tag),
  # directly on the AWS EC2 instance created in start-package-release-runner
  package-release:
    needs: [start-package-release-runner]
    outputs:
      # Human-readable status line consumed by the send-report job
      report: ${{ steps.report.outputs.report || 'Did not run.' }}
    name: Package and artifacts release
    # Runs on the EC2 runner started by start-package-release-runner
    runs-on: ${{ needs.start-package-release-runner.outputs.label }}
    env:
      # Internal (ghcr) and public (DockerHub) docker image repositories
      PRIVATE_RELEASE_IMAGE_BASE: ghcr.io/zama-ai/concrete-ml
      PUBLIC_RELEASE_IMAGE_BASE: zamafhe/concrete-ml
      PIP_INDEX_URL: ${{ secrets.PIP_INDEX_URL }}
      PIP_EXTRA_INDEX_URL: ${{ secrets.PIP_EXTRA_INDEX_URL }}
    steps:
      # Mask internal URLs so they never appear in job logs
      - name: Add masks
        run: |
          echo "::add-mask::${{ secrets.INTERNAL_PYPI_URL_FOR_MASK }}"
          echo "::add-mask::${{ secrets.INTERNAL_REPO_URL_FOR_MASK }}"
      # Full history is needed so the tag/branch consistency checks below can see all refs
      - name: Checkout code
        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
        with:
          fetch-depth: 0
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Install dependencies
        run: |
          sudo apt update
          # We need to freeze docker.io because its update requires user input
          sudo apt-mark hold docker.io
          sudo ./script/make_utils/setup_os_deps.sh --linux-install-python
          make setup_env
      # Derives PROJECT_VERSION / GIT_TAG / IS_LATEST / IS_PRERELEASE and the comma
      # separated list of docker tags to push, failing fast when the git tag does
      # not match the poetry project version.
      # NOTE(review): the ghcr token is sent as base64(echo token) — `echo` appends
      # a trailing newline that ends up in the encoded value; ghcr appears to
      # accept it, confirm before changing.
      - name: Set tag in env
        # 'poetry version' cannot be piped properly so do it in 2 steps
        # the project version does not have the leading v to be semver compatible
        run: |
          PROJECT_VERSION=$(poetry version)
          PROJECT_VERSION=$(echo "$PROJECT_VERSION" | cut -d ' ' -f 2)
          GIT_TAG=$(echo "${{ github.ref }}" | sed 's/refs\/tags\///g')
          if [[ "v${PROJECT_VERSION}" != "${GIT_TAG}" ]]; then
            echo "Mismatch between tag and version: ${GIT_TAG}, v${PROJECT_VERSION}"
            exit 1
          fi
          # Check that the tag is part of the main or release/[GIT_TAG] branch
          poetry run python ./script/actions_utils/check_tag_release_in_branch_main_or_release.py --git-tag "${GIT_TAG}"
          echo "PROJECT_VERSION=${PROJECT_VERSION}" >> "$GITHUB_ENV"
          echo "GIT_TAG=${GIT_TAG}" >> "$GITHUB_ENV"
          PRIVATE_RELEASE_IMG_GIT_TAG="${PRIVATE_RELEASE_IMAGE_BASE}:${GIT_TAG}"
          echo "PRIVATE_RELEASE_IMG_GIT_TAG=${PRIVATE_RELEASE_IMG_GIT_TAG}" >> "$GITHUB_ENV"
          RELEASE_IMG_TAGS_TO_PUSH="${PRIVATE_RELEASE_IMG_GIT_TAG}"
          EXISTING_TAGS=$(curl \
            -X GET \
            -H "Authorization: Bearer $(echo ${{ secrets.BOT_TOKEN }} | base64)" \
            https://ghcr.io/v2/zama-ai/concrete-ml/tags/list | jq -rc '.tags | join(" ")')
          # We want the space separated list of versions to be expanded
          # shellcheck disable=SC2086
          IS_LATEST_INFO=$(poetry run python script/make_utils/version_utils.py \
            islatest \
            --new-version "${GIT_TAG}" \
            --existing-versions $EXISTING_TAGS)
          IS_LATEST=$(echo "${IS_LATEST_INFO}" | jq -rc '.is_latest')
          echo "IS_LATEST=${IS_LATEST}" >> "$GITHUB_ENV"
          IS_PRERELEASE=$(echo "${IS_LATEST_INFO}" | jq -rc '.is_prerelease')
          echo "IS_PRERELEASE=${IS_PRERELEASE}" >> "$GITHUB_ENV"
          if [[ "${IS_LATEST}" == "true" ]]; then
            RELEASE_IMG_LATEST_TAG="${PRIVATE_RELEASE_IMAGE_BASE}:latest"
            RELEASE_IMG_TAGS_TO_PUSH="${RELEASE_IMG_TAGS_TO_PUSH},${RELEASE_IMG_LATEST_TAG}"
          fi
          if [[ "${IS_PRERELEASE}" == "false" ]]; then
            PUBLIC_RELEASE_IMG_GIT_TAG="${PUBLIC_RELEASE_IMAGE_BASE}:${GIT_TAG}"
            RELEASE_IMG_TAGS_TO_PUSH="${RELEASE_IMG_TAGS_TO_PUSH},${PUBLIC_RELEASE_IMG_GIT_TAG}"
            if [[ "${IS_LATEST}" == "true" ]]; then
              PUBLIC_RELEASE_IMG_LATEST_TAG="${PUBLIC_RELEASE_IMAGE_BASE}:latest"
              RELEASE_IMG_TAGS_TO_PUSH="${RELEASE_IMG_TAGS_TO_PUSH},${PUBLIC_RELEASE_IMG_LATEST_TAG}"
            fi
          fi
          echo "RELEASE_IMG_TAGS_TO_PUSH=${RELEASE_IMG_TAGS_TO_PUSH}" >> "$GITHUB_ENV"
      # Staging directories: raw/ for downloaded artifacts, packaged/ for the
      # files attached to the GitHub release
      - name: Create directory for artifacts
        if: ${{ success() && !cancelled() }}
        run: |
          ARTIFACTS_RAW_DIR=/tmp/release_artifacts/raw
          mkdir -p "${ARTIFACTS_RAW_DIR}"
          echo "ARTIFACTS_RAW_DIR=${ARTIFACTS_RAW_DIR}" >> "$GITHUB_ENV"
          ARTIFACTS_PACKAGED_DIR=/tmp/release_artifacts/packaged
          mkdir -p "${ARTIFACTS_PACKAGED_DIR}"
          echo "ARTIFACTS_PACKAGED_DIR=${ARTIFACTS_PACKAGED_DIR}" >> "$GITHUB_ENV"
      - name: Download Documentation
        if: ${{ success() && !cancelled() }}
        id: download-docs
        uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
        with:
          name: html-docs
          path: ${{ env.ARTIFACTS_RAW_DIR }}/html_docs/
      - name: Untar docs artifacts
        if: ${{ success() && !cancelled() }}
        run: |
          cd ${{ steps.download-docs.outputs.download-path }}
          tar -xvf docs.tar
          rm docs.tar
      - name: Download changelog
        if: ${{ success() && !cancelled() }}
        id: download-changelog
        uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
        with:
          name: changelog
          path: ${{ env.ARTIFACTS_RAW_DIR }}/changelog/
      - name: Download python3 wheel
        if: ${{ success() && !cancelled() }}
        id: download-wheel
        uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
        with:
          name: py3-wheel
          path: ${{ env.ARTIFACTS_PACKAGED_DIR }}/
      # The release Dockerfile installs the wheel from the build context
      - name: Copy wheel to docker build context
        run: |
          mkdir -p ./pkg
          cp "${{ env.ARTIFACTS_PACKAGED_DIR }}"/*.whl ./pkg
      - name: Login to GitHub Container Registry
        uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b
        with:
          registry: ghcr.io
          username: ${{ secrets.BOT_USERNAME }}
          password: ${{ secrets.BOT_TOKEN }}
      # Login to public DockerHub if we have a true release on our hands
      - name: Login to DockerHub
        if: ${{ env.IS_PRERELEASE == 'false' }}
        uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b
        with:
          username: ${{ secrets.DOCKERHUB_USER }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      # Build-time secrets are passed through a temp file mounted as a docker secret
      - name: Create secrets file for Docker build
        if: ${{ success() && !cancelled() }}
        run: |
          CN_VERSION_SPEC_FOR_RC="$(poetry run python \
            ./script/make_utils/pyproject_version_parser_helper.py \
            --pyproject-toml-file pyproject.toml \
            --get-pip-install-spec-for-dependency concrete-python)"
          SECRETS_FILE="$(mktemp)"
          if [[ "${IS_PRERELEASE}" == "true" ]]; then
            {
              echo "PRERELEASE=true";
              echo "PIP_INDEX_URL=${PIP_INDEX_URL}";
              echo "PIP_EXTRA_INDEX_URL=${PIP_EXTRA_INDEX_URL}";
              echo "CN_VERSION='${CN_VERSION_SPEC_FOR_RC}'";
            } >> "${SECRETS_FILE}"
          fi
          echo "" >> "${SECRETS_FILE}"
          echo "SECRETS_FILE=${SECRETS_FILE}" >> "$GITHUB_ENV"
      # Build locally (load: true, push: false); pushing happens later, after the
      # sanity check below
      - name: Build Docker Concrete-ML Image
        if: ${{ success() && !cancelled() }}
        uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671
        with:
          context: .
          file: docker/Dockerfile.release
          load: true
          push: false
          tags: "${{ env.RELEASE_IMG_TAGS_TO_PUSH }}"
          no-cache: true
          secret-files: |
            "build-env=${{ env.SECRETS_FILE }}"
      - name: Remove secrets file
        if: ${{ always() }}
        run: |
          rm -rf "${SECRETS_FILE}"
      # Smoke-test the freshly built image before pushing anything
      - name: Release image sanity check
        if: ${{ success() && !cancelled() }}
        run: |
          echo "Running sanity check for ${PRIVATE_RELEASE_IMG_GIT_TAG}"
          docker run --rm -v "$(pwd)"/docker/release_resources:/data \
            "${PRIVATE_RELEASE_IMG_GIT_TAG}" /bin/bash -c "python ./sanity_check.py"
- name: Prepare docs push
id: docs-push-infos
run: |
if [[ "${IS_PRERELEASE}" == "true" ]]; then
if [[ ${{ secrets.AWS_REPO_PREPROD_DOCUMENTATION_BUCKET_NAME }} != "" ]] && \
[[ ${{ secrets.AWS_REPO_PREPROD_DOCUMENTATION_DISTRIBUTION_ID }} != "" ]]; then
echo "aws-bucket=${{ secrets.AWS_REPO_PREPROD_DOCUMENTATION_BUCKET_NAME }}" >> $GITHUB_OUTPUT
echo "aws-distribution=${{ secrets.AWS_REPO_PREPROD_DOCUMENTATION_DISTRIBUTION_ID }}" >> $GITHUB_OUTPUT
else
echo "No preprod buckets for prerelease!"
exit 1
fi
else
echo "aws-bucket=${{ secrets.AWS_REPO_DOCUMENTATION_BUCKET_NAME }}" >> $GITHUB_OUTPUT
echo "aws-distribution=${{ secrets.AWS_REPO_DOCUMENTATION_DISTRIBUTION_ID }}" >> $GITHUB_OUTPUT
fi
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@5727f247b64f324ec403ac56ae05e220fd02b65f
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_REGION }}
      # Merge the new version into the bucket's versions.json, flagging it as
      # latest and/or prerelease as computed in 'Set tag in env'
      - name: Update versions.json for docs
        if: ${{ success() && !cancelled() }}
        env:
          RAW_DOCS_DIR: ${{ steps.download-docs.outputs.download-path }}
        run: |
          DOWNLOADED_VERSIONS_JSON_FILE=$(mktemp --suffix=.json)
          OUTPUT_VERSIONS_JSON_FILE=$(mktemp --suffix=.json)
          OPTS=""
          if [[ $IS_LATEST = "true" ]]; then
            OPTS="${OPTS} --latest "
          fi
          if [[ $IS_PRERELEASE = "true" ]]; then
            OPTS="${OPTS} --prerelease "
          fi
          aws s3api get-object \
            --bucket ${{ steps.docs-push-infos.outputs.aws-bucket }} \
            --key concrete-ml/versions.json "${DOWNLOADED_VERSIONS_JSON_FILE}"
          # shellcheck disable=SC2086
          poetry run python ./script/actions_utils/generate_versions_json.py \
            --add-version "${PROJECT_VERSION}" \
            --versions-json-file "${DOWNLOADED_VERSIONS_JSON_FILE}" \
            --output-json "${OUTPUT_VERSIONS_JSON_FILE}" \
            $OPTS
          echo "OUTPUT_VERSIONS_JSON_FILE=${OUTPUT_VERSIONS_JSON_FILE}" >> "$GITHUB_ENV"
          # Copy to docs to keep a version in docs artifacts
          cp "${OUTPUT_VERSIONS_JSON_FILE}" "${RAW_DOCS_DIR}"/versions.json
      # Package docs + changelog into the release artifacts and assemble the
      # GitHub release body from the template and the changelog
      - name: Create ready to upload/packaged artifacts and release body
        if: ${{ success() && !cancelled() }}
        env:
          RAW_DOCS_DIR: ${{ steps.download-docs.outputs.download-path }}
          RAW_CHANGELOG_DIR: ${{ steps.download-changelog.outputs.download-path }}
        run: |
          pushd "${RAW_DOCS_DIR}"
          zip -r "${ARTIFACTS_PACKAGED_DIR}/html-docs.zip" ./*
          tar -cvzf "${ARTIFACTS_PACKAGED_DIR}/html-docs.tar.gz" ./*
          # Remove the versions.json to avoid pushing it to S3 but have it in release artifacts
          rm versions.json
          popd
          cp "${RAW_CHANGELOG_DIR}"/* "${ARTIFACTS_PACKAGED_DIR}"
          ls -a "${ARTIFACTS_PACKAGED_DIR}"
          RELEASE_BODY_FILE=RELEASE_BODY.md
          echo "RELEASE_BODY_FILE=${RELEASE_BODY_FILE}" >> "$GITHUB_ENV"
          cp ./script/actions_utils/RELEASE_TEMPLATE.md "${RELEASE_BODY_FILE}"
          {
            echo "Docker Image: ${PUBLIC_RELEASE_IMAGE_BASE}:${GIT_TAG}";
            echo "pip: https://pypi.org/project/concrete-ml/${PROJECT_VERSION}";
            echo "Documentation: https://docs.zama.ai/concrete-ml";
            echo "";
          } >> "${RELEASE_BODY_FILE}"
          cat "${RAW_CHANGELOG_DIR}"/* >> "${RELEASE_BODY_FILE}"
      # Push all tags built earlier; the public repo only gets real releases
      - name: Push release docker image
        if: ${{ success() && !cancelled() }}
        run: |
          docker image push --all-tags "${PRIVATE_RELEASE_IMAGE_BASE}"
          if [[ "${IS_PRERELEASE}" == "false" ]]; then
            docker image push --all-tags "${PUBLIC_RELEASE_IMAGE_BASE}"
          fi
      # Real releases go to public PyPI
      - name: Push package to PyPi
        if: ${{ success() && !cancelled() && !fromJSON(env.IS_PRERELEASE) }}
        run: |
          poetry run twine upload \
            -u __token__ -p ${{ secrets.PYPI_BOT_TOKEN }} \
            -r pypi "${{ env.ARTIFACTS_PACKAGED_DIR }}"/*.whl
      # Prereleases only go to the internal index
      - name: Push package to Internal PyPi
        if: ${{ success() && !cancelled() && fromJSON(env.IS_PRERELEASE) }}
        run: |
          poetry run twine upload \
            -u "${{ secrets.INTERNAL_PYPI_BOT_USERNAME }}" -p "${{ secrets.INTERNAL_PYPI_BOT_PASSWORD }}" \
            --repository-url "${{ secrets.INTERNAL_PYPI_URL }}" "${{ env.ARTIFACTS_PACKAGED_DIR }}"/*.whl
      - name: Push release documentation
        if: ${{ success() && !cancelled() }}
        env:
          AWS_S3_BUCKET: ${{ steps.docs-push-infos.outputs.aws-bucket }}
          SOURCE_DIR: ${{ steps.download-docs.outputs.download-path }}
          DEST_DIR: 'concrete-ml/${{ env.PROJECT_VERSION }}'
        run: |
          aws s3 sync "${SOURCE_DIR}" s3://"${AWS_S3_BUCKET}/${DEST_DIR}" --delete --acl public-read
      # The latest non-prerelease also becomes the 'stable' docs
      - name: Push release documentation as stable
        if: ${{ success() && !cancelled() && !fromJSON(env.IS_PRERELEASE) && fromJSON(env.IS_LATEST) }}
        env:
          AWS_S3_BUCKET: ${{ steps.docs-push-infos.outputs.aws-bucket }}
          SOURCE_DIR: ${{ steps.download-docs.outputs.download-path }}
          DEST_DIR: 'concrete-ml/stable'
        run: |
          aws s3 sync "${SOURCE_DIR}" s3://"${AWS_S3_BUCKET}/${DEST_DIR}" --delete --acl public-read
      - name: Invalidate CloudFront Cache for stable
        if: ${{ success() && !fromJSON(env.IS_PRERELEASE) && fromJSON(env.IS_LATEST) }}
        env:
          SOURCE_PATH: "/concrete-ml/stable/*"
          DISTRIBUTION_ID: ${{ steps.docs-push-infos.outputs.aws-distribution }}
        run: |
          aws cloudfront create-invalidation \
            --distribution-id "${DISTRIBUTION_ID}" \
            --paths "${SOURCE_PATH}"
      - name: Create GitHub release
        if: ${{ success() && !cancelled() }}
        id: create-release
        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844
        with:
          body_path: ${{ env.RELEASE_BODY_FILE }}
          prerelease: ${{ fromJSON(env.IS_PRERELEASE) }}
          files: |
            ${{ env.ARTIFACTS_PACKAGED_DIR }}/*
          tag_name: ${{ env.GIT_TAG }}
          fail_on_unmatched_files: true
          token: ${{ secrets.BOT_TOKEN }}
      # Upload the merged versions.json and invalidate its CloudFront path
      - name: Push updated versions.json
        if: ${{ success() }}
        run: |
          aws s3 cp "${OUTPUT_VERSIONS_JSON_FILE}" \
            s3://${{ steps.docs-push-infos.outputs.aws-bucket }}/concrete-ml/versions.json \
            --acl public-read
          aws cloudfront create-invalidation \
            --distribution-id ${{ steps.docs-push-infos.outputs.aws-distribution }} \
            --paths /concrete-ml/versions.json
- name: Set notification report
id: report
if: ${{ always() }}
run: |
REPORT="Creating release for ${GIT_TAG} finished with status ${{ job.status }}. \
GitHub release link: ${{ steps.create-release.outputs.url }}."
echo "${REPORT}"
echo "report=${REPORT}" >> $GITHUB_ENV
echo "REPORT=${REPORT}" >> $GITHUB_ENV
      # Notify Slack only when something went wrong (job not fully successful)
      - name: Slack Notification
        if: ${{ always() && !success() }}
        continue-on-error: true
        uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7
        env:
          SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
          SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
          SLACK_COLOR: ${{ job.status }}
          SLACK_MESSAGE: "${{ env.REPORT }} (${{ env.ACTION_RUN_URL }})"
          SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
          SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
  # Close the AWS EC2 instance which was used during release preparation
  stop-package-release-runner:
    name: Stop EC2 runner
    needs: [start-package-release-runner, package-release]
    runs-on: ubuntu-20.04
    # Always attempt cleanup, even when the release job failed, as long as the
    # start job actually ran
    if: ${{ always() && (needs.start-package-release-runner.result != 'skipped') }}
    steps:
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@5727f247b64f324ec403ac56ae05e220fd02b65f
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_REGION }}
      - name: Stop EC2 runner
        uses: machulav/ec2-github-runner@4e0303de215db88e1c489e07a15ca4d867f488ea
        # Skip when no instance id was produced (start step failed before launch)
        if: ${{ always() && needs.start-package-release-runner.outputs.ec2-instance-id }}
        with:
          github-token: ${{ secrets.EC2_RUNNER_BOT_TOKEN }}
          label: ${{ needs.start-package-release-runner.outputs.label }}
          ec2-instance-id: ${{ needs.start-package-release-runner.outputs.ec2-instance-id }}
          mode: stop
  # Aggregate the results of all jobs and send a single Slack summary
  send-report:
    if: ${{ always() }}
    timeout-minutes: 2
    needs:
      [
        matrix-preparation,
        start-runner-linux,
        build-linux,
        stop-runner-linux,
        build-macos,
        publish-docs,
        package-release,
      ]
    name: Send Slack notification
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
      # Combine the 'needs' results into a single overall status string
      - name: Prepare whole job status
        if: ${{ always() }}
        continue-on-error: true
        env:
          NEEDS_JSON: ${{ toJSON(needs) }}
        run: |
          echo "${NEEDS_JSON}" > /tmp/needs_context.json
          JOB_STATUS=$(python3 ./script/actions_utils/actions_combine_status.py \
            --needs_context_json /tmp/needs_context.json)
          echo "JOB_STATUS=${JOB_STATUS}" >> "$GITHUB_ENV"
      - name: Slack Notification
        if: ${{ always() }}
        continue-on-error: true
        uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7
        env:
          SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
          SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
          # Fall back to 'failure' when the status step could not run
          SLACK_COLOR: ${{ env.JOB_STATUS || 'failure' }}
          SLACK_MESSAGE: "Full run finished with status ${{ env.JOB_STATUS || 'failure' }} \
            (${{ env.ACTION_RUN_URL }})\n\
            - matrix-preparation: ${{ needs.matrix-preparation.result || 'Did not run.'}}\n\n\
            - start-runner-linux: ${{ needs.start-runner-linux.result || 'Did not run.'}}\n\n\
            - build-linux: ${{ needs.build-linux.result || 'Did not run.' }}\n\n\
            - stop-runner-linux: ${{ needs.stop-runner-linux.result || 'Did not run.'}}\n\n\
            - build-macos: ${{ needs.build-macos.result || 'Did not run.' }}\n\n\
            - publish-docs: ${{ needs.publish-docs.outputs.report || 'Did not run.' }}\n\n\
            - package-release: ${{ needs.package-release.outputs.report || 'Did not run.' }}"
          SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
          SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}