Refresh notebooks for chore/debug_decision_tree_classifier_notebook #2509
Workflow file for this run
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
name: CML builds (weekly or not)

on:
  pull_request:

  push:
    branches:
      - main
      - 'release/*'

  release:
    types: [published]

  # Workflow dispatch and schedule refer to the weekly build
  workflow_dispatch:
    inputs:
      event_name:
        description: "Event that triggers the workflow"
        required: true
        type: choice
        default: weekly
        options:
          - weekly
          - pr
          - push_to_main
          - push_to_release
      linux_python_versions:
        description: "Space separated list of python versions (3.8, 3.9, 3.10 are supported) to launch on linux"
        required: false
        type: string
        default: "3.8 3.9"
      macos_python_versions:
        description: "Space separated list of python versions (3.8, 3.9, 3.10 are supported) to launch on macos"
        required: false
        type: string
        default: "3.8 3.9"
      manual_call:
        description: "Do not uncheck this"
        type: boolean
        required: false
        default: true

  # Workflow call refers to the release process (it enables the current CI workflow to be called by
  # another workflow from the same repository, in this case the release one)
  # The "release" event is not put by default in order to avoid running the following CI without
  # explicitly indicating it in the caller workflow
  # Besides, GitHub actions are not able to differentiate 'workflow_dispatch' from 'workflow_call'
  # based on 'github.event_name' and both are set to 'workflow_dispatch'. Therefore, an optional
  # input 'manual_call' with proper default values is added to both as a workaround, following one
  # user's suggestion : https://github.com/actions/runner/discussions/1884
  # FIXME: https://github.com/zama-ai/concrete-ml-internal/issues/3930
  workflow_call:
    inputs:
      event_name:
        description: "Event that triggers the workflow"
        required: true
        type: string
      manual_call:
        description: 'To distinguish workflow_call from workflow_dispatch'
        type: boolean
        required: false
        default: false

  schedule:
    # * is a special character in YAML so you have to quote this string
    # At 22:00 on Sunday
    # Timezone is UTC, so Paris time is +2 during the summer and +1 during winter
    - cron: '0 22 * * 0'

concurrency:
  # Add event_name in the group as workflow dispatch means we could run this in addition to other
  # workflows already running on a PR or a merge e.g.
  group: "${{ github.ref }}-${{ github.event_name }}-${{ github.workflow }}"
  # Cancel the previous build, except on main
  cancel-in-progress: ${{ github.event_name != 'push' || github.ref != 'refs/heads/main' }}

env:
  # Direct URL to this very workflow run, used in notifications
  ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
  # The CI can be triggered by the release workflow which itself can be triggered by the merge of a
  # pull-request (following the 'prepare_release' workflow). Since GitHub weirdly propagates the
  # original 'github.event_name' (here "pull_request") in all nested workflows, we need to
  # differentiate the release CI from regular CIs by using 'inputs.event_name', which should be set
  # to "release" by the release workflow
  IS_PR: ${{ github.event_name == 'pull_request' && inputs.event_name != 'release' }}
  IS_WEEKLY: ${{ github.event_name == 'schedule' || ((github.event_name == 'workflow_dispatch') && (inputs.event_name == 'weekly')) }}
  # The 'IS_RELEASE' variable indicates that the workflow has been triggered by the releasing
  # process itself, before publishing it. It should only happen when the release workflow triggers
  # the CI, in which 'inputs.event_name' is set to "release"
  IS_RELEASE: ${{ inputs.event_name == 'release' }}
  IS_PUSH_TO_MAIN: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
  IS_PUSH_TO_RELEASE: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/heads/release/') }}
  IS_WORKFLOW_DISPATCH: ${{ github.event_name == 'workflow_dispatch' && inputs.manual_call }}
  # The 'IS_PUBLISHED_RELEASE' variable indicates that the workflow has been triggered by a
  # release's successful publishing
  IS_PUBLISHED_RELEASE: ${{ github.event_name == 'release' }}
  AGENT_TOOLSDIRECTORY: /opt/hostedtoolcache
  RUNNER_TOOL_CACHE: /opt/hostedtoolcache
  GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
  # The 'FAILED_TESTS_ARE_FLAKY' variable is used to print a warning messages if flaky tests are
  # rerun. By default, we do not want to print this warning
  FAILED_TESTS_ARE_FLAKY: "false"
jobs:
  # Compute the build type (pr / weekly / release / ...), the EC2 instance type and the Python
  # version matrices for Linux and macOS, exposing them as job outputs for the runner jobs below
  matrix-preparation:
    # We skip the CI in cases of pushing to internal main (because all pushes to main internal are now from the bot)
    if: ${{ !( github.repository == 'zama-ai/concrete-ml-internal' && github.event_name == 'push' && github.ref == 'refs/heads/main' ) }}
    runs-on: ubuntu-20.04
    timeout-minutes: 5
    outputs:
      linux-matrix: ${{ steps.set-matrix.outputs.linux-matrix }}
      macos-matrix: ${{ steps.set-matrix.outputs.macos-matrix }}
      needs-38-linux-runner: ${{ steps.set-matrix.outputs.needs-38-linux-runner }}
      needs-39-linux-runner: ${{ steps.set-matrix.outputs.needs-39-linux-runner }}
      needs-310-linux-runner: ${{ steps.set-matrix.outputs.needs-310-linux-runner }}
      instance-type: ${{ steps.set-matrix.outputs.instance-type }}
    steps:
      - name: Checkout code
        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633

      - name: Set matrix
        id: set-matrix
        run: |
          echo "${{ github.event_name }}"

          # Manage build type that will condition the rest of the CI
          if [[ "${IS_PR}" == "true" ]]; then
            BUILD_TYPE="pr"
          elif [[ "${IS_WEEKLY}" == "true" ]]; then
            BUILD_TYPE="weekly"
          elif [[ "${IS_RELEASE}" == "true" ]]; then
            BUILD_TYPE="release"
          elif [[ "${IS_PUSH_TO_MAIN}" == "true" ]]; then
            BUILD_TYPE="push_to_main"
          elif [[ "${IS_PUSH_TO_RELEASE}" == "true" ]]; then
            BUILD_TYPE="push_to_release"
          elif [[ "${IS_WORKFLOW_DISPATCH}" == "true" ]]; then
            BUILD_TYPE="${{ inputs.event_name }}"
          elif [[ "${IS_PUBLISHED_RELEASE}" == "true" ]]; then
            BUILD_TYPE="published_release"
          else
            echo "Unknown BUILD_TYPE! Aborting"
            exit 1
          fi

          # Manage instance type: weekly and release builds get bigger machines
          INSTANCE_TYPE="c5.4xlarge"
          if [[ "${BUILD_TYPE}" == "weekly" ]]; then
            INSTANCE_TYPE="m6i.metal"
          elif [[ "${BUILD_TYPE}" == "release" ]]; then
            INSTANCE_TYPE="m6i.16xlarge"
          fi

          # Manage python versions: manual dispatch uses the user-provided lists, otherwise the
          # lists depend on the build type (PR/push builds only test 3.8, weekly/release test all)
          if [[ "${IS_WORKFLOW_DISPATCH}" == "true" ]]; then
            LINUX_PYTHON_VERSIONS="${{ inputs.linux_python_versions }}"
            MACOS_PYTHON_VERSIONS="${{ inputs.macos_python_versions }}"
          elif [[ "${BUILD_TYPE}" == "pr" ]]; then
            LINUX_PYTHON_VERSIONS="3.8"
            MACOS_PYTHON_VERSIONS=""
          elif [[ "${BUILD_TYPE}" == "weekly" ]]; then
            LINUX_PYTHON_VERSIONS="3.8 3.9 3.10"
            MACOS_PYTHON_VERSIONS="3.9"
          elif [[ "${BUILD_TYPE}" == "release" ]] || [[ "${BUILD_TYPE}" == "published_release" ]]; then
            LINUX_PYTHON_VERSIONS="3.8 3.9 3.10"
            MACOS_PYTHON_VERSIONS=""
          elif [[ "${BUILD_TYPE}" == "push_to_main" ]]; then
            LINUX_PYTHON_VERSIONS="3.8"
            MACOS_PYTHON_VERSIONS=""
          elif [[ "${BUILD_TYPE}" == "push_to_release" ]]; then
            LINUX_PYTHON_VERSIONS="3.8"
            MACOS_PYTHON_VERSIONS=""
          else
            echo "Unknown BUILD_TYPE! Aborting"
            exit 1
          fi

          echo "LINUX_PYTHON_VERSIONS: ${LINUX_PYTHON_VERSIONS}"
          echo "MACOS_PYTHON_VERSIONS: ${MACOS_PYTHON_VERSIONS}"
          echo "BUILD_TYPE: ${BUILD_TYPE}"
          echo "INSTANCE_TYPE: ${INSTANCE_TYPE}"

          MATRIX_JSON=$(mktemp --suffix=.json)
          echo "Prepared build matrix:"
          python3 ./script/actions_utils/generate_test_matrix.py \
            --output-json "${MATRIX_JSON}" \
            --linux-python-versions ${LINUX_PYTHON_VERSIONS} \
            --macos-python-versions ${MACOS_PYTHON_VERSIONS}

          # Split the generated matrix per OS
          LINUX_MATRIX=$(jq -rc '. | map(select(.os_kind=="linux"))' "${MATRIX_JSON}")
          MACOS_MATRIX=$(jq -rc '. | map(select(.os_kind=="macos"))' "${MATRIX_JSON}")

          echo "Linux Matrix:"
          echo "${LINUX_MATRIX}" | jq '.'
          echo "macOS Matrix:"
          echo "${MACOS_MATRIX}" | jq '.'

          echo "linux-matrix=${LINUX_MATRIX}" >> $GITHUB_OUTPUT
          echo "macos-matrix=${MACOS_MATRIX}" >> $GITHUB_OUTPUT

          # One EC2 runner per Python version is only started if the matrix needs it
          NEEDS_LINUX_38_RUNNER=$(echo "${LINUX_MATRIX}" | \
            jq -rc '. | map(select(.os_kind=="linux" and .python_version=="3.8")) | length > 0')
          NEEDS_LINUX_39_RUNNER=$(echo "${LINUX_MATRIX}" | \
            jq -rc '. | map(select(.os_kind=="linux" and .python_version=="3.9")) | length > 0')
          NEEDS_LINUX_310_RUNNER=$(echo "${LINUX_MATRIX}" | \
            jq -rc '. | map(select(.os_kind=="linux" and .python_version=="3.10")) | length > 0')

          echo "Needs Linux 3.8 runner:"
          echo "${NEEDS_LINUX_38_RUNNER}"
          echo "Needs Linux 3.9 runner:"
          echo "${NEEDS_LINUX_39_RUNNER}"
          echo "Needs Linux 3.10 runner:"
          echo "${NEEDS_LINUX_310_RUNNER}"

          echo "needs-38-linux-runner=${NEEDS_LINUX_38_RUNNER}" >> $GITHUB_OUTPUT
          echo "needs-39-linux-runner=${NEEDS_LINUX_39_RUNNER}" >> $GITHUB_OUTPUT
          echo "needs-310-linux-runner=${NEEDS_LINUX_310_RUNNER}" >> $GITHUB_OUTPUT
          echo "instance-type=${INSTANCE_TYPE}" >> $GITHUB_OUTPUT

  # Start one self-hosted EC2 runner per required Python version, then patch the Linux matrix so
  # that each entry's 'runs_on' points at the label of the runner started for its Python version
  start-runner-linux:
    needs: [matrix-preparation]
    name: Start EC2 runner
    runs-on: ubuntu-20.04
    timeout-minutes: 15
    outputs:
      label-38: ${{ steps.start-ec2-runner-38.outputs.label }}
      ec2-instance-id-38: ${{ steps.start-ec2-runner-38.outputs.ec2-instance-id || '' }}
      label-39: ${{ steps.start-ec2-runner-39.outputs.label }}
      ec2-instance-id-39: ${{ steps.start-ec2-runner-39.outputs.ec2-instance-id || '' }}
      label-310: ${{ steps.start-ec2-runner-310.outputs.label }}
      ec2-instance-id-310: ${{ steps.start-ec2-runner-310.outputs.ec2-instance-id || '' }}
      matrix: ${{ steps.update-linux-matrix.outputs.linux-matrix }}
    steps:
      - name: Checkout Code
        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633

      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_REGION }}

      - name: Start EC2 runner python 38
        id: start-ec2-runner-38
        if: ${{ !cancelled() && fromJSON(needs.matrix-preparation.outputs.needs-38-linux-runner) }}
        uses: machulav/ec2-github-runner@fcfb31a5760dad1314a64a0e172b78ec6fc8a17e
        with:
          mode: start
          github-token: ${{ secrets.EC2_RUNNER_BOT_TOKEN }}
          ec2-image-id: ${{ secrets.AWS_EC2_AMI }}
          ec2-instance-type: ${{ needs.matrix-preparation.outputs.instance-type }}
          subnet-id: ${{ secrets.AWS_EC2_SUBNET_ID }}
          security-group-id: ${{ secrets.AWS_EC2_SECURITY_GROUP_ID }}
          aws-resource-tags: >
            [
              {"Key": "Name", "Value": "cml-ci-ec2-github-runner-py38"},
              {"Key": "GitHubRepository", "Value": "${{ github.repository }}"},
              {"Key": "Python version", "Value": "3.8"},
              {"Key": "Actor", "Value": "${{ github.actor }}"},
              {"Key": "Action", "Value": "${{ github.action }}"},
              {"Key": "GitHash", "Value": "${{ github.sha }}"},
              {"Key": "RefName", "Value": "${{ github.ref_name }}"},
              {"Key": "RunId", "Value": "${{ github.run_id }}"},
              {"Key": "Team", "Value": "CML"}
            ]

      - name: Start EC2 runner python 39
        id: start-ec2-runner-39
        if: ${{ !cancelled() && fromJSON(needs.matrix-preparation.outputs.needs-39-linux-runner) }}
        uses: machulav/ec2-github-runner@fcfb31a5760dad1314a64a0e172b78ec6fc8a17e
        with:
          mode: start
          github-token: ${{ secrets.EC2_RUNNER_BOT_TOKEN }}
          ec2-image-id: ${{ secrets.AWS_EC2_AMI }}
          ec2-instance-type: ${{ needs.matrix-preparation.outputs.instance-type }}
          subnet-id: ${{ secrets.AWS_EC2_SUBNET_ID }}
          security-group-id: ${{ secrets.AWS_EC2_SECURITY_GROUP_ID }}
          aws-resource-tags: >
            [
              {"Key": "Name", "Value": "cml-ci-ec2-github-runner-py39"},
              {"Key": "GitHubRepository", "Value": "${{ github.repository }}"},
              {"Key": "Python version", "Value": "3.9"},
              {"Key": "Actor", "Value": "${{ github.actor }}"},
              {"Key": "Action", "Value": "${{ github.action }}"},
              {"Key": "GitHash", "Value": "${{ github.sha }}"},
              {"Key": "RefName", "Value": "${{ github.ref_name }}"},
              {"Key": "RunId", "Value": "${{ github.run_id }}"},
              {"Key": "Team", "Value": "CML"}
            ]

      - name: Start EC2 runner python 310
        id: start-ec2-runner-310
        if: ${{ !cancelled() && fromJSON(needs.matrix-preparation.outputs.needs-310-linux-runner) }}
        uses: machulav/ec2-github-runner@fcfb31a5760dad1314a64a0e172b78ec6fc8a17e
        with:
          mode: start
          github-token: ${{ secrets.EC2_RUNNER_BOT_TOKEN }}
          ec2-image-id: ${{ secrets.AWS_EC2_AMI }}
          ec2-instance-type: ${{ needs.matrix-preparation.outputs.instance-type }}
          subnet-id: ${{ secrets.AWS_EC2_SUBNET_ID }}
          security-group-id: ${{ secrets.AWS_EC2_SECURITY_GROUP_ID }}
          aws-resource-tags: >
            [
              {"Key": "Name", "Value": "cml-ci-ec2-github-runner-py310"},
              {"Key": "GitHubRepository", "Value": "${{ github.repository }}"},
              {"Key": "Python version", "Value": "3.10"},
              {"Key": "Actor", "Value": "${{ github.actor }}"},
              {"Key": "Action", "Value": "${{ github.action }}"},
              {"Key": "GitHash", "Value": "${{ github.sha }}"},
              {"Key": "RefName", "Value": "${{ github.ref_name }}"},
              {"Key": "RunId", "Value": "${{ github.run_id }}"},
              {"Key": "Team", "Value": "CML"}
            ]

      - name: Update Linux runs_on Matrix
        id: update-linux-matrix
        env:
          MATRIX: ${{ needs.matrix-preparation.outputs.linux-matrix }}
        run: |
          MATRIX=$(echo "${MATRIX}" | jq -rc \
            '(. | map(select(.os_kind=="linux" and .python_version=="3.8") |= . + {"runs_on": "${{ steps.start-ec2-runner-38.outputs.label }}"}) )')
          MATRIX=$(echo "${MATRIX}" | jq -rc \
            '(. | map(select(.os_kind=="linux" and .python_version=="3.9") |= . + {"runs_on": "${{ steps.start-ec2-runner-39.outputs.label }}"}) )')
          MATRIX=$(echo "${MATRIX}" | jq -rc \
            '(. | map(select(.os_kind=="linux" and .python_version=="3.10") |= . + {"runs_on": "${{ steps.start-ec2-runner-310.outputs.label }}"}) )')

          echo "Updated matrix:"
          echo "${MATRIX}"
          echo "linux-matrix=${MATRIX}" >> $GITHUB_OUTPUT
build-linux: | |
needs: [start-runner-linux] | |
runs-on: ${{ matrix.runs_on }} | |
# Run in a clean container | |
container: | |
image: ubuntu:20.04 | |
defaults: | |
run: | |
shell: bash | |
strategy: | |
fail-fast: false | |
matrix: ${{ fromJSON(format('{{"include":{0}}}', needs.start-runner-linux.outputs.matrix)) }} | |
env: | |
IS_REF_BUILD: ${{ matrix.python_version == '3.8' }} | |
PIP_INDEX_URL: ${{ secrets.PIP_INDEX_URL }} | |
PIP_EXTRA_INDEX_URL: ${{ secrets.PIP_EXTRA_INDEX_URL }} | |
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} | |
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} | |
steps: | |
- name: Add masks | |
run: | | |
echo "::add-mask::${{ secrets.INTERNAL_PYPI_URL_FOR_MASK }}" | |
echo "::add-mask::${{ secrets.INTERNAL_REPO_URL_FOR_MASK }}" | |
# Replace default archive.ubuntu.com from docker image with fr mirror | |
# original archive showed performance issues and is farther away | |
- name: Docker container related setup and git installation | |
run: | | |
TZ=Europe/Paris | |
echo "TZ=${TZ}" >> "$GITHUB_ENV" | |
ln -snf /usr/share/zoneinfo/${TZ} /etc/localtime && echo ${TZ} > /etc/timezone | |
sed -i 's|^deb http://archive|deb http://fr.archive|g' /etc/apt/sources.list | |
apt update && apt install git git-lfs -y | |
# By default, `git clone` downloads all LFS files, which we want to avoid in CIs other than | |
# weekly ones (which also test notebooks) | |
- name: Disable LFS download by default | |
if: ${{ !fromJSON(env.IS_WEEKLY) }} | |
run: | | |
git lfs install --skip-smudge | |
# Checkout the code | |
# 'fetch-depth' is set to 0 in order to fetch all tags (used for generating the changelog) | |
- name: Checkout Code | |
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 | |
with: | |
fetch-depth: 0 | |
# Pull necessary LFS files (and thus avoid downloading files stored for benchmarks, use cases, ...) | |
- name: Pull LFS files | |
run: | | |
git lfs pull --include "tests/data/**, src/concrete/ml/pandas/_client_server_files/**" --exclude "" | |
- name: Set up Python ${{ matrix.python_version }} | |
uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d | |
id: setup-python | |
with: | |
python-version: ${{ matrix.python_version }} | |
- name: Check python3 version | |
env: | |
SYSTEM_VERSION_COMPAT: 0 | |
run: | | |
which python3 | |
which pip3 | |
- name: Install dependencies | |
id: install-deps | |
run: | | |
./script/make_utils/setup_os_deps.sh | |
mkdir -p ~/.aws | |
echo "[default]\nregion=eu-west-3\noutput=json\n" >> ~/.aws/config | |
# Needed to include Python.h | |
export C_INCLUDE_PATH="${C_INCLUDE_PATH}:/__w/_tool/Python/$(python -c 'import platform; print(platform.python_version())')/x64/include" | |
echo | |
echo "Using these tools:" | |
which python3 | |
which pip3 | |
echo | |
make setup_env | |
# Commit types are found in https://www.conventionalcommits.org/en/v1.0.0/ | |
- name: Check commits first line format | |
id: commit-first-line | |
if: ${{ fromJSON(env.IS_PR) && steps.install-deps.outcome == 'success' && !cancelled() }} | |
uses: gsactions/commit-message-checker@16fa2d5de096ae0d35626443bcd24f1e756cafee | |
with: | |
pattern: '^((build|chore|ci|docs|feat|fix|perf|refactor|revert|style|test)\:) .+$' | |
flags: 'gs' | |
error: "Your first line has to contain a commit type like \"feat: message\".\ | |
Pattern: '^((build|chore|ci|docs|feat|fix|perf|refactor|revert|style|test)\\:)'" | |
excludeDescription: 'true' # optional: this excludes the description body of a pull request | |
excludeTitle: 'true' # optional: this excludes the title of a pull request | |
checkAllCommitMessages: 'true' # optional: this checks all commits associated with a pull request | |
accessToken: ${{ secrets.GITHUB_TOKEN }} # github access token is only required if checkAllCommitMessages is true | |
- name: Check commits line length | |
id: commit-line-length | |
if: ${{ fromJSON(env.IS_PR) && steps.install-deps.outcome == 'success' && !cancelled() }} | |
uses: gsactions/commit-message-checker@16fa2d5de096ae0d35626443bcd24f1e756cafee | |
with: | |
pattern: '(^.{0,74}$\r?\n?){0,20}' | |
flags: 'gm' | |
error: 'The maximum line length of 74 characters is exceeded.' | |
excludeDescription: 'true' # optional: this excludes the description body of a pull request | |
excludeTitle: 'true' # optional: this excludes the title of a pull request | |
checkAllCommitMessages: 'true' # optional: this checks all commits associated with a pull request | |
accessToken: ${{ secrets.GITHUB_TOKEN }} # github access token is only required if checkAllCommitMessages is true | |
- name: Commit conformance | |
id: commit-conformance | |
if: ${{ steps.install-deps.outcome == 'success' && !cancelled() }} | |
env: | |
FIRST_LINE_OK: ${{ (fromJSON(env.IS_PR) && steps.commit-first-line.outcome == 'success') || steps.commit-first-line.outcome == 'skipped' }} | |
LINE_LENGTH_OK: ${{ (fromJSON(env.IS_PR) && steps.commit-line-length.outcome == 'success') || steps.commit-line-length.outcome == 'skipped' }} | |
run: | | |
if [[ "${FIRST_LINE_OK}" != "true" || "${LINE_LENGTH_OK}" != "true" ]]; then | |
echo "Issues with commits. First line ok: ${FIRST_LINE_OK}. Line length ok: ${LINE_LENGTH_OK}." | |
exit 1 | |
fi | |
- name: Check actionlint | |
run: | |
make actionlint | |
- name: Source code conformance | |
id: make-pcc | |
if: ${{ steps.install-deps.outcome == 'success' && !cancelled() }} | |
# pcc launches an internal target with proper flags | |
run: | | |
make pcc | |
# Checked for changes between main and the current branch in a PR. More specifically, | |
# this is used in regular CIs to avoid launching Pytest, checking codeblocks, building docs | |
# or other steps if the associated files were not touched. For most, we also check that the | |
# linux MD5 has not changed, which means that no libraries got updated. This is done in order | |
# to handle PRs which only upgrades dependencies | |
# Following the 'files_yaml' section, we define what files should trigger a defined acronym | |
# (src, codeblocks, ...) when some changes are detected in them. For example, if some | |
# dependencies were changed, 'tests', 'determinism', 'codeblocks' and 'determinism' acronyms | |
# will be affected. We use the license MD5 file for that because it is built on the | |
# poetry.lock as well as the Concrete Python version, which can be installed manually in the | |
# makefile. | |
# For codeblocks, 'make pytest_codeblocks' runs the `make_utils/pytest_codeblocks.sh` script, | |
# which executes a find and grep command to find them. In the following section, we manually | |
# re-define what this command does by looking at all markdown files that are neither in hidden | |
# directories nor in docs/_apidocs or similar paths. Additionally, as for others, we check for | |
# changes in the source directory or in installed dependencies. | |
# This step is skipped if it has been manually triggered in GitHub's Action interface as well | |
# as for release and weekly checks, as there are no changes to check in these cases | |
- name: Get all changed files from main in PR | |
id: changed-files-in-pr | |
if: | | |
fromJSON(env.IS_PR) | |
&& steps.install-deps.outcome == 'success' | |
&& steps.make-pcc.outcome == 'success' | |
&& !cancelled() | |
uses: tj-actions/[email protected] | |
with: | |
files_yaml: | | |
src: | |
- src/** | |
- '!src/concrete/ml/version.py' | |
tests: | |
- 'tests/**/test_*.py' | |
tests_utils: | |
- tests/data/** | |
- src/concrete/ml/pytest/** | |
determinism: | |
- tests/seeding/test_seeding.py | |
docs: | |
- docs/** | |
- '*.md' | |
- LICENSE | |
use_cases: | |
- use_case_examples/** | |
codeblocks: | |
- '**.md' | |
- '!.*/**' | |
- '!docs/_*/**' | |
- '!docs/SUMMARY.md' | |
- '!docs/references/api/**.md' | |
dependencies: | |
- deps_licenses/licenses_linux_user.txt.md5 | |
conftest: | |
- conftest.py | |
makefile: | |
- Makefile | |
# Run determinism test if: | |
# - during weekly or release CI, as well as when the CI has been triggered manually (through | |
# GitHub's Action interface) | |
# - the determinism test file has been changed | |
# - the source code has been changed | |
# - any dependency has been updated | |
# - conftest.py has been changed | |
# - Makefile has been changed | |
- name: Determinism | |
id: determinism | |
if: | | |
( | |
steps.changed-files-in-pr.outcome == 'skipped' | |
|| steps.changed-files-in-pr.outputs.determinism_any_changed == 'true' | |
|| steps.changed-files-in-pr.outputs.src_any_changed == 'true' | |
|| steps.changed-files-in-pr.outputs.dependencies_any_changed == 'true' | |
|| steps.changed-files-in-pr.outputs.conftest_any_changed == 'true' | |
|| steps.changed-files-in-pr.outputs.makefile_any_changed == 'true' | |
) | |
&& steps.install-deps.outcome == 'success' | |
&& steps.make-pcc.outcome == 'success' | |
&& !cancelled() | |
run: | | |
make determinism | |
# Build the documentation if : | |
# - the current workflow takes place in a release CI with the reference build | |
# - the current workflow takes place in a weekly CI or it has been triggered manually (through | |
# GitHub's Action interface) | |
# - any documentation files has been changed | |
# - the source code has been changed | |
# - Makefile has been changed | |
- name: Build docs | |
id: build-docs | |
if: | | |
( | |
(fromJSON(env.IS_RELEASE) && fromJSON(env.IS_REF_BUILD)) | |
|| steps.changed-files-in-pr.outcome == 'skipped' | |
|| steps.changed-files-in-pr.outputs.docs_any_changed == 'true' | |
|| steps.changed-files-in-pr.outputs.use_cases_any_changed == 'true' | |
|| steps.changed-files-in-pr.outputs.src_any_changed == 'true' | |
|| steps.changed-files-in-pr.outputs.makefile_any_changed == 'true' | |
) | |
&& steps.install-deps.outcome == 'success' | |
&& steps.make-pcc.outcome == 'success' | |
&& steps.determinism.outcome != 'failure' | |
&& !cancelled() | |
run: | | |
make docs_no_links | |
# Do not check links during the release process in order to avoid temporary connection errors | |
- name: Check links | |
id: check_links | |
if: | | |
!fromJSON(env.IS_RELEASE) | |
&& steps.build-docs.outcome == 'success' | |
&& !cancelled() | |
run: | | |
make check_links | |
make check_symlinks | |
# Make sure all necessary steps passed. For build-docs and determinism steps, we only check for | |
# non-failures as the 'changed-files-in-pr' step might skip them | |
- name: Stop if previous steps failed | |
id: conformance | |
if: ${{ always() && !cancelled() }} | |
env: | |
CONFORMANCE_STATUS: >- | |
${{ | |
steps.commit-conformance.outcome == 'success' | |
&& steps.make-pcc.outcome == 'success' | |
&& steps.determinism.outcome != 'failure' | |
&& steps.build-docs.outcome != 'failure' | |
&& steps.check_links.outcome != 'failure' | |
}} | |
run: | | |
if [[ "${CONFORMANCE_STATUS}" != "true" ]]; then | |
echo "Conformance failed, got:" | |
echo "Commit conformance success step: ${{ steps.commit-conformance.outcome }}" | |
echo "Make conformance step: ${{ steps.make-pcc.outcome }}" | |
echo "Determinism step: ${{ steps.determinism.outcome }}" | |
echo "Build docs step: ${{ steps.build-docs.outcome }}" | |
echo "Check links step: ${{ steps.check_links.outcome }}" | |
exit 1 | |
fi | |
# Tar the docs for releases with the reference build only | |
# Taring the docs allows for much faster upload speed (from ~3min worst case to ~2s best case) | |
- name: Tar docs artifacts | |
id: tar-docs | |
if: | | |
fromJSON(env.IS_RELEASE) | |
&& fromJSON(env.IS_REF_BUILD) | |
&& steps.conformance.outcome == 'success' | |
&& steps.build-docs.outcome == 'success' | |
&& !cancelled() | |
run: | | |
cd docs/_build/html | |
tar -cvf docs.tar ./* | |
- name: Upload docs artifacts | |
if: ${{ steps.tar-docs.outcome == 'success' && !cancelled() }} | |
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 | |
with: | |
name: html-docs | |
path: docs/_build/html/docs.tar | |
# Generate the changelog for releases with the reference build only | |
# The changelog is generated by considering all commits from the latest stable previous | |
# version (not a release candidate) up to the new upcoming version | |
- name: Generate release changelog | |
id: changelog | |
if: | | |
fromJSON(env.IS_RELEASE) | |
&& fromJSON(env.IS_REF_BUILD) | |
&& steps.conformance.outcome == 'success' | |
&& !cancelled() | |
run: | | |
PROJECT_VERSION="$(poetry version --short)" | |
GIT_TAG="v${PROJECT_VERSION}" | |
CHANGELOG_FILE="CHANGELOG_${GIT_TAG}.md" | |
echo "changelog-file=${CHANGELOG_FILE}" >> $GITHUB_OUTPUT | |
poetry run python ./script/make_utils/changelog_helper.py \ | |
--to-ref "${{ github.sha }}" > "${CHANGELOG_FILE}" | |
- name: Upload changelog artifacts | |
if: ${{ steps.changelog.outcome == 'success' && !cancelled() }} | |
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 | |
with: | |
name: changelog | |
path: ${{ steps.changelog.outputs.changelog-file }} | |
# Build the wheel for releases with the reference build only | |
# Create packages before tests, to be able to get them if some unexpected test failure happens | |
# Build the package only once, as we don't have binary dependency this can be used on Linux | |
# and macOS as long as the dependencies are available | |
- name: Build wheel | |
id: build-wheel | |
if: | | |
fromJSON(env.IS_RELEASE) | |
&& fromJSON(env.IS_REF_BUILD) | |
&& steps.conformance.outcome == 'success' | |
&& !cancelled() | |
run: | | |
rm -rf dist | |
poetry build -f wheel | |
- name: Upload wheel artifacts | |
if: ${{ steps.build-wheel.outcome == 'success' && !cancelled() }} | |
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 | |
with: | |
name: py3-wheel | |
path: dist/*.whl | |
# Run Pytest on a subset of our tests if : | |
# - the current workflow does no take place in a weekly or release CI | |
# - if the CI has been triggered manually (through GitHub's Action interface) | |
# - the source code has been changed | |
# - any tests utils (pytest, data) has been changed | |
# - any dependency has been updated | |
# - conftest.py has been changed | |
# - Makefile has been changed | |
# If only some test files were changed, this step is skipped and each associated tests will be | |
# run individually in a following step (pytest_modified_tests_only) | |
# If regular tests failed, a following script checks for flaky tests. If all failed tests | |
# are known flaky tests, they are rerun. Otherwise, the step exits with status 1. | |
# The 'bash +e {0}' is added here in order to make sure that the step does not exit directly | |
# if 'make pytest' fails | |
- name: PyTest Source Code (regular) | |
id: pytest_regular | |
if: | | |
( | |
( | |
steps.changed-files-in-pr.outcome == 'success' | |
&& ( | |
steps.changed-files-in-pr.outputs.src_any_changed == 'true' | |
|| steps.changed-files-in-pr.outputs.tests_utils_any_changed == 'true' | |
|| steps.changed-files-in-pr.outputs.dependencies_any_changed == 'true' | |
|| steps.changed-files-in-pr.outputs.conftest_any_changed == 'true' | |
|| steps.changed-files-in-pr.outputs.makefile_any_changed == 'true' | |
) | |
) | |
|| fromJSON(env.IS_WORKFLOW_DISPATCH) | |
) | |
&& steps.conformance.outcome == 'success' | |
&& !cancelled() | |
shell: bash +e {0} | |
run: | | |
make pytest_and_report | |
# If regular tests failed, check for flaky tests | |
if [ $? -ne 0 ]; then | |
# Convert pytest report to formatted report with only information about failed tests | |
poetry run python ./script/actions_utils/pytest_failed_test_report.py \ | |
--pytest-input-report "pytest_report.json" \ | |
--failed-tests-report "failed_tests_report.json" \ | |
--failed-tests-comment "failed_tests_comment.txt" | |
# Check if all failed tests are known flaky tests | |
FAILED_TESTS_ARE_FLAKY=$(jq .all_failed_tests_are_flaky "failed_tests_report.json") | |
echo "FAILED_TESTS_ARE_FLAKY=${FAILED_TESTS_ARE_FLAKY}" >> "$GITHUB_ENV" | |
# If all failed tests are known flaky tests, try to rerun them | |
if [[ "${FAILED_TESTS_ARE_FLAKY}" == "true" ]]; then | |
make pytest_run_last_failed | |
# Else, return exit status 1 in order to make this step fail | |
else | |
exit 1 | |
fi | |
fi | |
# If regular tests passed but at least one known flaky test have been rerun, a warning | |
# comment is published in the PR and all flaky tests that initially failed are listed | |
- name: Warn PR with flaky tests | |
uses: marocchino/sticky-pull-request-comment@331f8f5b4215f0445d3c07b4967662a32a2d3e31 | |
if: steps.pytest_regular.outcome == 'success' && fromJSON(env.FAILED_TESTS_ARE_FLAKY) && !cancelled() | |
with: | |
header: flaky-test | |
recreate: true | |
path: failed_tests_comment.txt | |
      # If the regular pytest step has been skipped but some changes have been detected in test
      # files, meaning there was no other change impacting our testing suite, we only need to
      # run these modified tests
      # Note that if pytest utils or test data are changed, the regular pytest step should have
      # been triggered instead
      - name: PyTest on modified tests only
        id: pytest_modified_tests_only
        if: |
          steps.changed-files-in-pr.outcome == 'success'
          && steps.pytest_regular.outcome == 'skipped'
          && steps.changed-files-in-pr.outputs.tests_any_changed == 'true'
          && steps.conformance.outcome == 'success'
          && !cancelled()
        run: |
          # The output is a space-separated list of file paths, so the unquoted expansion and
          # the resulting word splitting are intentional here
          for file in ${{ steps.changed-files-in-pr.outputs.tests_all_changed_files }}; do
            make pytest_one TEST="$file"
          done
      # Run Pytest on all of our tests on a weekly basis
      - name: PyTest Source Code (weekly)
        id: pytest_weekly
        if: ${{ fromJSON(env.IS_WEEKLY) && steps.conformance.outcome == 'success' && !cancelled() }}
        run: |
          make pytest PYTEST_OPTIONS=--weekly
      # Run Pytest on all of our tests on a weekly basis using PyPI's local wheel
      - name: PyTest with PyPI local wheel of Concrete ML (weekly)
        if: |
          fromJSON(env.IS_WEEKLY)
          && steps.conformance.outcome == 'success'
          && !cancelled()
        run: |
          make pytest_pypi_wheel_cml
      # Run Pytest on all of our tests (except flaky ones) using PyPI's local wheel during the
      # release process
      - name: PyTest (no flaky) with PyPI local wheel of Concrete ML (release)
        if: |
          fromJSON(env.IS_RELEASE)
          && steps.conformance.outcome == 'success'
          && !cancelled()
        run: |
          make pytest_pypi_wheel_cml_no_flaky
      # Run Pytest on all of our tests (except flaky ones) using Concrete ML's latest version
      # available on PyPI after publishing a release
      - name: PyTest (no flaky) with PyPI (published release)
        if: |
          fromJSON(env.IS_PUBLISHED_RELEASE)
          && steps.conformance.outcome == 'success'
          && !cancelled()
        run: |
          # Test against the exact version that was just published to PyPI
          PROJECT_VERSION="$(poetry version --short)"
          make pytest_pypi_cml_no_flaky VERSION="$PROJECT_VERSION"
      # Compute coverage only on reference build
      - name: Test coverage (regular, weekly)
        id: coverage
        # Coverage is only meaningful if at least one of the full pytest runs actually happened
        if: |
          fromJSON(env.IS_REF_BUILD)
          && (
            steps.pytest_regular.outcome != 'skipped'
            || steps.pytest_weekly.outcome != 'skipped'
          )
          && !cancelled()
        run: |
          ./script/actions_utils/coverage.sh global-coverage-infos.json
      # Publish (or refresh) the coverage diff as a sticky comment in the PR
      - name: Comment with coverage
        uses: marocchino/sticky-pull-request-comment@331f8f5b4215f0445d3c07b4967662a32a2d3e31
        if: ${{ steps.coverage.outcome != 'skipped' && !cancelled() }}
        continue-on-error: true
        with:
          header: coverage
          recreate: true
          path: diff-coverage.txt
      # Run Pytest on codeblocks if:
      # - the current workflow does not take place in a weekly or release CI
      # - the source code has been changed
      # - any markdown file has been changed
      # - any dependency has been updated
      # - Makefile has been changed
      - name: PyTest CodeBlocks (regular)
        if: |
          (
            (
              steps.changed-files-in-pr.outcome == 'success'
              && (
                steps.changed-files-in-pr.outputs.src_any_changed == 'true'
                || steps.changed-files-in-pr.outputs.codeblocks_any_changed == 'true'
                || steps.changed-files-in-pr.outputs.dependencies_any_changed == 'true'
                || steps.changed-files-in-pr.outputs.makefile_any_changed == 'true'
              )
            )
            || fromJSON(env.IS_WORKFLOW_DISPATCH)
          )
          && steps.conformance.outcome == 'success'
          && !cancelled()
        run: |
          make pytest_codeblocks
      # Run Pytest on all codeblocks on a weekly basis or while releasing
      - name: PyTest CodeBlocks with PyPI local wheel of Concrete ML (weekly, release)
        if: |
          (fromJSON(env.IS_WEEKLY) || fromJSON(env.IS_RELEASE))
          && steps.conformance.outcome == 'success'
          && !cancelled()
        run: |
          make pytest_codeblocks_pypi_wheel_cml
      # Run Pytest on all codeblocks using Concrete ML's latest version available on PyPI after
      # publishing a release
      - name: PyTest CodeBlocks with PyPI (published release)
        if: |
          fromJSON(env.IS_PUBLISHED_RELEASE)
          && steps.conformance.outcome == 'success'
          && !cancelled()
        run: |
          PROJECT_VERSION="$(poetry version --short)"
          make pytest_codeblocks_pypi_cml VERSION="$PROJECT_VERSION"
      # Run Pytest on all notebooks on a weekly basis
      # Note: some notebooks need specific data stored in LFS
      - name: PyTest Notebooks (weekly)
        if: |
          fromJSON(env.IS_WEEKLY)
          && steps.conformance.outcome == 'success'
          && !cancelled()
        run: |
          # LFS downloads are disabled by default in this CI, so explicitly pull the data
          # the notebooks need before running them
          git lfs pull --include "docs/advanced_examples/data/**" --exclude ""
          make pytest_nb
      # Run the repository's fast sanity check target on every build
      - name: Fast sanity check
        if: ${{ steps.conformance.outcome == 'success' && !cancelled() }}
        run: |
          make fast_sanity_check
      # The four steps below verify that Concrete ML can be installed through each supported
      # method, for the current matrix python version, on weekly and release builds only
      # Check installation with sync_env
      - name: Check installation with sync_env and python ${{ matrix.python_version }} (weekly, release)
        if: |
          (fromJSON(env.IS_WEEKLY) || fromJSON(env.IS_RELEASE))
          && steps.conformance.outcome == 'success'
          && !cancelled()
        run: |
          ./script/make_utils/check_installation_with_all_python.sh --version ${{ matrix.python_version }} --sync_env
      # Check installation with pip
      - name: Check installation with pip and python ${{ matrix.python_version }} (weekly, release)
        if: |
          (fromJSON(env.IS_WEEKLY) || fromJSON(env.IS_RELEASE))
          && steps.conformance.outcome == 'success'
          && !cancelled()
        run: |
          ./script/make_utils/check_installation_with_all_python.sh --version ${{ matrix.python_version }} --pip
      # Check installation with wheel
      - name: Check installation with wheel and python ${{ matrix.python_version }} (weekly, release)
        if: |
          (fromJSON(env.IS_WEEKLY) || fromJSON(env.IS_RELEASE))
          && steps.conformance.outcome == 'success'
          && !cancelled()
        run: |
          ./script/make_utils/check_installation_with_all_python.sh --version ${{ matrix.python_version }} --wheel
      # Check installation with git clone
      - name: Check installation with clone and python ${{ matrix.python_version }} (weekly, release)
        if: |
          (fromJSON(env.IS_WEEKLY) || fromJSON(env.IS_RELEASE))
          && steps.conformance.outcome == 'success'
          && !cancelled()
        run: |
          ./script/make_utils/check_installation_with_all_python.sh --version ${{ matrix.python_version }} --clone
  # This is to manage build matrices and have a single status point for PRs
  # This can be updated to take macOS into account but it is impractical because of long builds
  # and therefore expensive macOS testing
  linux-build-status:
    name: Linux build status
    needs: [build-linux]
    runs-on: ubuntu-20.04
    timeout-minutes: 2
    # Run even if build-linux failed or was skipped, so that PRs always get a status
    if: ${{ always() }}
    steps:
      - name: Fail on unsuccessful Linux build
        shell: bash
        run: |
          # Always succeed if the CI wasn't supposed to be launched for this event. The GitHub
          # expression below renders to the shell builtin 'true' or 'false' before the script runs
          if ${{ github.repository == 'zama-ai/concrete-ml-internal' && github.event_name == 'push' && github.ref == 'refs/heads/main' }}
          then
            exit 0
          fi
          # Otherwise, mirror the result of the Linux build job
          if [[ ${{ needs.build-linux.result }} != "success" ]]; then
            exit 1
          fi
      # Send a Slack notification when a previous step in this job failed ('!success()')
      - name: Slack Notification
        if: ${{ always() && !success() }}
        continue-on-error: true
        uses: rtCamp/action-slack-notify@4e5fb42d249be6a45a298f3c9543b111b02f7907
        env:
          SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
          SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
          SLACK_COLOR: ${{ needs.build-linux.result }}
          SLACK_MESSAGE: "Build finished with status ${{ needs.build-linux.result }}. (${{ env.ACTION_RUN_URL }})"
          SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
          SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
  # Stop the self-hosted EC2 runners once the Linux build is done (whatever its result)
  stop-runner-linux:
    name: Stop EC2 runner
    needs: [build-linux, start-runner-linux]
    runs-on: ubuntu-20.04
    timeout-minutes: 2
    # Always attempt to stop the runners, even on failure, but only when the start job
    # actually ran (otherwise there is nothing to stop)
    if: ${{ always() && (needs.start-runner-linux.result != 'skipped') }}
    steps:
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_REGION }}
      # Each runner is only stopped when its instance ID output is non-empty, i.e. when it
      # was actually started for this run
      - name: Stop EC2 runner python 38
        uses: machulav/ec2-github-runner@fcfb31a5760dad1314a64a0e172b78ec6fc8a17e
        if: ${{ always() && needs.start-runner-linux.outputs.ec2-instance-id-38 }}
        with:
          github-token: ${{ secrets.EC2_RUNNER_BOT_TOKEN }}
          label: ${{ needs.start-runner-linux.outputs.label-38 }}
          ec2-instance-id: ${{ needs.start-runner-linux.outputs.ec2-instance-id-38 }}
          mode: stop
      - name: Stop EC2 runner python 39
        uses: machulav/ec2-github-runner@fcfb31a5760dad1314a64a0e172b78ec6fc8a17e
        if: ${{ always() && needs.start-runner-linux.outputs.ec2-instance-id-39 }}
        with:
          github-token: ${{ secrets.EC2_RUNNER_BOT_TOKEN }}
          label: ${{ needs.start-runner-linux.outputs.label-39 }}
          ec2-instance-id: ${{ needs.start-runner-linux.outputs.ec2-instance-id-39 }}
          mode: stop
      - name: Stop EC2 runner python 310
        uses: machulav/ec2-github-runner@fcfb31a5760dad1314a64a0e172b78ec6fc8a17e
        if: ${{ always() && needs.start-runner-linux.outputs.ec2-instance-id-310 }}
        with:
          github-token: ${{ secrets.EC2_RUNNER_BOT_TOKEN }}
          label: ${{ needs.start-runner-linux.outputs.label-310 }}
          ec2-instance-id: ${{ needs.start-runner-linux.outputs.ec2-instance-id-310 }}
          mode: stop
build-macos: | |
needs: [matrix-preparation] | |
if: ${{ needs.matrix-preparation.outputs.macos-matrix != '[]' }} | |
runs-on: ${{ matrix.runs_on }} | |
defaults: | |
run: | |
shell: bash | |
strategy: | |
fail-fast: false | |
matrix: ${{ fromJSON(format('{{"include":{0}}}', needs.matrix-preparation.outputs.macos-matrix)) }} | |
env: | |
PIP_INDEX_URL: ${{ secrets.PIP_INDEX_URL }} | |
PIP_EXTRA_INDEX_URL: ${{ secrets.PIP_EXTRA_INDEX_URL }} | |
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} | |
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} | |
steps: | |
- name: Add masks | |
run: | | |
echo "::add-mask::${{ secrets.INTERNAL_PYPI_URL_FOR_MASK }}" | |
echo "::add-mask::${{ secrets.INTERNAL_REPO_URL_FOR_MASK }}" | |
# By default, `git clone` downloads all LFS files, which we want to avoid in CIs | |
- name: Disable LFS download by default | |
run: | | |
git lfs install --skip-smudge | |
# Checkout the code | |
- name: Checkout Code | |
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 | |
# Pull necessary LFS files (and thus avoid downloading files stored for benchmarks, use cases, ...) | |
- name: Pull LFS files | |
run: | | |
git lfs pull --include "tests/data/**, src/concrete/ml/pandas/_client_server_files/**" --exclude "" | |
- name: Set up Python ${{ matrix.python_version }} | |
uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d | |
with: | |
python-version: ${{ matrix.python_version }} | |
- name: Check python3 version | |
env: | |
SYSTEM_VERSION_COMPAT: 0 | |
run: | | |
which python3 | |
which pip3 | |
sw_vers | |
- name: Install dependencies | |
id: install-deps | |
env: | |
SYSTEM_VERSION_COMPAT: 0 | |
run: | | |
./script/make_utils/setup_os_deps.sh | |
mkdir -p ~/.aws | |
echo "[default]\nregion=eu-west-3\noutput=json\n" >> ~/.aws/config | |
which python3 | |
which pip3 | |
PATH="/usr/local/opt/make/libexec/gnubin:$PATH" | |
echo "PATH=${PATH}" >> "$GITHUB_ENV" | |
echo | |
echo "Using these tools:" | |
which python3 | |
which pip3 | |
echo | |
make setup_env | |
# macOS builds are already long, so we decide not to use --weekly on them, it could be | |
# changed. Remark also that, for mac, due to unexpected issues with GitHub, we have a | |
# slightly different way to launch pytest | |
- name: PyTest Source Code | |
run: | | |
make pytest_macOS_for_GitHub | |
  # Combine the results of all jobs and send a summary message to Slack, whatever happened
  send-report:
    if: ${{ always() }}
    timeout-minutes: 2
    needs:
      [
        matrix-preparation,
        start-runner-linux,
        build-linux,
        stop-runner-linux,
        build-macos,
      ]
    name: Send Slack notification
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      - name: Prepare whole job status
        if: ${{ always() }}
        continue-on-error: true
        env:
          NEEDS_JSON: ${{ toJSON(needs) }}
        run: |
          # Combine the results of all needed jobs into a single status string and export it
          # as JOB_STATUS for the notification step below
          echo "${NEEDS_JSON}" > /tmp/needs_context.json
          JOB_STATUS=$(python3 ./script/actions_utils/actions_combine_status.py \
            --needs_context_json /tmp/needs_context.json)
          echo "JOB_STATUS=${JOB_STATUS}" >> "$GITHUB_ENV"
      - name: Slack Notification
        if: ${{ always() }}
        continue-on-error: true
        uses: rtCamp/action-slack-notify@4e5fb42d249be6a45a298f3c9543b111b02f7907
        env:
          SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
          SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
          # Fall back to 'failure' if the previous step could not compute a combined status
          SLACK_COLOR: ${{ env.JOB_STATUS || 'failure' }}
          SLACK_MESSAGE: "Full run finished with status ${{ env.JOB_STATUS || 'failure' }} \
            (${{ env.ACTION_RUN_URL }})\n\
            - matrix-preparation: ${{ needs.matrix-preparation.result || 'Did not run.'}}\n\n\
            - start-runner-linux: ${{ needs.start-runner-linux.result || 'Did not run.'}}\n\n\
            - build-linux: ${{ needs.build-linux.result || 'Did not run.' }}\n\n\
            - stop-runner-linux: ${{ needs.stop-runner-linux.result || 'Did not run.'}}\n\n\
            - build-macos: ${{ needs.build-macos.result || 'Did not run.' }}"
          SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
          SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}