# chore(profiling): update relnote for endpoint stack v2 [backport 2.12] #50665
# Workflow file for this run.
# NOTE: this file was captured from a rendered web view; the original page warned that
# it may contain hidden bidirectional Unicode characters — review in an editor that reveals them.
name: System Tests

# Run on pushes to main, on every pull request, on manual dispatch,
# and nightly (04:00 UTC, Tuesday through Saturday).
on:
  push:
    branches:
      - main
  pull_request:
  workflow_dispatch: {}
  schedule:
    - cron: '00 04 * * 2-6'
jobs:
  # Gate job: decides whether the (expensive) system tests need to run for this
  # change set. Downstream jobs read `outputs.outcome`; scheduled runs bypass the gate.
  needs-run:
    runs-on: ubuntu-latest
    outputs:
      outcome: ${{ steps.run_needed.outcome }}
    steps:
      - uses: actions/checkout@v4
      - id: run_needed
        name: Check if run is needed
        # Diff against the PR base (or the pushed SHA) and exit non-zero when no
        # changed path matches the patterns that affect system tests. The step is
        # marked continue-on-error so a "not needed" result is recorded as the
        # step outcome ('failure') rather than failing the job.
        run: |
          git fetch origin ${{ github.event.pull_request.base.sha || github.sha }}
          export PATHS=$(git diff --name-only HEAD ${{ github.event.pull_request.base.sha || github.sha }})
          python -c "import os,sys,fnmatch;sys.exit(not bool([_ for pattern in {'ddtrace/*', 'setup*', 'pyproject.toml', '.github/workflows/system-tests.yml'} for _ in fnmatch.filter(os.environ['PATHS'].splitlines(), pattern)]))"
        continue-on-error: true
system-tests-build: | |
runs-on: ubuntu-latest | |
needs: needs-run | |
strategy: | |
matrix: | |
include: | |
- weblog-variant: flask-poc | |
- weblog-variant: uwsgi-poc | |
- weblog-variant: django-poc | |
- weblog-variant: fastapi | |
# runs django-poc for 3.12 | |
- weblog-variant: python3.12 | |
fail-fast: false | |
env: | |
TEST_LIBRARY: python | |
WEBLOG_VARIANT: ${{ matrix.weblog-variant }} | |
# system-tests requires an API_KEY, but it does not have to be a valid key, as long as we don't run a scenario | |
# that make assertion on backend data. Using a fake key allow to run system tests on PR originating from forks. | |
# If ever it's needed, a valid key exists in the repo, using ${{ secrets.DD_API_KEY }} | |
DD_API_KEY: 1234567890abcdef1234567890abcdef | |
CMAKE_BUILD_PARALLEL_LEVEL: 12 | |
steps: | |
- name: Setup python 3.12 | |
if: needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule' | |
uses: actions/setup-python@v5 | |
with: | |
python-version: '3.12' | |
- name: Checkout system tests | |
if: needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule' | |
uses: actions/checkout@v4 | |
with: | |
repository: 'DataDog/system-tests' | |
- name: Checkout dd-trace-py | |
if: needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule' | |
uses: actions/checkout@v4 | |
with: | |
path: 'binaries/dd-trace-py' | |
fetch-depth: 0 | |
# NB this ref is necessary to keep the checkout out of detached HEAD state, which setuptools_scm requires for | |
# proper version guessing | |
ref: ${{ github.event.pull_request.head.sha || github.sha }} | |
- name: Build | |
id: build | |
if: needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule' | |
run: ./build.sh | |
- name: Save | |
id: save | |
if: needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule' | |
run: | | |
docker image save system_tests/weblog:latest | gzip > ${{ matrix.weblog-variant}}_weblog_${{ github.sha }}.tar.gz | |
docker image save system_tests/agent:latest | gzip > ${{ matrix.weblog-variant}}_agent_${{ github.sha }}.tar.gz | |
- uses: actions/upload-artifact@v4 | |
if: needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule' | |
with: | |
name: ${{ matrix.weblog-variant }}_${{ github.sha }} | |
path: | | |
${{ matrix.weblog-variant}}_weblog_${{ github.sha }}.tar.gz | |
${{ matrix.weblog-variant}}_agent_${{ github.sha }}.tar.gz | |
venv | |
retention-days: 2 | |
system-tests: | |
runs-on: ubuntu-latest | |
needs: [needs-run, system-tests-build] | |
strategy: | |
matrix: | |
weblog-variant: [flask-poc, uwsgi-poc , django-poc, fastapi, python3.12] | |
scenario: [remote-config, appsec, appsec-1, other] | |
fail-fast: false | |
env: | |
TEST_LIBRARY: python | |
WEBLOG_VARIANT: ${{ matrix.weblog-variant }} | |
# system-tests requires an API_KEY, but it does not have to be a valid key, as long as we don't run a scenario | |
# that make assertion on backend data. Using a fake key allow to run system tests on PR originating from forks. | |
# If ever it's needed, a valid key exists in the repo, using ${{ secrets.DD_API_KEY }} | |
DD_API_KEY: 1234567890abcdef1234567890abcdef | |
CMAKE_BUILD_PARALLEL_LEVEL: 12 | |
steps: | |
- name: Setup python 3.12 | |
if: needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule' | |
uses: actions/setup-python@v5 | |
with: | |
python-version: '3.12' | |
- name: Checkout system tests | |
if: needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule' | |
uses: actions/checkout@v4 | |
with: | |
repository: 'DataDog/system-tests' | |
- uses: actions/download-artifact@v4 | |
if: needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule' | |
with: | |
name: ${{ matrix.weblog-variant }}_${{ github.sha }} | |
path: ${{ matrix.weblog-variant}}_${{ github.sha }}.tar.gz | |
- name: docker load | |
if: needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule' | |
id: docker_load | |
run: | | |
docker load < ${{ matrix.weblog-variant}}_${{ github.sha }}.tar.gz/${{ matrix.weblog-variant}}_weblog_${{ github.sha }}.tar.gz | |
docker load < ${{ matrix.weblog-variant}}_${{ github.sha }}.tar.gz/${{ matrix.weblog-variant}}_agent_${{ github.sha }}.tar.gz | |
- name: move venv | |
if: needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule' | |
run: | | |
mv ${{ matrix.weblog-variant}}_${{ github.sha }}.tar.gz/venv venv | |
chmod -R +x venv/bin/* | |
- name: Run DEFAULT | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'other' | |
run: ./run.sh DEFAULT | |
- name: Run SAMPLING | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'other' | |
run: ./run.sh SAMPLING | |
- name: Run INTEGRATIONS | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'other' | |
run: ./run.sh INTEGRATIONS | |
- name: Run CROSSED_TRACING_LIBRARIES | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'other' | |
run: ./run.sh CROSSED_TRACING_LIBRARIES | |
- name: Run REMOTE_CONFIG_MOCKED_BACKEND_ASM_FEATURES | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'remote-config' | |
run: ./run.sh REMOTE_CONFIG_MOCKED_BACKEND_ASM_FEATURES | |
- name: Run REMOTE_CONFIG_MOCKED_BACKEND_LIVE_DEBUGGING | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'remote-config' | |
run: ./run.sh REMOTE_CONFIG_MOCKED_BACKEND_LIVE_DEBUGGING | |
- name: Run REMOTE_CONFIG_MOCKED_BACKEND_ASM_DD | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'remote-config' | |
run: ./run.sh REMOTE_CONFIG_MOCKED_BACKEND_ASM_DD | |
- name: Run APPSEC_MISSING_RULES | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'appsec' | |
run: ./run.sh APPSEC_MISSING_RULES | |
- name: Run APPSEC_CUSTOM_RULES | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'appsec' | |
run: ./run.sh APPSEC_CUSTOM_RULES | |
- name: Run APPSEC_CORRUPTED_RULES | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'appsec' | |
run: ./run.sh APPSEC_CORRUPTED_RULES | |
- name: Run APPSEC_RULES_MONITORING_WITH_ERRORS | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'appsec' | |
run: ./run.sh APPSEC_RULES_MONITORING_WITH_ERRORS | |
- name: Run APPSEC_LOW_WAF_TIMEOUT | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'appsec' | |
run: ./run.sh APPSEC_LOW_WAF_TIMEOUT | |
- name: Run APPSEC_CUSTOM_OBFUSCATION | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'appsec' | |
run: ./run.sh APPSEC_CUSTOM_OBFUSCATION | |
- name: Run APPSEC_RATE_LIMITER | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'appsec' | |
run: ./run.sh APPSEC_RATE_LIMITER | |
- name: Run APPSEC_RUNTIME_ACTIVATION | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'appsec-1' | |
run: ./run.sh APPSEC_RUNTIME_ACTIVATION | |
- name: Run APPSEC_WAF_TELEMETRY | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'appsec-1' | |
run: ./run.sh APPSEC_WAF_TELEMETRY | |
- name: Run APPSEC_DISABLED | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'appsec-1' | |
run: ./run.sh APPSEC_DISABLED | |
- name: Run APPSEC_BLOCKING | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'appsec-1' | |
run: ./run.sh APPSEC_BLOCKING | |
- name: Run APPSEC_BLOCKING_FULL_DENYLIST | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'appsec-1' | |
run: ./run.sh APPSEC_BLOCKING_FULL_DENYLIST | |
- name: Run APPSEC_REQUEST_BLOCKING | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'appsec-1' | |
run: ./run.sh APPSEC_REQUEST_BLOCKING | |
- name: Run APPSEC_RASP | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') && matrix.scenario == 'appsec-1' | |
run: ./run.sh APPSEC_RASP | |
# The compress step speed up a lot the upload artifact process | |
- name: Compress artifact | |
if: always() && steps.docker_load.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') | |
id: compress-artifact | |
run: tar -czvf artifact.tar.gz $(ls | grep logs) | |
- name: Upload artifact | |
uses: actions/upload-artifact@v4 | |
if: always() && steps.docker_load.outcome == 'success' && (steps.compress-artifact.outcome == 'success' || github.event_name == 'schedule') | |
with: | |
name: logs_${{ matrix.weblog-variant }}_${{ matrix.scenario }} | |
path: artifact.tar.gz | |
parametric: | |
runs-on: ubuntu-latest | |
needs: needs-run | |
env: | |
TEST_LIBRARY: python | |
steps: | |
- name: Checkout system tests | |
if: needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule' | |
uses: actions/checkout@v4 | |
with: | |
repository: 'DataDog/system-tests' | |
- name: Checkout dd-trace-py | |
if: needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule' | |
uses: actions/checkout@v4 | |
with: | |
path: 'binaries/dd-trace-py' | |
fetch-depth: 0 | |
ref: ${{ github.event.pull_request.head.sha || github.sha }} | |
- uses: actions/setup-python@v5 | |
if: needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule' | |
with: | |
python-version: '3.12' | |
- name: Build runner | |
id: build_runner | |
if: needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule' | |
run: ./build.sh -i runner | |
- name: Run | |
if: always() && steps.build_runner.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') | |
run: ./run.sh PARAMETRIC | |
- name: Compress artifact | |
if: always() && steps.build_runner.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') | |
run: tar -czvf artifact.tar.gz $(ls | grep logs) | |
- name: Upload artifact | |
uses: actions/upload-artifact@v4 | |
if: always() && steps.build_runner.outcome == 'success' && (needs.needs-run.outputs.outcome == 'success' || github.event_name == 'schedule') | |
with: | |
name: logs_parametric | |
path: artifact.tar.gz |