diff --git a/.github/workflows/ApplicationTesting.yml b/.github/workflows/ApplicationTesting.yml index 3f91d6a5..5196901c 100644 --- a/.github/workflows/ApplicationTesting.yml +++ b/.github/workflows/ApplicationTesting.yml @@ -89,7 +89,7 @@ jobs: uses: actions/checkout@v4 - name: 📥 Download artifacts '${{ inputs.wheel }}' from 'Package' job - uses: actions/download-artifact@v4 + uses: pyTooling/download-artifact@v4 with: name: ${{ inputs.wheel }} path: install @@ -255,9 +255,10 @@ jobs: - name: 📤 Upload 'TestReportSummary.xml' artifact if: inputs.apptest_xml_artifact != '' - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.apptest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }} - path: report/unit/TestReportSummary.xml + working-directory: report/unit + path: TestReportSummary.xml if-no-files-found: error retention-days: 1 diff --git a/.github/workflows/BuildTheDocs.yml b/.github/workflows/BuildTheDocs.yml index 44152e13..330cd3c9 100644 --- a/.github/workflows/BuildTheDocs.yml +++ b/.github/workflows/BuildTheDocs.yml @@ -50,10 +50,11 @@ jobs: - name: 📤 Upload 'documentation' artifacts if: inputs.artifact != '' - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.artifact }} - path: doc/_build/html + working-directory: doc/_build/html + path: '*' retention-days: 1 - name: '📓 Publish site to GitHub Pages' diff --git a/.github/workflows/CompletePipeline.yml b/.github/workflows/CompletePipeline.yml index 90d5b07c..9ee08935 100644 --- a/.github/workflows/CompletePipeline.yml +++ b/.github/workflows/CompletePipeline.yml @@ -1,3 +1,24 @@ +# ==================================================================================================================== # +# Authors: # +# Patrick Lehmann # +# # +# ==================================================================================================================== # +# Copyright 2020-2024 The pyTooling 
Authors # +# # +# Licensed under the Apache License, Version 2.0 (the "License"); # +# you may not use this file except in compliance with the License. # +# You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software # +# distributed under the License is distributed on an "AS IS" BASIS, # +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # +# See the License for the specific language governing permissions and # +# limitations under the License. # +# # +# SPDX-License-Identifier: Apache-2.0 # +# ==================================================================================================================== # name: Namespace Package on: @@ -138,6 +159,7 @@ jobs: DocCoverage: uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@main needs: + - ConfigParams - UnitTestingParams with: python_version: ${{ needs.UnitTestingParams.outputs.python_version }} @@ -195,8 +217,8 @@ jobs: Documentation: uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@main needs: - - UnitTestingParams - ConfigParams + - UnitTestingParams - PublishTestResults - PublishCoverageResults # - VerifyDocs diff --git a/.github/workflows/CoverageCollection.yml b/.github/workflows/CoverageCollection.yml index f7dab102..3bdf2725 100644 --- a/.github/workflows/CoverageCollection.yml +++ b/.github/workflows/CoverageCollection.yml @@ -76,6 +76,9 @@ jobs: - name: ⏬ Checkout repository uses: actions/checkout@v4 + with: + lfs: true + submodules: true - name: 🐍 Setup Python ${{ inputs.python_version }} uses: actions/setup-python@v5 @@ -160,16 +163,17 @@ jobs: - name: 📤 Upload 'Coverage Report' artifact continue-on-error: true - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.artifact }} - path: ${{ steps.getVariables.outputs.coverage_report_html_directory }} + working-directory: ${{ 
steps.getVariables.outputs.coverage_report_html_directory }} + path: '*' if-no-files-found: error retention-days: 1 - name: 📊 Publish coverage at CodeCov continue-on-error: true - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v5 with: files: ${{ steps.getVariables.outputs.coverage_report_xml }} flags: unittests diff --git a/.github/workflows/LaTeXDocumentation.yml b/.github/workflows/LaTeXDocumentation.yml index a12bc0a3..47d39337 100644 --- a/.github/workflows/LaTeXDocumentation.yml +++ b/.github/workflows/LaTeXDocumentation.yml @@ -50,7 +50,7 @@ jobs: runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}" steps: - name: 📥 Download artifacts '${{ inputs.latex_artifact }}' from 'SphinxDocumentation' job - uses: actions/download-artifact@v4 + uses: pyTooling/download-artifact@v4 with: name: ${{ inputs.latex_artifact }} path: latex @@ -62,7 +62,7 @@ jobs: root_file: ${{ inputs.document }}.tex - name: 📤 Upload 'PDF Documentation' artifact - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 if: inputs.pdf_artifact != '' with: name: ${{ inputs.pdf_artifact }} diff --git a/.github/workflows/NightlyRelease.yml b/.github/workflows/NightlyRelease.yml new file mode 100644 index 00000000..079b60b9 --- /dev/null +++ b/.github/workflows/NightlyRelease.yml @@ -0,0 +1,387 @@ +# ==================================================================================================================== # +# Authors: # +# Patrick Lehmann # +# # +# ==================================================================================================================== # +# Copyright 2020-2024 The pyTooling Authors # +# # +# Licensed under the Apache License, Version 2.0 (the "License"); # +# you may not use this file except in compliance with the License. 
# +# You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software # +# distributed under the License is distributed on an "AS IS" BASIS, # +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # +# See the License for the specific language governing permissions and # +# limitations under the License. # +# # +# SPDX-License-Identifier: Apache-2.0 # +# ==================================================================================================================== # +name: Nightly + +on: + workflow_call: + inputs: + ubuntu_image: + description: 'Name of the Ubuntu image.' + required: false + default: 'ubuntu-24.04' + type: string + nightly_name: + description: 'Name of the nightly release.' + required: false + default: 'nightly' + type: string + nightly_title: + description: 'Title of the nightly release.' + required: false + default: '' + type: string + nightly_description: + description: 'Description of the nightly release.' + required: false + default: 'Release of artifacts from latest CI pipeline.' + type: string + draft: + description: 'Specify if this is a draft.' + required: false + default: false + type: boolean + prerelease: + description: 'Specify if this is a pre-release.' + required: false + default: false + type: boolean + latest: + description: 'Specify if this is the latest release.' + required: false + default: false + type: boolean + replacements: + description: 'Multi-line string containing search=replace patterns.' + required: false + default: '' + type: string + assets: + description: 'Multi-line string containing artifact:file:title asset descriptions.' 
+ required: true + type: string + +jobs: + Release: + name: 📝 Update 'Nightly Page' on GitHub + runs-on: ${{ inputs.ubuntu_image }} + permissions: + contents: write + actions: write +# attestations: write + + steps: + - name: ⏬ Checkout repository + uses: actions/checkout@v4 + with: + # The command 'git describe' (used for version) needs the history. + fetch-depth: 0 + + - name: 🔧 Install zstd + run: sudo apt-get install -y --no-install-recommends zstd + + - name: 📑 Delete (old) Release Page + id: deleteReleasePage + run: | + set +e + + ANSI_LIGHT_RED="\e[91m" + ANSI_LIGHT_GREEN="\e[92m" + ANSI_LIGHT_YELLOW="\e[93m" + ANSI_NOCOLOR="\e[0m" + + export GH_TOKEN=${{ github.token }} + + echo -n "Deleting release '${{ inputs.nightly_name }}' ... " + message="$(gh release delete ${{ inputs.nightly_name }} --yes 2>&1)" + if [[ $? -eq 0 ]]; then + echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + elif [[ "${message}" == "release not found" ]]; then + echo -e "${ANSI_LIGHT_YELLOW}[NOT FOUND]${ANSI_NOCOLOR}" + else + echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}" + echo -e "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}" + echo "::error title=InternalError::Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'." + exit 1 + fi + + - name: 📑 (Re)create (new) Release Page + id: createReleasePage + run: | + set +e + + ANSI_LIGHT_RED="\e[91m" + ANSI_LIGHT_GREEN="\e[92m" + ANSI_NOCOLOR="\e[0m" + + export GH_TOKEN=${{ github.token }} + + addDraft="--draft" + + if ${{ inputs.prerelease }}; then + addPreRelease="--prerelease" + fi + + if ! 
${{ inputs.latest }}; then + addLatest="--latest=false" + fi + + if [[ "${{ inputs.nightly_title }}" != "" ]]; then + addTitle=("--title" "${{ inputs.nightly_title }}") + fi + + cat <<'EOF' > __NoTeS__.md + ${{ inputs.nightly_description }} + EOF + if [[ -s __NoTeS__.md ]]; then + addNotes=("--notes-file" "__NoTeS__.md") + fi + + # Apply replacements + while IFS=$'\r\n' read -r patternLine; do + # skip empty lines + [[ "$patternLine" == "" ]] && continue + + pattern="${patternLine%%=*}" + replacement="${patternLine#*=}" + sed -i -e "s/%$pattern%/$replacement/g" "__NoTeS__.md" + done <<<'${{ inputs.replacements }}' + + # Add footer line + cat <<EOF >> __NoTeS__.md + + -------- + Published from [${{ github.workflow }}](https://github.com/Paebbels/ghdl/actions/runs/${{ github.run_id }}) workflow triggered by @${{ github.actor }} on $(date '+%Y-%m-%d %H:%M:%S'). + EOF + + echo "Creating release '${{ inputs.nightly_name }}' ... " + message="$(gh release create "${{ inputs.nightly_name }}" --verify-tag $addDraft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)" + if [[ $? -eq 0 ]]; then + echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + else + echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}" + echo -e "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}" + echo "::error title=InternalError::Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'."
+ exit 1 + fi + + - name: 📥 Download artifacts and upload as assets + id: uploadAssets + run: | + set +e + + ANSI_LIGHT_RED="\e[91m" + ANSI_LIGHT_GREEN="\e[92m" + ANSI_LIGHT_YELLOW="\e[93m" + ANSI_NOCOLOR="\e[0m" + + export GH_TOKEN=${{ github.token }} + + Replace() { + line="$1" + while IFS=$'\r\n' read -r patternLine; do + # skip empty lines + [[ "$patternLine" == "" ]] && continue + + pattern="${patternLine%%=*}" + replacement="${patternLine#*=}" + line="${line//"%$pattern%"/"$replacement"}" + done <<<'${{ inputs.replacements }}' + echo "$line" + } + + ERRORS=0 + # A dictionary of 0/1 to avoid duplicate downloads + declare -A downloadedArtifacts + # A dictionary to check for duplicate asset files in release + declare -A assetFilenames + while IFS=$'\r\n' read -r assetLine; do + if [[ "${assetLine}" == "" ]]; then + continue + fi + + # split assetLine colon separated triple: artifact:asset:title + artifact="${assetLine%%:*}" + remaining="${assetLine#*:}" + asset="${remaining%%:*}" + title="${remaining##*:}" + + # remove leading whitespace + asset="${asset#"${asset%%[![:space:]]*}"}" + title="${title#"${title%%[![:space:]]*}"}" + + # apply replacements + asset="$(Replace "${asset}")" + title="$(Replace "${title}")" + + echo "Publish asset '${asset}' from artifact '${artifact}' with title '${title}'" + echo -n " Checked asset for duplicates ... " + if [[ -n "${assetFilenames[$asset]}" ]]; then + echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" + echo "::error title=DuplicateAsset::Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'." + ERRORS=1 + continue + else + echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + assetFilenames[$asset]=1 + fi + + # Download artifact by artifact name + if [[ -n "${downloadedArtifacts[$artifact]}" ]]; then + echo -e " downloading '${artifact}' ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}" + else + echo " downloading '${artifact}' ... 
" + echo -n " gh run download $GITHUB_RUN_ID --dir \"${artifact}\" --name \"${artifact}\" " + gh run download $GITHUB_RUN_ID --dir "${artifact}" --name "${artifact}" + if [[ $? -eq 0 ]]; then + echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + else + echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" + echo -e "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}" + echo "::error title=ArtifactNotFound::Couldn't download artifact '${artifact}'." + ERRORS=1 + continue + fi + downloadedArtifacts[$artifact]=1 + fi + + # Check if artifact should be compressed (zip, tgz) or if asset was part of the downloaded artifact. + echo -n " checking asset '${artifact}/${asset}' ... " + if [[ "${asset}" == !*.zip ]]; then + echo -e "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}" + asset="${asset##*!}" + echo " Compressing artifact '${artifact}' to '${asset}' ..." + ( + cd "${artifact}" && \ + zip -r "../${asset}" * + ) + if [[ $? -eq 0 ]]; then + echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + uploadFile="${asset}" + else + echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" + echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}" + echo "::error title=CompressionError::Couldn't compress '${artifact}' to zip file '${asset}'." + ERRORS=1 + continue + fi + elif [[ "${asset}" == !*.tgz || "${asset}" == !*.tar.gz || "${asset}" == \$*.tgz || "${asset}" == \$*.tar.gz ]]; then + echo -e "${ANSI_LIGHT_GREEN}[TAR/GZ]${ANSI_NOCOLOR}" + + if [[ "${asset:0:1}" == "\$" ]]; then + asset="${asset##*$}" + dirName="${asset%.*}" + echo " Compressing artifact '${artifact}' to '${asset}' ..." + tar -c --gzip --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" . + retCode=$? + else + asset="${asset##*!}" + echo " Compressing artifact '${artifact}' to '${asset}' ..." + ( + cd "${artifact}" && \ + tar -c --gzip --file="../${asset}" * + ) + retCode=$? 
+ fi + + if [[ $retCode -eq 0 ]]; then + echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + uploadFile="${asset}" + else + echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" + echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}" + echo "::error title=CompressionError::Couldn't compress '${artifact}' to tgz file '${asset}'." + ERRORS=1 + continue + fi + elif [[ "${asset}" == !*.tzst || "${asset}" == !*.tar.zst || "${asset}" == \$*.tzst || "${asset}" == \$*.tar.zst ]]; then + echo -e "${ANSI_LIGHT_GREEN}[ZST]${ANSI_NOCOLOR}" + + if [[ "${asset:0:1}" == "\$" ]]; then + asset="${asset##*$}" + dirName="${asset%.*}" + echo " Compressing artifact '${artifact}' to '${asset}' ..." + tar -c --zstd --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" . + retCode=$? + else + asset="${asset##*!}" + echo " Compressing artifact '${artifact}' to '${asset}' ..." + ( + cd "${artifact}" && \ + tar -c --zstd --file="../${asset}" * + ) + retCode=$? + fi + + if [[ $retCode -eq 0 ]]; then + echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + uploadFile="${asset}" + else + echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" + echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}" + echo "::error title=CompressionError::Couldn't compress '${artifact}' to zst file '${asset}'." + ERRORS=1 + continue + fi + elif [[ -e "${artifact}/${asset}" ]]; then + echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + uploadFile="${artifact}/${asset}" + else + echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" + echo -e "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}" + echo "::error title=FileNotFound::Couldn't find asset '${asset}' in artifact '${artifact}'." 
+ ERRORS=1 + continue + fi + + # Upload asset to existing release page + echo -n " uploading asset '${asset}' from '${uploadFile}' with title '${title}' ... " + gh release upload ${{ inputs.nightly_name }} "${uploadFile}#${title}" --clobber + if [[ $? -eq 0 ]]; then + echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + else + echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" + echo -e "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}" + echo "::error title=UploadError::Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'." + ERRORS=1 + continue + fi + done <<<'${{ inputs.assets }}' + + echo "Inspecting downloaded artifacts ..." + tree -L 3 . + + if [[ $ERRORS -ne 0 ]]; then + echo -e "${ANSI_LIGHT_RED}Errors detected in previous steps.${ANSI_NOCOLOR}" + exit 1 + fi + + - name: 📑 Remove draft state from Release Page + if: ${{ ! inputs.draft }} + run: | + set +e + + ANSI_LIGHT_RED="\e[91m" + ANSI_LIGHT_GREEN="\e[92m" + ANSI_NOCOLOR="\e[0m" + + export GH_TOKEN=${{ github.token }} + + # Remove draft-state from release page + echo -n "Remove draft-state from release '${{ inputs.nightly_name }}' ... " + gh release edit --draft=false "${{ inputs.nightly_name }}" + if [[ $? -eq 0 ]]; then + echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + else + echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" + echo -e "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}" + echo "::error title=ReleasePage::Couldn't remove draft-state from release '${{ inputs.nightly_name }}'."
+ fi diff --git a/.github/workflows/Package.yml b/.github/workflows/Package.yml index 9a60d1aa..d16e9f78 100644 --- a/.github/workflows/Package.yml +++ b/.github/workflows/Package.yml @@ -54,6 +54,9 @@ jobs: steps: - name: ⏬ Checkout repository uses: actions/checkout@v4 + with: + lfs: true + submodules: true - name: 🐍 Setup Python ${{ inputs.python_version }} uses: actions/setup-python@v5 @@ -103,9 +106,10 @@ jobs: run: python setup.py bdist_wheel - name: 📤 Upload wheel artifact - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.artifact }} - path: dist/ + working-directory: dist + path: '*' if-no-files-found: error retention-days: 1 diff --git a/.github/workflows/Parameters.yml b/.github/workflows/Parameters.yml index ca02b745..82cb950e 100644 --- a/.github/workflows/Parameters.yml +++ b/.github/workflows/Parameters.yml @@ -83,7 +83,7 @@ on: windows_image: description: 'The used GitHub Action image for Windows based jobs.' required: false - default: 'windows-latest' + default: 'windows-2022' type: string macos_intel_image: description: 'The used GitHub Action image for macOS (Intel x86-64) based jobs.' @@ -93,7 +93,7 @@ on: macos_arm_image: description: 'The used GitHub Action image for macOS (ARM aarch64) based jobs.' required: false - default: 'macos-latest' + default: 'macos-14' type: string outputs: diff --git a/.github/workflows/PublishCoverageResults.yml b/.github/workflows/PublishCoverageResults.yml index 5365e469..03b0cc30 100644 --- a/.github/workflows/PublishCoverageResults.yml +++ b/.github/workflows/PublishCoverageResults.yml @@ -29,6 +29,10 @@ on: required: false default: '24.04' type: string + coverage_artifacts_pattern: + required: false + default: '*-CodeCoverage-*' + type: string coverage_config: description: 'Path to the .coveragerc file. Use pyproject.toml by default.' 
required: false @@ -68,12 +72,20 @@ jobs: steps: - name: ⏬ Checkout repository uses: actions/checkout@v4 + with: + lfs: true + submodules: true - name: Download Artifacts - uses: actions/download-artifact@v4 + uses: pyTooling/download-artifact@v4 with: + pattern: ${{ inputs.coverage_artifacts_pattern }} path: artifacts + - name: 🔎 Inspect extracted artifact (tarball) + run: | + tree -psh artifacts + - name: 🔧 Install coverage and tomli run: | python -m pip install -U --disable-pip-version-check --break-system-packages coverage[toml] tomli @@ -170,7 +182,7 @@ jobs: - name: 📤 Upload 'Coverage SQLite Database' artifact if: inputs.coverage_sqlite_artifact != '' continue-on-error: true - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.coverage_sqlite_artifact }} path: .coverage @@ -180,7 +192,7 @@ jobs: - name: 📤 Upload 'Coverage XML Report' artifact if: inputs.coverage_xml_artifact != '' continue-on-error: true - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.coverage_xml_artifact }} path: ${{ steps.getVariables.outputs.coverage_report_xml }} @@ -190,7 +202,7 @@ jobs: - name: 📤 Upload 'Coverage JSON Report' artifact if: inputs.coverage_json_artifact != '' continue-on-error: true - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.coverage_json_artifact }} path: ${{ steps.getVariables.outputs.coverage_report_json }} @@ -200,17 +212,18 @@ jobs: - name: 📤 Upload 'Coverage HTML Report' artifact if: inputs.coverage_html_artifact != '' continue-on-error: true - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.coverage_html_artifact }} - path: ${{ steps.getVariables.outputs.coverage_report_html_directory }} + working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }} + path: '*' if-no-files-found: error retention-days: 1 - name: 📊 Publish code coverage at CodeCov if: 
inputs.CodeCov == true continue-on-error: true - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@v5 with: files: ${{ steps.getVariables.outputs.coverage_report_xml }} flags: unittests diff --git a/.github/workflows/PublishOnPyPI.yml b/.github/workflows/PublishOnPyPI.yml index 637fe007..030cf312 100644 --- a/.github/workflows/PublishOnPyPI.yml +++ b/.github/workflows/PublishOnPyPI.yml @@ -57,10 +57,10 @@ jobs: steps: - name: 📥 Download artifacts '${{ inputs.artifact }}' from 'Package' job - uses: actions/download-artifact@v4 + uses: pyTooling/download-artifact@v4 with: name: ${{ inputs.artifact }} - path: dist/ + path: dist - name: 🐍 Setup Python ${{ inputs.python_version }} uses: actions/setup-python@v5 diff --git a/.github/workflows/PublishTestResults.yml b/.github/workflows/PublishTestResults.yml index e7447d9a..91fca353 100644 --- a/.github/workflows/PublishTestResults.yml +++ b/.github/workflows/PublishTestResults.yml @@ -30,6 +30,10 @@ on: required: false default: '24.04' type: string + unittest_artifacts_pattern: + required: false + default: '*-UnitTestReportSummary-*' + type: string merged_junit_artifact: description: 'Name of the merged JUnit Test Summary artifact.' 
required: false @@ -40,6 +44,11 @@ on: required: false default: '"--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit"' type: string + publish: + description: 'Publish test report summary via Dorny Test-Reporter' + required: false + default: true + type: boolean report_title: description: 'Title of the summary report in the pipeline''s sidebar' required: false @@ -57,10 +66,15 @@ jobs: uses: actions/checkout@v4 - name: Download Artifacts - uses: actions/download-artifact@v4 + uses: pyTooling/download-artifact@v4 with: + pattern: ${{ inputs.unittest_artifacts_pattern }} path: artifacts + - name: 🔎 Inspect extracted artifact (tarball) + run: | + tree -psh artifacts + - name: 🔧 Install pyEDAA.Reports (JUunit Parser and Merger) run: | python -m pip install --disable-pip-version-check --break-system-packages -U pyEDAA.Reports @@ -80,6 +94,7 @@ jobs: - name: 📊 Publish Unit Test Results uses: dorny/test-reporter@v1 + if: inputs.publish && inputs.report_title != '' with: name: ${{ inputs.report_title }} path: Unittesting.xml @@ -87,7 +102,7 @@ jobs: - name: 📤 Upload merged 'JUnit Test Summary' artifact if: inputs.merged_junit_artifact != '' - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.merged_junit_artifact }} path: Unittesting.xml diff --git a/.github/workflows/PublishToGitHubPages.yml b/.github/workflows/PublishToGitHubPages.yml index 6fbf3201..ef53cf27 100644 --- a/.github/workflows/PublishToGitHubPages.yml +++ b/.github/workflows/PublishToGitHubPages.yml @@ -56,21 +56,21 @@ jobs: uses: actions/checkout@v4 - name: 📥 Download artifacts '${{ inputs.doc }}' from 'BuildTheDocs' job - uses: actions/download-artifact@v4 + uses: pyTooling/download-artifact@v4 with: name: ${{ inputs.doc }} path: public - name: 📥 Download artifacts '${{ inputs.coverage }}' from 'Coverage' job if: ${{ inputs.coverage != '' }} - uses: actions/download-artifact@v4 + uses: pyTooling/download-artifact@v4 with: name: ${{ inputs.coverage }} path: 
public/coverage - name: 📥 Download artifacts '${{ inputs.typing }}' from 'StaticTypeCheck' job if: ${{ inputs.typing != '' }} - uses: actions/download-artifact@v4 + uses: pyTooling/download-artifact@v4 with: name: ${{ inputs.typing }} path: public/typing diff --git a/.github/workflows/SphinxDocumentation.yml b/.github/workflows/SphinxDocumentation.yml index ed10ac22..8a6e7308 100644 --- a/.github/workflows/SphinxDocumentation.yml +++ b/.github/workflows/SphinxDocumentation.yml @@ -82,6 +82,9 @@ jobs: steps: - name: ⏬ Checkout repository uses: actions/checkout@v4 + with: + lfs: true + submodules: true - name: 🔧 Install graphviz run: sudo apt-get install -y --no-install-recommends graphviz @@ -98,14 +101,14 @@ jobs: - name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job if: inputs.unittest_xml_artifact != '' - uses: actions/download-artifact@v4 + uses: pyTooling/download-artifact@v4 with: name: ${{ inputs.unittest_xml_artifact }} path: ${{ inputs.unittest_xml_directory }} - name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job if: inputs.coverage_json_artifact != '' - uses: actions/download-artifact@v4 + uses: pyTooling/download-artifact@v4 with: name: ${{ inputs.coverage_json_artifact }} path: ${{ inputs.coverage_report_json_directory }} @@ -121,10 +124,11 @@ jobs: - name: 📤 Upload 'HTML Documentation' artifact if: inputs.html_artifact != '' continue-on-error: true - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.html_artifact }} - path: ${{ inputs.doc_directory }}/_build/html + working-directory: ${{ inputs.doc_directory }}/_build/html + path: '*' if-no-files-found: error retention-days: 1 @@ -135,6 +139,9 @@ jobs: steps: - name: ⏬ Checkout repository uses: actions/checkout@v4 + with: + lfs: true + submodules: true - name: 🔧 Install graphviz run: sudo apt-get install -y --no-install-recommends graphviz @@ -151,14 +158,14 @@ jobs: - name: 📥 
Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job if: inputs.unittest_xml_artifact != '' - uses: actions/download-artifact@v4 + uses: pyTooling/download-artifact@v4 with: name: ${{ inputs.unittest_xml_artifact }} path: ${{ inputs.unittest_xml_directory }} - name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job if: inputs.coverage_json_artifact != '' - uses: actions/download-artifact@v4 + uses: pyTooling/download-artifact@v4 with: name: ${{ inputs.coverage_json_artifact }} path: ${{ inputs.coverage_report_json_directory }} @@ -176,9 +183,10 @@ jobs: - name: 📤 Upload 'LaTeX Documentation' artifact if: inputs.latex_artifact != '' continue-on-error: true - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.latex_artifact }} - path: ${{ inputs.doc_directory }}/_build/latex + working-directory: ${{ inputs.doc_directory }}/_build/latex + path: '*' if-no-files-found: error retention-days: 1 diff --git a/.github/workflows/StaticTypeCheck.yml b/.github/workflows/StaticTypeCheck.yml index feb8c69a..7554a10d 100644 --- a/.github/workflows/StaticTypeCheck.yml +++ b/.github/workflows/StaticTypeCheck.yml @@ -89,17 +89,18 @@ jobs: - name: 📤 Upload 'Static Typing Report' HTML artifact if: ${{ inputs.html_artifact != '' }} continue-on-error: true - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.html_artifact }} - path: ${{ inputs.html_report }} + working-directory: ${{ inputs.html_report }} + path: '*' if-no-files-found: error retention-days: 1 - name: 📤 Upload 'Static Typing Report' JUnit artifact if: ${{ inputs.junit_artifact != '' }} continue-on-error: true - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.junit_artifact }} path: ${{ inputs.junit_report }} diff --git a/.github/workflows/UnitTesting.yml b/.github/workflows/UnitTesting.yml index 81332592..7b32f23e 100644 --- 
a/.github/workflows/UnitTesting.yml +++ b/.github/workflows/UnitTesting.yml @@ -147,6 +147,9 @@ jobs: steps: - name: ⏬ Checkout repository uses: actions/checkout@v4 + with: + lfs: true + submodules: true # Package Manager steps - name: 🔧 Install homebrew dependencies on macOS @@ -439,17 +442,18 @@ jobs: - name: 📤 Upload 'TestReportSummary.xml' artifact if: inputs.unittest_xml_artifact != '' continue-on-error: true - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.unittest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }} - path: report/unit/TestReportSummary.xml + working-directory: report/unit + path: TestReportSummary.xml if-no-files-found: error retention-days: 1 # - name: 📤 Upload 'Unit Tests HTML Report' artifact # if: inputs.unittest_html_artifact != '' # continue-on-error: true -# uses: actions/upload-artifact@v4 +# uses: pyTooling/upload-artifact@v4 # with: # name: ${{ inputs.unittest_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }} # path: ${{ steps.getVariables.outputs.unittest_report_html_directory }} @@ -459,7 +463,7 @@ jobs: - name: 📤 Upload 'Coverage SQLite Database' artifact if: inputs.coverage_sqlite_artifact != '' continue-on-error: true - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.coverage_sqlite_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }} path: .coverage @@ -470,7 +474,7 @@ jobs: - name: 📤 Upload 'Coverage XML Report' artifact if: inputs.coverage_xml_artifact != '' continue-on-error: true - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.coverage_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }} path: ${{ steps.getVariables.outputs.coverage_report_xml }} @@ -480,7 +484,7 @@ jobs: - name: 📤 Upload 'Coverage JSON Report' artifact if: inputs.coverage_json_artifact != '' continue-on-error: 
true - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.coverage_json_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }} path: ${{ steps.getVariables.outputs.coverage_report_json }} @@ -490,9 +494,10 @@ jobs: - name: 📤 Upload 'Coverage HTML Report' artifact if: inputs.coverage_html_artifact != '' continue-on-error: true - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.coverage_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }} - path: ${{ steps.getVariables.outputs.coverage_report_html_directory }} + working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }} + path: '*' if-no-files-found: error retention-days: 1 diff --git a/.github/workflows/_Checking_ArtifactCleanup.yml b/.github/workflows/_Checking_ArtifactCleanup.yml index d4686a9a..1b56220a 100644 --- a/.github/workflows/_Checking_ArtifactCleanup.yml +++ b/.github/workflows/_Checking_ArtifactCleanup.yml @@ -25,7 +25,7 @@ jobs: run: echo "${{ matrix.runs-on }}-${{ matrix.python }}" >> artifact.txt - name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }} - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-${{ matrix.system }}-${{ matrix.python }} path: artifact.txt @@ -42,7 +42,7 @@ jobs: run: echo "Package" >> package.txt - name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }} - uses: actions/upload-artifact@v4 + uses: pyTooling/upload-artifact@v4 with: name: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }} path: package.txt diff --git a/.github/workflows/_Checking_JobTemplates.yml b/.github/workflows/_Checking_JobTemplates.yml index 16aff7c3..4031fa0d 100644 --- a/.github/workflows/_Checking_JobTemplates.yml +++ b/.github/workflows/_Checking_JobTemplates.yml @@ -7,8 +7,6 @@ on: jobs: ConfigParams: 
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@main - needs: - - DocCoverage with: package_name: pyDummy @@ -80,10 +78,11 @@ jobs: DocCoverage: uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r1 needs: + - ConfigParams - UnitTestingParams with: python_version: ${{ needs.UnitTestingParams.outputs.python_version }} - directory: sphinx_reports + directory: ${{ needs.ConfigParams.outputs.package_directory }} # fail_below: 70 Package: @@ -130,8 +129,8 @@ jobs: Documentation: uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@main needs: - - UnitTestingParams - ConfigParams + - UnitTestingParams - PublishTestResults - PublishCoverageResults # - VerifyDocs diff --git a/.github/workflows/_Checking_NamespacePackage_Pipeline.yml b/.github/workflows/_Checking_NamespacePackage_Pipeline.yml index 8dd54cfd..f673f1d9 100644 --- a/.github/workflows/_Checking_NamespacePackage_Pipeline.yml +++ b/.github/workflows/_Checking_NamespacePackage_Pipeline.yml @@ -1,4 +1,4 @@ -name: Verification of Pipeline Templates +name: Verification of Pipeline Templates (Namespace Package) on: push: diff --git a/.github/workflows/_Checking_Nightly.yml b/.github/workflows/_Checking_Nightly.yml new file mode 100644 index 00000000..1e43ee30 --- /dev/null +++ b/.github/workflows/_Checking_Nightly.yml @@ -0,0 +1,101 @@ +name: Verification of Nightly Releases + +on: + push: + workflow_dispatch: + +jobs: + Build: + name: Build something + runs-on: ubuntu-24.04 + + steps: + - name: 🖉 Build 1 + run: | + echo "Document 1 $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt + echo "Analysis log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > analysis.log + echo "Build log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > build.log + + - name: 📤 Upload artifact + uses: pyTooling/upload-artifact@v4 + with: + name: document + path: | + document1.txt + *.log + if-no-files-found: error + retention-days: 1 + + - name: 🖉 Program + run: | + echo "Document other $(date --utc '+%d.%m.%Y - 
%H:%M:%S')" > document1.txt + echo "Program $(date --utc '+%d.%m.%Y - %H:%M:%S')" > program.py + + - name: 📤 Upload artifact + uses: pyTooling/upload-artifact@v4 + with: + name: other + path: | + *.txt + *.py + if-no-files-found: error + retention-days: 1 + + NightlyPage: + uses: pyTooling/Actions/.github/workflows/NightlyRelease.yml@main + needs: + - Build + secrets: inherit + permissions: + contents: write + actions: write +# attestations: write + with: + prerelease: true + replacements: | + version=4.2.0 + tool=myTool + prog=program + nightly_title: "Nightly Release" + nightly_description: | + This *nightly* release contains all latest and important artifacts created by GHDL's CI pipeline. + + # GHDL %version% + + GHDL offers the simulator and synthesis tool for VHDL. GHDL can be build for various backends: + * `gcc` - using the GCC compiler framework + * `mcode` - in memory code generation + * `llvm` - using the LLVM compiler framework + * `llvm-jit` - using the LLVM compiler framework, but in memory + + The following asset categories are provided for GHDL: + * macOS x64-64 builds as TAR/GZ file + * macOS aarch64 builds as TAR/GZ file + * Ubuntu 24.04 LTS builds as TAR/GZ file + * Windows builds for standalone usage (without MSYS2) as ZIP file + * MSYS2 packages as TAR/ZST file + + # pyGHDL %version% + + The Python package `pyGHDL` offers Python binding (`pyGHDL.libghdl`) to a `libghdl` shared library (`*.so`/`*.dll`). + In addition to the low-level binding layer, pyGHDL offers: + * a Language Server Protocol (LSP) instance for e.g. live code checking by editors + * a Code Document Object Model (CodeDOM) based on [pyVHDLModel](https://github.com/VHDL/pyVHDLModel) + + The following asset categories are provided for pyGHDL: + * Platform specific Python wheel package for Ubuntu incl. `pyGHDL...so` + * Platform specific Python wheel package for Windows incl. 
`pyGHDL...dll` + assets: | + document: document1.txt: Documentation + document: build.log: Logfile - %tool% - %tool% + other: document1.txt: SBOM - %version% + other: %prog%.py: Application - %tool% - %version% + document:!archive1.zip: Archive 1 - zip + document:!archive2.tgz: Archive 2 - tgz + document:!archive3.tar.gz: Archive 3 - tar.gz + document:!archive4.tzst: Archive 4 - tzst + document:!archive5.tar.zst:Archive 5 - tar.zst + document:$archive6.tgz: Archive 6 - tgz + dir + document:$archive7.tar.gz: Archive 7 - tar.gz + dir + document:$archive8.tzst: Archive 8 - tzst + dir + document:$archive9.tar.zst:Archive 9 - tar.zst + dir diff --git a/.github/workflows/_Checking_SimplePackage_Pipeline.yml b/.github/workflows/_Checking_SimplePackage_Pipeline.yml index 73947339..f977ac67 100644 --- a/.github/workflows/_Checking_SimplePackage_Pipeline.yml +++ b/.github/workflows/_Checking_SimplePackage_Pipeline.yml @@ -1,4 +1,4 @@ -name: Verification of Pipeline Templates +name: Verification of Pipeline Templates (Simple Package) on: push: diff --git a/.gitignore b/.gitignore index aa068cd9..09faddab 100644 --- a/.gitignore +++ b/.gitignore @@ -31,7 +31,7 @@ doc/pyDummy/**/*.* # BuildTheDocs doc/_theme/**/*.* -# IntelliJ project files +# PyCharm project files /.idea/workspace.xml # Git files diff --git a/doc/requirements.txt b/doc/requirements.txt index 51a65c93..35b7576d 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -11,17 +11,9 @@ docutils_stubs ~= 0.0.22 sphinx_rtd_theme ~= 3.0 # Sphinx Extenstions -#sphinx.ext.coverage -#sphinxcontrib-actdiag>=0.8.5 sphinxcontrib-mermaid>=0.9.2 -#sphinxcontrib-seqdiag>=0.8.5 -#sphinxcontrib-textstyle>=0.2.1 -#sphinxcontrib-spelling>=2.2.0 autoapi >= 2.0.1 sphinx_design ~= 0.6.1 sphinx-copybutton >= 0.5.2 sphinx_autodoc_typehints ~= 2.5 -# changelog>=0.3.5 sphinx_reports ~= 0.7 - -# BuildTheDocs Extensions (mostly patched Sphinx extensions) diff --git a/pyproject.toml b/pyproject.toml index 1132ea2d..f76c66ff 100644 
--- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [build-system] requires = [ - "setuptools ~= 75.3", + "setuptools ~= 75.5", "wheel ~= 0.45", "pyTooling ~= 8.0" ] diff --git a/tests/pacman_packages.py b/tests/pacman_packages.py index c5705547..bb329efc 100644 --- a/tests/pacman_packages.py +++ b/tests/pacman_packages.py @@ -8,7 +8,7 @@ def loadRequirementsFile(requirementsFile: Path): requirements = [] - with requirementsFile.open("r") as file: + with requirementsFile.open("r", encoding="utf-8") as file: for line in file.readlines(): line = line.strip() if line.startswith("#") or line.startswith("https") or line == "": @@ -84,7 +84,7 @@ def loadRequirementsFile(requirementsFile: Path): # Write jobs to special file github_output = Path(getenv("GITHUB_OUTPUT")) print(f"GITHUB_OUTPUT: {github_output}") -with github_output.open("a+") as f: +with github_output.open("a+", encoding="utf-8") as f: f.write(f"pacboy_packages={' '.join(pacboyPackages)}\n") print(f"GITHUB_OUTPUT:")