From b9c184140fc1452632b272a4f8e6dd78415c4bc4 Mon Sep 17 00:00:00 2001
From: Yuri Goldfeld
Date: Tue, 9 Jan 2024 21:44:04 -0800
Subject: [PATCH 1/2] Updating submodules.

---
 flow | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/flow b/flow
index 175a2eec3..1c07c99f1 160000
--- a/flow
+++ b/flow
@@ -1 +1 @@
-Subproject commit 175a2eec3c31326202dbac3e06c0258dfce9331a
+Subproject commit 1c07c99f181398d54f851bd4dd3dc3e115f9a9a8

From 3eb25a3077179238f882477a5d04328956931182 Mon Sep 17 00:00:00 2001
From: Yuri Goldfeld
Date: Tue, 9 Jan 2024 22:03:20 -0800
Subject: [PATCH 2/2] Carry over the auto-generated-docs-check-in workflow stuff from flow into `ipc`.

---
 .github/workflows/main.yml | 158 ++++++++++++++++++++++++++-----------
 flow                       |   2 +-
 2 files changed, 113 insertions(+), 47 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 37797c3fa..26c19d6ed 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -11,7 +11,60 @@ on:
   workflow_dispatch:
 
 jobs:
+  # Impetus behind this is to set up at least one magic string used in 2+ places.
+  # Unfortunately simply using `env:` does not work, as `jobs.<job_id>.if` refuses to access the workflow-global `env` context.
+  # Ridiculous.  TODO: Revisit.
+  #
+  # Folding this into `setup` would have been fine, but as of this writing `setup` needs at least one constant
+  # from here, and getting the order of operations correct within that one job is non-trivial at best.
+  set-vars:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Set variables/constants for subsequent use
+        run: |
+          # Set variables/constants for subsequent use.
+    outputs:
+      doc-commit-message: (Commit by workflow script) Update generated documentation.
+
+  # Impetus behind this is to gate whether the real jobs below actually need to run.  Can of course also compute
+  # other things as needed.
+  setup:
+    needs: set-vars
+    runs-on: ubuntu-latest
+    steps:
+      - id: compute-proceed-else-not
+        name: Compute whether for main jobs to proceed or not
+        # For checking whether github.event.head_commit.message starts with needs.set-vars.outputs.doc-commit-message
+        # it is tempting to just use:
+        #   '${{ github.event.head_commit.message }}' != '${{ needs.set-vars.outputs.doc-commit-message }}'*
+        # This usually works but is unsafe: if the head commit message contains, for example, a single-quote,
+        # then the shell syntax breaks down.  One approach would be to use pipeline startsWith() before `run`,
+        # but apparently it cannot be done directly inside `run`; so it is a pain.  Staying with shell scripting,
+        # then, we can just use here-doc syntax and a temp file; as long as the commit message does not contain the
+        # here-doc terminator token, we're fine.
+        run: |
+          # Compute whether for main jobs to proceed or not.
+          TMP_MSG=/tmp/flow-ipc-pipeline-head-cmt-msg.txt
+          cat <<'FLOW_IPC_PIPELINE_HEAD_CMD_MSG_EOF' > $TMP_MSG
+          ${{ github.event.head_commit.message }}
+          FLOW_IPC_PIPELINE_HEAD_CMD_MSG_EOF
+          if [ '${{ github.ref }}' != 'refs/heads/main' ] || \
+             [ '${{ github.event_name }}' != 'push' ] || \
+             ! { head --lines=1 $TMP_MSG | grep -Fxq '${{ needs.set-vars.outputs.doc-commit-message }}'; }; then
+            echo 'proceed-else-not=true' >> $GITHUB_OUTPUT
+          else
+            echo 'proceed-else-not=false' >> $GITHUB_OUTPUT
+            echo 'The real jobs will not run: an earlier `doc` job checked generated docs into `main`.'
+            echo 'That is not a source change and requires no actual pipeline to execute.'
+          fi
+    outputs:
+      proceed-else-not: ${{ steps.compute-proceed-else-not.outputs.proceed-else-not }}
+
   build:
+    needs: [setup, set-vars]
+    if: |
+      needs.setup.outputs.proceed-else-not == 'true'
+
     strategy:
       fail-fast: false
       matrix:
@@ -330,6 +383,7 @@ jobs:
     steps:
       - name: Update available software list for apt-get
         run: |
+          # Update available software list for apt-get.
           lsb_release -a
           sudo apt-get update
@@ -342,6 +396,7 @@ jobs:
         if: |
           matrix.compiler.install && (matrix.compiler.name == 'clang')
         run: |
+          # Install clang compiler.
           wget https://apt.llvm.org/llvm.sh
           chmod u+x llvm.sh
           sudo ./llvm.sh ${{ matrix.compiler.version }}
@@ -350,6 +405,7 @@ jobs:
         if: |
           matrix.compiler.install && (matrix.compiler.name == 'gcc')
         run: |
+          # Install gcc compiler.
           sudo apt-get install -y software-properties-common
           sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
           sudo apt-get update
@@ -361,13 +417,13 @@ jobs:
       # to-dos more easily (targeting different build settings at built product source versus tools used to build it
       # = prime example).
       - name: Install the latest version of Conan which is less than 2
-        run: |
-          pip install "conan<2"
+        run: pip install 'conan<2'
 
       - name: Add custom settings for Conan packages
         if: |
           matrix.build-test-cfg.conan-custom-settings-defs
         run: |
+          # Add custom settings for Conan packages.
           conan config init
           pip install PyYAML
           CONAN_SETTINGS_PATH=$(conan config home)/settings.yml
@@ -412,6 +468,7 @@ jobs:
       # work, or would it be better to build capnp twice then (once for the binary, once for the linked libraries)?
       - name: Create Conan profile
         run: |
+          # Create Conan profile.
           cat <<'EOF' > conan_profile
           [settings]
           compiler = ${{ matrix.compiler.name }}
@@ -470,6 +527,7 @@ jobs:
         if: |
           (!cancelled()) && (matrix.build-test-cfg.sanitizer-name == 'msan')
         run: |
+          # Prepare MSAN sanitizer compile-time config file(s).
           cat <<'EOF' > ${{ env.msan-ignore-list-cfg-file }}
           [memory]
           # Warning: In clang-18 there are breaking changes in how globs/regexes are interpreted.  See docs.
@@ -494,13 +552,13 @@ jobs:
       - name: Install Flow-IPC dependencies with Conan using the profile
         run: |
+          # Install Flow-IPC dependencies with Conan using the profile.
           conan editable add flow flow/1.0
           conan install . \
             --profile:build conan_profile --profile:host conan_profile --build missing
 
       - name: Build libraries and demos/tests with Conan
-        run: |
-          conan build .
+        run: conan build .
 
       - name: Install built targets with Makefile
         run: |
@@ -550,6 +608,7 @@ jobs:
         if: |
           !cancelled()
         run: |
+          # Run link test [`ipc_core` - Flow-IPC Core].
           cd ${{ env.install-dir }}/bin
           mkdir -p logs/ipc_core_link_test
           SUPP_DIR_A=${{ github.workspace }}/flow/src
@@ -562,6 +621,7 @@ jobs:
         if: |
           !cancelled()
         run: |
+          # Run link test [`ipc_transport_structured` - Flow-IPC Structured Transport].
           cd ${{ env.install-dir }}/bin
           mkdir -p logs/ipc_transport_structured_link_test
           SUPP_DIR_A=${{ github.workspace }}/flow/src
@@ -574,6 +634,7 @@ jobs:
         if: |
           !cancelled()
         run: |
+          # Run link test [`ipc_session` - Flow-IPC Sessions].
           cd ${{ env.install-dir }}/bin
           mkdir -p logs/ipc_session_link_test
           SUPP_DIR_A=${{ github.workspace }}/flow/src
@@ -590,6 +651,7 @@ jobs:
         if: |
           !cancelled()
         run: |
+          # Run link test [`ipc_shm` - Flow-IPC Shared Memory].
           cd ${{ env.install-dir }}/bin
           mkdir -p logs/ipc_shm_link_test
           SUPP_DIR_A=${{ github.workspace }}/flow/src
@@ -606,6 +668,7 @@ jobs:
         if: |
           !cancelled()
         run: |
+          # Run link test [`ipc_shm_arena_lend` - Flow-IPC SHM-jemalloc].
           cd ${{ env.install-dir }}/bin
           mkdir -p logs/ipc_shm_arena_lend_link_test
           SUPP_DIR_A=${{ github.workspace }}/flow/src
@@ -624,6 +687,7 @@ jobs:
         if: |
           !cancelled()
         run: |
+          # Run unit tests.
           cd ${{ env.install-dir }}/bin
           # Some newline issues with the possible additional args; so need to make a wrapper script
           # and then redirect, as desired, its output.
@@ -705,6 +769,7 @@ jobs:
         if: |
           !cancelled()
         run: |
+          # Prepare run script for [transport_test - Scripted mode] variations below.
           cat <<'EOF' > ${{ env.install-dir }}/bin/run_transport_test_sc.sh
           echo "Log level: [$1]."
           cd ${{ env.install-dir }}/bin/transport_test
@@ -734,18 +799,12 @@ jobs:
         id: transport_test_scripted
         if: |
           !cancelled()
-        run: |
-          /usr/bin/bash -e \
-            ${{ env.install-dir }}/bin/run_transport_test_sc.sh \
-            info
+        run: /usr/bin/bash -e ${{ env.install-dir }}/bin/run_transport_test_sc.sh info
 
       - name: Re-run with increased logging, on failure only
         if: |
           (!cancelled()) && (steps.transport_test_scripted.outcome == 'failure')
-        run: |
-          /usr/bin/bash -e \
-            ${{ env.install-dir }}/bin/run_transport_test_sc.sh \
-            data
+        run: /usr/bin/bash -e ${{ env.install-dir }}/bin/run_transport_test_sc.sh data
 
       # The following [Exercise mode] tests follow the instructions in bin/transport_test/README.txt.
       # Note that the creation of ~/bin/ex_..._run and placement of executables there, plus
@@ -756,6 +815,7 @@ jobs:
         if: |
           !cancelled()
         run: |
+          # Prepare IPC-session safety-friendly run-time environment for [transport_test - Exercise mode].
           mkdir -p ~/bin/ex_srv_run ~/bin/ex_cli_run
           mkdir -p /tmp/var/run
           cp -v ${{ env.install-dir }}/bin/transport_test/transport_test.exec \
@@ -766,6 +826,7 @@ jobs:
         if: |
           !cancelled()
         run: |
+          # Prepare run script for [transport_test - Exercise mode] variations below.
           cat <<'EOF' > ${{ env.install-dir }}/bin/run_transport_test_ex.sh
           # Script created by pipeline during job.
           echo "Log level: [$1]."
@@ -807,35 +868,23 @@ jobs:
         id: transport_test_ex_heap
         if: |
           !cancelled()
-        run: |
-          /usr/bin/bash -e \
-            ${{ env.install-dir }}/bin/run_transport_test_ex.sh \
-            info heap
+        run: /usr/bin/bash -e ${{ env.install-dir }}/bin/run_transport_test_ex.sh info heap
 
       - name: Re-run with increased logging, on failure only
         if: |
           (!cancelled()) && (steps.transport_test_ex_heap.outcome == 'failure')
-        run: |
-          /usr/bin/bash -e \
-            ${{ env.install-dir }}/bin/run_transport_test_ex.sh \
-            data heap_log_level_data
+        run: /usr/bin/bash -e ${{ env.install-dir }}/bin/run_transport_test_ex.sh data heap_log_level_data
 
       - name: Run integration test [transport_test - Exercise mode - SHM-classic sub-mode]
         id: transport_test_ex_shm_c
         if: |
           !cancelled()
-        run: |
-          /usr/bin/bash -e \
-            ${{ env.install-dir }}/bin/run_transport_test_ex.sh \
-            info shm_classic -shm-c
+        run: /usr/bin/bash -e ${{ env.install-dir }}/bin/run_transport_test_ex.sh info shm_classic -shm-c
 
       - name: Re-run with increased logging, on failure only
         if: |
           (!cancelled()) && (steps.transport_test_ex_shm_c.outcome == 'failure')
-        run: |
-          /usr/bin/bash -e \
-            ${{ env.install-dir }}/bin/run_transport_test_ex.sh \
-            data shm_classic_log_level_data -shm-c
+        run: /usr/bin/bash -e ${{ env.install-dir }}/bin/run_transport_test_ex.sh data shm_classic_log_level_data -shm-c
 
       # Disabling this particular test run for the specific case of clang-17 in TSAN (thread sanitizer) config
       # (in particular at least 2 other clangs+TSAN are exercised, so the TSAN coverage is still good).
@@ -867,18 +916,12 @@ jobs:
         id: transport_test_ex_shm_j
         if: |
           (!cancelled()) && ((matrix.compiler.id != 'clang-17') || (matrix.build-test-cfg.sanitizer-name != 'tsan'))
-        run: |
-          /usr/bin/bash -e \
-            ${{ env.install-dir }}/bin/run_transport_test_ex.sh \
-            info shm_jemalloc -shm-j
+        run: /usr/bin/bash -e ${{ env.install-dir }}/bin/run_transport_test_ex.sh info shm_jemalloc -shm-j
 
       - name: Re-run with increased logging, on failure only
         if: |
           (!cancelled()) && (steps.transport_test_ex_shm_j.outcome == 'failure')
-        run: |
-          /usr/bin/bash -e \
-            ${{ env.install-dir }}/bin/run_transport_test_ex.sh \
-            data shm_jemalloc_log_level_data -shm-j
+        run: /usr/bin/bash -e ${{ env.install-dir }}/bin/run_transport_test_ex.sh data shm_jemalloc_log_level_data -shm-j
 
       # See earlier comment block about why we saved all the logs including for console-output-only tests/demos.
       - name: Check test/demo logs for non-fatal sanitizer error(s)
@@ -905,6 +948,7 @@ jobs:
         if: |
           always()
         run: |
+          # Package test/demo logs tarball.
           cd ${{ env.install-dir }}/bin
           tar cvzf logs.tgz logs
           rm -rf logs # Save runner space.
@@ -930,6 +974,10 @@ jobs:
   # Possibly this is all handled beautifully automatically; then this should be deleted.
   doc:
+    needs: [setup, set-vars]
+    if: |
+      needs.setup.outputs.proceed-else-not == 'true'
+
     strategy:
       fail-fast: false
       matrix:
@@ -959,15 +1007,14 @@ jobs:
           submodules: true
 
       - name: Install Flow-IPC dependencies (like Graphviz) with apt-get
-        run: |
-          sudo apt-get install -y graphviz
+        run: sudo apt-get install -y graphviz
 
       - name: Install the latest version of Conan which is less than 2
-        run: |
-          pip install "conan<2"
+        run: pip install 'conan<2'
 
       - name: Create Conan profile
         run: |
+          # Create Conan profile.
           cat <<'EOF' > conan_profile
           [settings]
           compiler = ${{ matrix.compiler.name }}
@@ -992,23 +1039,42 @@ jobs:
       - name: Install Flow-IPC dependencies (like Doxygen) with Conan using the profile
         run: |
-          conan install . \
-            --profile:build conan_profile --profile:host conan_profile --build missing
+          conan install . --profile:build conan_profile --profile:host conan_profile --build missing
 
       - name: Generate code documentation using Conan and Doxygen
-        run: |
-          conan build .
+        run: conan build .
 
       - name: Create documentation tarball (full docs, API-only docs, landing page)
         run: |
+          # Create documentation tarball (full docs, API-only docs, landing page).
           cd ${{ github.workspace }}/doc/ipc_doc
           ${{ github.workspace }}/tools/doc/stage_generated_docs.sh \
             ${{ github.workspace }}/build/${{ matrix.build-cfg.conan-profile-build-type }}
-          # Save runner space.
-          rm -rf generated
 
       - name: Upload documentation tarball
         uses: actions/upload-artifact@v3
         with:
           name: ipc-doc
           path: ${{ github.workspace }}/doc/ipc_doc.tgz
+
+      - name: (`main` branch only) Check-in generated documentation directly into source control
+        if: success() && (github.ref == 'refs/heads/main')
+        run: |
+          # (`main` branch only) Check-in generated documentation directly into source control.
+          echo 'generated/ docs have been added or replaced locally; mirroring this into checked-in tree.'
+          # These values informally recommended in:
+          #   https://github.com/actions/checkout#push-a-commit-using-the-built-in-token
+          git config user.name github-actions
+          git config user.email github-actions@github.com
+          # We are forced to use a Personal Access Token attached to a special bot user such that in repo Settings
+          # we've configured that "guy" as allowed to bypass the requirement to merge via PR.
+          # As of this writing there's no way to configure the default token to be able to do this.
+          # TODO: Keep an eye on that in case they provide for a better way:
+          #   https://github.com/orgs/community/discussions/25305
+          git config --local http.https://github.com/.extraheader \
+            "AUTHORIZATION: basic $(echo -n x-access-token:${{ secrets.GIT_BOT_PAT }} | base64)"
+          cd ${{ github.workspace }}/doc/ipc_doc
+          git rm -r --cached generated || echo 'No generated/ currently checked in; no problem.'
+          git add generated
+          git commit -m '${{ needs.set-vars.outputs.doc-commit-message }}'
+          git push origin main
diff --git a/flow b/flow
index 1c07c99f1..ff09b8adb 160000
--- a/flow
+++ b/flow
@@ -1 +1 @@
-Subproject commit 1c07c99f181398d54f851bd4dd3dc3e115f9a9a8
+Subproject commit ff09b8adb8c085d60bda091d778e7ece55aa94d3
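
Note (illustrative aside, not part of the patch): the `setup` job above deliberately avoids interpolating the
untrusted head-commit message into shell syntax; it writes the message through a quoted here-doc into a temp file
and then compares only the first line using fixed-string grep. A minimal stand-alone bash sketch of that pattern
follows; DOC_COMMIT_MSG and COMMIT_MSG_FILE are hypothetical stand-ins, not names taken from the workflow.

    #!/usr/bin/env bash
    # Decide whether the "real" pipeline jobs should run. Assumes COMMIT_MSG_FILE was already
    # populated via a quoted here-doc, so the commit message is never parsed as shell syntax.
    set -e
    DOC_COMMIT_MSG='(Commit by workflow script) Update generated documentation.'
    COMMIT_MSG_FILE=/tmp/head-commit-msg.txt
    if [ "$GITHUB_REF" != 'refs/heads/main' ] || \
       [ "$GITHUB_EVENT_NAME" != 'push' ] || \
       ! head --lines=1 "$COMMIT_MSG_FILE" | grep -Fxq "$DOC_COMMIT_MSG"; then
      echo 'proceed-else-not=true'    # Ordinary change: run the main build/doc jobs.
    else
      echo 'proceed-else-not=false'   # Docs-only auto-commit by the workflow itself: skip them.
    fi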