fix deletions on readreceipt update #1213

Workflow file for this run

name: Documentation and GitHub Pages

on:
  pull_request:
  push:
    branches:
      - main
    tags:
      - '*'
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

env:
  # Required to make some things output color
  TERM: ansi
  # Publishing to my nix binary cache
  ATTIC_TOKEN: ${{ secrets.ATTIC_TOKEN }}
  # conduwuit.cachix.org
  CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }}
  # Custom nix binary cache if fork is being used
  ATTIC_ENDPOINT: ${{ vars.ATTIC_ENDPOINT }}
  ATTIC_PUBLIC_KEY: ${{ vars.ATTIC_PUBLIC_KEY }}
  # Get error output from nix that we can actually use, and use our binary caches for the earlier CI steps
  NIX_CONFIG: |
    show-trace = true
    extra-substituters = https://attic.kennel.juneis.dog/conduwuit https://attic.kennel.juneis.dog/conduit https://cache.lix.systems https://conduwuit.cachix.org https://aseipp-nix-cache.freetls.fastly.net
    extra-trusted-public-keys = conduit:eEKoUwlQGDdYmAI/Q/0slVlegqh/QmAvQd7HBSm21Wk= conduwuit:BbycGUgTISsltcmH0qNjFR9dbrQNYgdIAcmViSGoVTE= cache.lix.systems:aBnZUw8zA7H35Cz2RyKFVs3H4PlGTLawyY5KRbvJR8o= conduwuit.cachix.org-1:MFRm6jcnfTf0jSAbmvLfhO3KBMt4px+1xaereWXp8Xg=
    experimental-features = nix-command flakes
    extra-experimental-features = nix-command flakes
    accept-flake-config = true

# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  docs:
    name: Documentation and GitHub Pages
    runs-on: ubuntu-24.04

    permissions:
      pages: write
      id-token: write

    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}

    steps:
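      # Remove preinstalled packages and Docker images to leave room on the runner disk for the Nix store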
      - name: Free up a bit of runner space
        run: |
          set +o pipefail
          sudo docker image prune --all --force || true
          sudo apt purge -y 'php.*' '^mongodb-.*' '^mysql-.*' azure-cli google-cloud-cli google-chrome-stable firefox powershell microsoft-edge-stable || true
          sudo apt clean
          sudo rm -v -rf /usr/local/games /usr/local/sqlpackage /usr/local/share/powershell /usr/local/share/edge_driver /usr/local/share/gecko_driver /usr/local/share/chromium /usr/local/share/chromedriver-linux64 /usr/lib/google-cloud-sdk /usr/lib/jvm /usr/lib/mono /usr/lib/heroku
          set -o pipefail

      - name: Sync repository
        uses: actions/checkout@v4

      - name: Setup GitHub Pages
        if: (startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main') && (github.event_name != 'pull_request')
        uses: actions/configure-pages@v5

      - uses: nixbuild/nix-quick-install-action@master

      - name: Restore and cache Nix store
        uses: nix-community/cache-nix-action@v5
        with:
          # restore and save a cache using this key
          primary-key: nix-${{ runner.os }}-${{ hashFiles('**/*.nix', '**/.lock') }}
          # if there's no cache hit, restore a cache by this prefix
          restore-prefixes-first-match: nix-${{ runner.os }}-
          # collect garbage until the Nix store size (in bytes) is at most this number
          # before trying to save a new cache
          gc-max-store-size-linux: 2073741824
          # do purge caches
          purge: true
          # purge all versions of the cache
          purge-prefixes: nix-${{ runner.os }}-
          # ...that were last accessed more than this number of seconds ago relative to the start of the `Post Restore` phase
          purge-last-accessed: 86400
          # except the version with the `primary-key`, if it exists
          purge-primary-key: never
          # always save the cache
          save-always: true

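      # `cachix use` adds the crane and nix-community caches as extra substituters along with their public keys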
      - name: Enable Cachix binary cache
        run: |
          nix profile install nixpkgs#cachix
          cachix use crane
          cachix use nix-community

      - name: Apply Nix binary cache configuration
        run: |
          sudo tee -a "${XDG_CONFIG_HOME:-$HOME/.config}/nix/nix.conf" > /dev/null <<EOF
          extra-substituters = https://attic.kennel.juneis.dog/conduwuit https://attic.kennel.juneis.dog/conduit https://cache.lix.systems https://conduwuit.cachix.org https://aseipp-nix-cache.freetls.fastly.net
          extra-trusted-public-keys = conduit:eEKoUwlQGDdYmAI/Q/0slVlegqh/QmAvQd7HBSm21Wk= conduwuit:BbycGUgTISsltcmH0qNjFR9dbrQNYgdIAcmViSGoVTE= cache.lix.systems:aBnZUw8zA7H35Cz2RyKFVs3H4PlGTLawyY5KRbvJR8o= conduwuit.cachix.org-1:MFRm6jcnfTf0jSAbmvLfhO3KBMt4px+1xaereWXp8Xg=
          experimental-features = nix-command flakes
          extra-experimental-features = nix-command flakes
          accept-flake-config = true
          EOF

      - name: Use alternative Nix binary caches if specified
        if: ${{ (env.ATTIC_ENDPOINT != '') && (env.ATTIC_PUBLIC_KEY != '') }}
        run: |
          sudo tee -a "${XDG_CONFIG_HOME:-$HOME/.config}/nix/nix.conf" > /dev/null <<EOF
          extra-substituters = ${{ env.ATTIC_ENDPOINT }}
          extra-trusted-public-keys = ${{ env.ATTIC_PUBLIC_KEY }}
          EOF

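      # Set up direnv/nix-direnv so later steps can run inside the flake's dev shell via `direnv exec`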
      - name: Prepare build environment
        run: |
          echo 'source $HOME/.nix-profile/share/nix-direnv/direnvrc' > "$HOME/.direnvrc"
          nix profile install --inputs-from . nixpkgs#direnv nixpkgs#nix-direnv
          direnv allow
          nix develop --command true

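      # bin/nix-build-and-cache is a helper script in this repository; `ci` builds and caches the CI dependencies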
      - name: Cache CI dependencies
        run: |
          bin/nix-build-and-cache ci

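      # Link checking (lychee) and Markdown linting, run as `engage`/`just` tasks from the dev shell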
      - name: Run lychee and markdownlint
        run: |
          direnv exec . engage just lints lychee
          direnv exec . engage just lints markdownlint

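      # Build the `.#book` flake output and copy it out of the Nix store (dereferencing the `result` symlink) into `public/`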
      - name: Build documentation (book)
        run: |
          bin/nix-build-and-cache just .#book
          cp -r --dereference result public

      - name: Upload generated documentation (book) as normal artifact
        uses: actions/upload-artifact@v4
        with:
          name: public
          path: public
          if-no-files-found: error
          # don't compress again
          compression-level: 0

      - name: Upload generated documentation (book) as GitHub Pages artifact
        if: (startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main') && (github.event_name != 'pull_request')
        uses: actions/upload-pages-artifact@v3
        with:
          path: public

      - name: Deploy to GitHub Pages
        if: (startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main') && (github.event_name != 'pull_request')
        id: deployment
        uses: actions/deploy-pages@v4