
Release 2.12.0 #9015

Workflow file for this run

# Test our extension sql scripts are following security best practices
name: pgspot
"on":
  pull_request:
  push:
    branches:
      - main
      - prerelease_test
jobs:
  pgspot:
    runs-on: ubuntu-latest
    env:
      # time_bucket with offset is intentional without explicit search_path to allow for inlining
      # policy_compression, policy_compression_execute, cagg_migrate_execute_plan and cagg_migrate
      # do not have explicit search_path because this would prevent them doing transaction control
      PGSPOT_OPTS: >-
        --proc-without-search-path 'extschema.time_bucket(bucket_width interval,ts timestamp,"offset" interval)'
        --proc-without-search-path 'extschema.time_bucket(bucket_width interval,ts timestamptz,"offset" interval)'
        --proc-without-search-path 'extschema.time_bucket(bucket_width interval,ts date,"offset" interval)'
        --proc-without-search-path 'extschema.recompress_chunk(chunk regclass,if_not_compressed boolean)'
        --proc-without-search-path '_timescaledb_internal.policy_compression(job_id integer,config jsonb)'
        --proc-without-search-path '_timescaledb_functions.policy_compression(job_id integer,config jsonb)'
        --proc-without-search-path '_timescaledb_internal.policy_compression_execute(job_id integer,htid integer,lag anyelement,maxchunks integer,verbose_log boolean,recompress_enabled boolean)'
        --proc-without-search-path '_timescaledb_functions.policy_compression_execute(job_id integer,htid integer,lag anyelement,maxchunks integer,verbose_log boolean,recompress_enabled boolean)'
        --proc-without-search-path '_timescaledb_internal.cagg_migrate_execute_plan(_cagg_data _timescaledb_catalog.continuous_agg)'
        --proc-without-search-path '_timescaledb_functions.cagg_migrate_execute_plan(_cagg_data _timescaledb_catalog.continuous_agg)'
        --proc-without-search-path 'extschema.cagg_migrate(cagg regclass,override boolean,drop_old boolean)'
    steps:
      - name: Setup python 3.10
        uses: actions/setup-python@v4
        with:
          python-version: '3.10'
      - name: Checkout timescaledb
        uses: actions/checkout@v3
      - name: Install pgspot
        run: |
          python -m pip install pgspot==0.6.0
      - name: Build timescaledb sqlfiles
        run: |
          downgrade_to=$(grep '^downgrade_to_version = ' version.config | sed -e 's!^[^=]\+ = !!')
          git fetch --tags
          ./bootstrap -DGENERATE_DOWNGRADE_SCRIPT=ON
          # We use downgrade_to_version instead of update_from_version because
          # when the release PR for a new version has been merged to main but
          # the version is not tagged yet, update_from_version will not exist yet.
          # In all other situations update_from_version and downgrade_to_version
          # should point to the same version.
          git checkout ${downgrade_to}
          make -C build sqlfile sqlupdatescripts
          git checkout ${GITHUB_SHA}
          make -C build sqlfile sqlupdatescripts
          ls -la build/sql/timescaledb--*.sql
      - name: Run pgspot
        run: |
          version=$(grep '^version = ' version.config | sed -e 's!^[^=]\+ = !!')
          downgrade_to=$(grep '^downgrade_to_version = ' version.config | sed -e 's!^[^=]\+ = !!')
          # Show files used
          ls -la build/sql/timescaledb--${version}.sql build/sql/timescaledb--${downgrade_to}--${version}.sql \
            build/sql/timescaledb--${version}--${downgrade_to}.sql
          # The next pgspot execution tests the installation script by itself
          pgspot ${{ env.PGSPOT_OPTS }} build/sql/timescaledb--${version}.sql
          # The next pgspot execution tests the update script to the latest version
          # we prepend the installation script here so pgspot can correctly keep track of created objects
          pgspot ${{ env.PGSPOT_OPTS }} -a build/sql/timescaledb--${downgrade_to}.sql \
            build/sql/timescaledb--${downgrade_to}--${version}.sql
          # The next pgspot execution tests the downgrade script to the previous version
          # we prepend the installation script here so pgspot can correctly keep track of created objects
          pgspot ${{ env.PGSPOT_OPTS }} -a build/sql/timescaledb--${version}.sql \
            build/sql/timescaledb--${version}--${downgrade_to}.sql
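
For reference, the same checks can be reproduced outside CI. The sketch below is not part of the workflow file; it assumes pgspot 0.6.0 is installed, build/sql already contains the files produced by the "Build timescaledb sqlfiles" step, and that you define PGSPOT_OPTS locally as a bash array holding the same --proc-without-search-path options as the env block above (only one is shown here as a placeholder).

  # Minimal local sketch of the "Run pgspot" step (assumptions as noted above).
  PGSPOT_OPTS=(--proc-without-search-path 'extschema.cagg_migrate(cagg regclass,override boolean,drop_old boolean)')  # ...plus the remaining options from the env block
  version=$(grep '^version = ' version.config | sed -e 's!^[^=]\+ = !!')
  downgrade_to=$(grep '^downgrade_to_version = ' version.config | sed -e 's!^[^=]\+ = !!')
  # Installation script by itself
  pgspot "${PGSPOT_OPTS[@]}" build/sql/timescaledb--${version}.sql
  # Update script, with the installation script prepended so pgspot can track created objects
  pgspot "${PGSPOT_OPTS[@]}" -a build/sql/timescaledb--${downgrade_to}.sql \
    build/sql/timescaledb--${downgrade_to}--${version}.sql
  # Downgrade script, same approach
  pgspot "${PGSPOT_OPTS[@]}" -a build/sql/timescaledb--${version}.sql \
    build/sql/timescaledb--${version}--${downgrade_to}.sql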