From f3c026e89deb0b52c46b46201763e10e7d508503 Mon Sep 17 00:00:00 2001
From: Felix Kunde
Date: Thu, 12 Sep 2024 17:48:35 +0200
Subject: [PATCH] simplify how to configure hourly logging

---
 ENVIRONMENT.rst                               |  3 ++-
 postgres-appliance/launch.sh                  |  2 +-
 postgres-appliance/scripts/configure_spilo.py | 20 +++++++------------
 postgres-appliance/scripts/post_init.sh       |  2 +-
 4 files changed, 11 insertions(+), 16 deletions(-)

diff --git a/ENVIRONMENT.rst b/ENVIRONMENT.rst
index 6875fa68b..da59d1673 100644
--- a/ENVIRONMENT.rst
+++ b/ENVIRONMENT.rst
@@ -90,7 +90,8 @@ Environment Configuration Settings
 - **AZURE_TENANT_ID**: (optional) Tenant ID of the Service Principal
 - **CALLBACK_SCRIPT**: the callback script to run on various cluster actions (on start, on stop, on restart, on role change). The script will receive the cluster name, connection string and the current action. See `Patroni `__ documentation for details.
 - **LOG_S3_BUCKET**: path to the S3 bucket used for PostgreSQL daily log files (i.e. foobar, without `s3://` prefix). Spilo will add `/spilo/{LOG_BUCKET_SCOPE_PREFIX}{SCOPE}{LOG_BUCKET_SCOPE_SUFFIX}/log/` to that path. Logs are shipped if this variable is set.
-- **LOG_SHIP_SCHEDULE**: cron schedule for shipping compressed logs from ``pg_log`` (if this feature is enabled, '00 02 * * *' by default)
+- **LOG_SHIP_HOURLY**: if true, log rotation in Postgres is set to 1h incl. foreign tables for every hour (schedule `1 */1 * * *`)
+- **LOG_SHIP_SCHEDULE**: cron schedule for shipping compressed logs from ``pg_log`` (``1 0 * * *`` by default)
 - **LOG_ENV_DIR**: directory to store environment variables necessary for log shipping
 - **LOG_TMPDIR**: directory to store temporary compressed daily log files. PGROOT/../tmp by default.
 - **LOG_S3_ENDPOINT**: (optional) S3 Endpoint to use with Boto3
diff --git a/postgres-appliance/launch.sh b/postgres-appliance/launch.sh
index 4b552a0eb..a0adc213f 100755
--- a/postgres-appliance/launch.sh
+++ b/postgres-appliance/launch.sh
@@ -34,7 +34,7 @@ fi
 ## Ensure all logfiles exist, most appliances will have
 ## a foreign data wrapper pointing to these files
 for i in $(seq 0 7); do
-    if [ "$LOG_SHIP_HOURLY" != 'true' ]; then
+    if [ "$LOG_SHIP_HOURLY" != "true" ]; then
         if [ ! -f "${PGLOG}/postgresql-${i}.csv" ]; then
             touch "${PGLOG}/postgresql-${i}.csv"
         fi
diff --git a/postgres-appliance/scripts/configure_spilo.py b/postgres-appliance/scripts/configure_spilo.py
index f084209f4..7edae6a44 100755
--- a/postgres-appliance/scripts/configure_spilo.py
+++ b/postgres-appliance/scripts/configure_spilo.py
@@ -773,10 +773,6 @@ def write_log_environment(placeholders):
     if os.getenv('LOG_GROUP_BY_DATE'):
         log_s3_key += '{DATE}/'

-    log_schedule = os.getenv('LOG_SHIP_SCHEDULE')
-    if '/' in log_schedule.split()[1]:
-        log_env['LOG_SHIP_HOURLY'] = 'true'
-
     log_s3_key += placeholders['instance_data']['id']
     log_env['LOG_S3_KEY'] = log_s3_key

@@ -787,13 +783,7 @@
     if not os.path.exists(log_env['LOG_ENV_DIR']):
         os.makedirs(log_env['LOG_ENV_DIR'])

-    for var in ('LOG_TMPDIR',
-                'LOG_AWS_REGION',
-                'LOG_S3_ENDPOINT',
-                'LOG_S3_KEY',
-                'LOG_S3_BUCKET',
-                'LOG_SHIP_HOURLY',
-                'PGLOG'):
+    for var in ('LOG_TMPDIR', 'LOG_AWS_REGION', 'LOG_S3_ENDPOINT', 'LOG_S3_KEY', 'LOG_S3_BUCKET', 'PGLOG'):
         write_file(log_env[var], os.path.join(log_env['LOG_ENV_DIR'], var), True)

@@ -1023,8 +1013,12 @@ def write_crontab(placeholders, overwrite):
                   ' "{PGDATA}"').format(**placeholders)]

     if bool(placeholders.get('LOG_S3_BUCKET')):
-        lines += [('{LOG_SHIP_SCHEDULE} nice -n 5 envdir "{LOG_ENV_DIR}"' +
-                   ' /scripts/upload_pg_log_to_s3.py').format(**placeholders)]
+        log_dir = placeholders.get('LOG_ENV_DIR')
+        schedule = placeholders.get('LOG_SHIP_SCHEDULE')
+        if placeholders.get('LOG_SHIP_HOURLY') == 'true':
+            schedule = '1 */1 * * *'
+        lines += [('{0} nice -n 5 envdir "{1}"' +
+                   ' /scripts/upload_pg_log_to_s3.py').format(schedule, log_dir)]

     lines += yaml.safe_load(placeholders['CRONTAB'])

diff --git a/postgres-appliance/scripts/post_init.sh b/postgres-appliance/scripts/post_init.sh
index b98931634..cedf39463 100755
--- a/postgres-appliance/scripts/post_init.sh
+++ b/postgres-appliance/scripts/post_init.sh
@@ -148,7 +148,7 @@ fi
 # Sunday could be 0 or 7 depending on the format, we just create both
 for i in $(seq 0 7); do
-    if [ "$LOG_SHIP_HOURLY" != 'true' ]; then
+    if [ "$LOG_SHIP_HOURLY" != "true" ]; then
         echo "CREATE FOREIGN TABLE IF NOT EXISTS public.postgres_log_${i} () INHERITS (public.postgres_log) SERVER pglog
             OPTIONS (filename '../pg_log/postgresql-${i}.csv', format 'csv', header 'false');
         GRANT SELECT ON public.postgres_log_${i} TO admin;"