
Commit f3c026e
simplify how to configure hourly logging
FxKu committed Sep 12, 2024
1 parent dd9c56a commit f3c026e
Showing 4 changed files with 11 additions and 16 deletions.
3 changes: 2 additions & 1 deletion ENVIRONMENT.rst
@@ -90,7 +90,8 @@ Environment Configuration Settings
- **AZURE_TENANT_ID**: (optional) Tenant ID of the Service Principal
- **CALLBACK_SCRIPT**: the callback script to run on various cluster actions (on start, on stop, on restart, on role change). The script will receive the cluster name, connection string and the current action. See `Patroni <http://patroni.readthedocs.io/en/latest/SETTINGS.html?highlight=callback#postgresql>`__ documentation for details.
- **LOG_S3_BUCKET**: path to the S3 bucket used for PostgreSQL daily log files (i.e. foobar, without `s3://` prefix). Spilo will add `/spilo/{LOG_BUCKET_SCOPE_PREFIX}{SCOPE}{LOG_BUCKET_SCOPE_SUFFIX}/log/` to that path. Logs are shipped if this variable is set.
-- **LOG_SHIP_SCHEDULE**: cron schedule for shipping compressed logs from ``pg_log`` (if this feature is enabled, '00 02 * * *' by default)
+- **LOG_SHIP_HOURLY**: if true, log rotation in Postgres is set to 1h incl. foreign tables for every hour (schedule `1 */1 * * *`)
+- **LOG_SHIP_SCHEDULE**: cron schedule for shipping compressed logs from ``pg_log`` (``1 0 * * *`` by default)
- **LOG_ENV_DIR**: directory to store environment variables necessary for log shipping
- **LOG_TMPDIR**: directory to store temporary compressed daily log files. PGROOT/../tmp by default.
- **LOG_S3_ENDPOINT**: (optional) S3 Endpoint to use with Boto3
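
With this change, hourly shipping no longer has to be encoded in the cron expression of LOG_SHIP_SCHEDULE; setting LOG_SHIP_HOURLY is enough. A minimal sketch of the relevant container environment variables (the bucket name is a hypothetical example):

    LOG_S3_BUCKET=my-log-bucket    # hypothetical bucket name, without the s3:// prefix
    LOG_SHIP_HOURLY=true           # ships every hour on the schedule '1 */1 * * *'

    # without LOG_SHIP_HOURLY, shipping stays daily on LOG_SHIP_SCHEDULE,
    # e.g. LOG_SHIP_SCHEDULE='1 0 * * *' (the default)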
2 changes: 1 addition & 1 deletion postgres-appliance/launch.sh
@@ -34,7 +34,7 @@ fi
## Ensure all logfiles exist, most appliances will have
## a foreign data wrapper pointing to these files
for i in $(seq 0 7); do
if [ "$LOG_SHIP_HOURLY" != 'true' ]; then
if [ "$LOG_SHIP_HOURLY" != "true" ]; then
if [ ! -f "${PGLOG}/postgresql-${i}.csv" ]; then
touch "${PGLOG}/postgresql-${i}.csv"
fi
20 changes: 7 additions & 13 deletions postgres-appliance/scripts/configure_spilo.py
@@ -773,10 +773,6 @@ def write_log_environment(placeholders):
    if os.getenv('LOG_GROUP_BY_DATE'):
        log_s3_key += '{DATE}/'

-    log_schedule = os.getenv('LOG_SHIP_SCHEDULE')
-    if '/' in log_schedule.split()[1]:
-        log_env['LOG_SHIP_HOURLY'] = 'true'
-
    log_s3_key += placeholders['instance_data']['id']
    log_env['LOG_S3_KEY'] = log_s3_key

@@ -787,13 +783,7 @@ def write_log_environment(placeholders):
    if not os.path.exists(log_env['LOG_ENV_DIR']):
        os.makedirs(log_env['LOG_ENV_DIR'])

-    for var in ('LOG_TMPDIR',
-                'LOG_AWS_REGION',
-                'LOG_S3_ENDPOINT',
-                'LOG_S3_KEY',
-                'LOG_S3_BUCKET',
-                'LOG_SHIP_HOURLY',
-                'PGLOG'):
+    for var in ('LOG_TMPDIR', 'LOG_AWS_REGION', 'LOG_S3_ENDPOINT', 'LOG_S3_KEY', 'LOG_S3_BUCKET', 'PGLOG'):
        write_file(log_env[var], os.path.join(log_env['LOG_ENV_DIR'], var), True)
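
The loop above writes each variable into its own file under LOG_ENV_DIR, which is the layout daemontools' envdir expects; the cron job configured in the next hunk reads the directory back when launching the upload script. A rough sketch of what the directory could look like (the path, cluster name, and instance id are hypothetical examples):

    $ ls /run/etc/log.d/env
    LOG_AWS_REGION  LOG_S3_BUCKET  LOG_S3_ENDPOINT  LOG_S3_KEY  LOG_TMPDIR  PGLOG
    $ cat /run/etc/log.d/env/LOG_S3_KEY
    spilo/my-cluster/log/i-0123456789abcdef0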


@@ -1023,8 +1013,12 @@ def write_crontab(placeholders, overwrite):
' "{PGDATA}"').format(**placeholders)]

if bool(placeholders.get('LOG_S3_BUCKET')):
lines += [('{LOG_SHIP_SCHEDULE} nice -n 5 envdir "{LOG_ENV_DIR}"' +
' /scripts/upload_pg_log_to_s3.py').format(**placeholders)]
log_dir = placeholders.get('LOG_ENV_DIR')
schedule = placeholders.get('LOG_SHIP_SCHEDULE')
if placeholders.get('LOG_SHIP_HOURLY') == 'true':
schedule = '1 */1 * * *'
lines += [('{0} nice -n 5 envdir "{1}"' +
' /scripts/upload_pg_log_to_s3.py').format(schedule, log_dir)]

lines += yaml.safe_load(placeholders['CRONTAB'])
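
For illustration, a sketch of the crontab entry this block would now emit when LOG_SHIP_HOURLY is 'true' (the LOG_ENV_DIR path is a hypothetical example):

    1 */1 * * * nice -n 5 envdir "/run/etc/log.d/env" /scripts/upload_pg_log_to_s3.py

With LOG_SHIP_HOURLY unset, the entry keeps whatever LOG_SHIP_SCHEDULE resolves to, e.g. '1 0 * * *'.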

2 changes: 1 addition & 1 deletion postgres-appliance/scripts/post_init.sh
@@ -148,7 +148,7 @@ fi

# Sunday could be 0 or 7 depending on the format, we just create both
for i in $(seq 0 7); do
if [ "$LOG_SHIP_HOURLY" != 'true' ]; then
if [ "$LOG_SHIP_HOURLY" != "true" ]; then
echo "CREATE FOREIGN TABLE IF NOT EXISTS public.postgres_log_${i} () INHERITS (public.postgres_log) SERVER pglog
OPTIONS (filename '../pg_log/postgresql-${i}.csv', format 'csv', header 'false');
GRANT SELECT ON public.postgres_log_${i} TO admin;"
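
Once those foreign tables exist, the daily CSV logs can be queried from SQL. A hedged sketch, assuming the parent table public.postgres_log (created elsewhere in post_init.sh) uses the standard csvlog column layout:

    # columns such as log_time, error_severity and message come from the standard
    # csvlog format; adjust the database name and connection options to your setup
    psql -U admin -d postgres -c "SELECT log_time, error_severity, message
                                    FROM public.postgres_log_1
                                   ORDER BY log_time DESC LIMIT 10;"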
