add extra tags for uploaded Postgres log files (#1024)
FxKu authored Sep 19, 2024
1 parent e463b8a commit fde34d4
Showing 3 changed files with 12 additions and 2 deletions.
1 change: 1 addition & 0 deletions ENVIRONMENT.rst
@@ -90,6 +90,7 @@ Environment Configuration Settings
 - **AZURE_TENANT_ID**: (optional) Tenant ID of the Service Principal
 - **CALLBACK_SCRIPT**: the callback script to run on various cluster actions (on start, on stop, on restart, on role change). The script will receive the cluster name, connection string and the current action. See `Patroni <http://patroni.readthedocs.io/en/latest/SETTINGS.html?highlight=callback#postgresql>`__ documentation for details.
 - **LOG_S3_BUCKET**: path to the S3 bucket used for PostgreSQL daily log files (i.e. foobar, without `s3://` prefix). Spilo will add `/spilo/{LOG_BUCKET_SCOPE_PREFIX}{SCOPE}{LOG_BUCKET_SCOPE_SUFFIX}/log/` to that path. Logs are shipped if this variable is set.
+- **LOG_S3_TAGS**: map of key-value pairs used for tagging files uploaded to S3. Values must reference existing environment variables, e.g. ``{"ClusterName": "SCOPE", "Namespace": "POD_NAMESPACE"}`` (see the sketch after this hunk)
 - **LOG_SHIP_SCHEDULE**: cron schedule for shipping compressed logs from ``pg_log`` (if this feature is enabled, '00 02 * * *' by default)
 - **LOG_ENV_DIR**: directory to store environment variables necessary for log shipping
 - **LOG_TMPDIR**: directory to store temporary compressed daily log files. PGROOT/../tmp by default.
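
For illustration, a minimal sketch (not part of the commit) of how such a mapping is meant to resolve: each dict value names another environment variable, and that variable's value becomes the tag value in the S3 "Tagging" query string. The SCOPE and POD_NAMESPACE values below are hypothetical.

import os

# Hypothetical environment, stand-ins for what Spilo would set on the pod.
os.environ.setdefault('SCOPE', 'demo-cluster')
os.environ.setdefault('POD_NAMESPACE', 'default')

log_s3_tags = {"ClusterName": "SCOPE", "Namespace": "POD_NAMESPACE"}

# Each value names an environment variable; its current value becomes the tag value.
tagging = "&".join(f"{key}={os.getenv(var)}" for key, var in log_s3_tags.items())
print(tagging)  # ClusterName=demo-cluster&Namespace=default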
9 changes: 8 additions & 1 deletion postgres-appliance/scripts/configure_spilo.py
@@ -583,6 +583,7 @@ def get_placeholders(provider):
     placeholders.setdefault('LOG_SHIP_SCHEDULE', '1 0 * * *')
     placeholders.setdefault('LOG_S3_BUCKET', '')
     placeholders.setdefault('LOG_S3_ENDPOINT', '')
+    placeholders.setdefault('LOG_S3_TAGS', '{}')
     placeholders.setdefault('LOG_TMPDIR', os.path.abspath(os.path.join(placeholders['PGROOT'], '../tmp')))
     placeholders.setdefault('LOG_BUCKET_SCOPE_SUFFIX', '')

@@ -771,7 +772,13 @@ def write_log_environment(placeholders):
     if not os.path.exists(log_env['LOG_ENV_DIR']):
         os.makedirs(log_env['LOG_ENV_DIR'])

-    for var in ('LOG_TMPDIR', 'LOG_AWS_REGION', 'LOG_S3_ENDPOINT', 'LOG_S3_KEY', 'LOG_S3_BUCKET', 'PGLOG'):
+    for var in ('LOG_TMPDIR',
+                'LOG_AWS_REGION',
+                'LOG_S3_ENDPOINT',
+                'LOG_S3_KEY',
+                'LOG_S3_BUCKET',
+                'LOG_S3_TAGS',
+                'PGLOG'):
         write_file(log_env[var], os.path.join(log_env['LOG_ENV_DIR'], var), True)
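
The loop above persists each value to a file named after its variable under LOG_ENV_DIR, so the log-shipping cron job can read the settings back outside of configure_spilo.py. A minimal sketch of what the write_file helper presumably does; the real helper is defined elsewhere in this script, so its exact behavior here is an assumption:

import os

def write_file(config, filename, overwrite):
    # Assumed behavior: persist `config` to `filename`, honoring the overwrite flag.
    if not overwrite and os.path.exists(filename):
        return
    with open(filename, 'w') as f:
        f.write(config)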


4 changes: 3 additions & 1 deletion postgres-appliance/scripts/upload_pg_log_to_s3.py
@@ -53,9 +53,11 @@ def upload_to_s3(local_file_path):

     chunk_size = 52428800  # 50 MiB
     config = TransferConfig(multipart_threshold=chunk_size, multipart_chunksize=chunk_size)
+    tags = eval(os.getenv('LOG_S3_TAGS'))
+    s3_tags_str = "&".join(f"{key}={os.getenv(value)}" for key, value in tags.items())

     try:
-        bucket.upload_file(local_file_path, key_name, Config=config)
+        bucket.upload_file(local_file_path, key_name, Config=config, ExtraArgs={'Tagging': s3_tags_str})
     except S3UploadFailedError as e:
         logger.exception('Failed to upload the %s to the bucket %s under the key %s. Exception: %r',
                          local_file_path, bucket_name, key_name, e)
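
One caveat on the added lines: eval() of an environment variable executes arbitrary Python if that variable is ever attacker-controlled, and it raises a TypeError when LOG_S3_TAGS is unset (the '{}' default added in configure_spilo.py mitigates the latter). Since the documented value is a JSON object, json.loads would be a safer drop-in; a sketch, not what the commit ships:

import json
import os

# Safer alternative sketch: parse LOG_S3_TAGS as JSON instead of eval().
tags = json.loads(os.getenv('LOG_S3_TAGS', '{}'))
s3_tags_str = "&".join(f"{key}={os.getenv(value)}" for key, value in tags.items())

Note also that boto3 passes the Tagging ExtraArg through as a URL query string, so tag values containing characters such as '&' or '=' would additionally need URL-encoding (e.g. via urllib.parse.quote).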
