From f6a9c6bcf5cdca3f7882a6526bc586797ee6fc57 Mon Sep 17 00:00:00 2001
From: Felix Kunde
Date: Wed, 9 Oct 2024 11:36:32 +0200
Subject: [PATCH 1/3] fill s3 tags before copying to log env

---
 postgres-appliance/scripts/configure_spilo.py     | 3 +++
 postgres-appliance/scripts/upload_pg_log_to_s3.py | 5 ++---
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/postgres-appliance/scripts/configure_spilo.py b/postgres-appliance/scripts/configure_spilo.py
index fa2f3ca9..4ec029de 100755
--- a/postgres-appliance/scripts/configure_spilo.py
+++ b/postgres-appliance/scripts/configure_spilo.py
@@ -787,6 +787,9 @@ def write_log_environment(placeholders):
     if not os.path.exists(log_env['LOG_ENV_DIR']):
         os.makedirs(log_env['LOG_ENV_DIR'])
 
+    tags = eval(os.getenv('LOG_S3_TAGS'))
+    log_env['LOG_S3_TAGS'] = "&".join(f"{key}={os.getenv(value)}" for key, value in tags.items())
+
     for var in ('LOG_TMPDIR',
                 'LOG_SHIP_HOURLY',
                 'LOG_AWS_REGION',
diff --git a/postgres-appliance/scripts/upload_pg_log_to_s3.py b/postgres-appliance/scripts/upload_pg_log_to_s3.py
index 39966c35..c008ee48 100755
--- a/postgres-appliance/scripts/upload_pg_log_to_s3.py
+++ b/postgres-appliance/scripts/upload_pg_log_to_s3.py
@@ -63,11 +63,10 @@ def upload_to_s3(local_file_path):
     chunk_size = 52428800  # 50 MiB
     config = TransferConfig(multipart_threshold=chunk_size, multipart_chunksize=chunk_size)
 
-    tags = eval(os.getenv('LOG_S3_TAGS'))
-    s3_tags_str = "&".join(f"{key}={os.getenv(value)}" for key, value in tags.items())
     try:
-        bucket.upload_file(local_file_path, key_name, Config=config, ExtraArgs={'Tagging': s3_tags_str})
+        bucket.upload_file(local_file_path, key_name, Config=config,
+                           ExtraArgs={'Tagging': eval(os.getenv('LOG_S3_TAGS'))})
     except S3UploadFailedError as e:
         logger.exception('Failed to upload the %s to the bucket %s under the key %s. Exception: %r',
                          local_file_path, bucket_name, key_name, e)

From a0f06ef1957fa9f8f32da2d8f00c6924c2be7311 Mon Sep 17 00:00:00 2001
From: Felix Kunde
Date: Wed, 9 Oct 2024 12:50:37 +0200
Subject: [PATCH 2/3] use json.load instead of eval

---
 postgres-appliance/scripts/configure_spilo.py     | 2 +-
 postgres-appliance/scripts/upload_pg_log_to_s3.py | 3 ++-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/postgres-appliance/scripts/configure_spilo.py b/postgres-appliance/scripts/configure_spilo.py
index 4ec029de..970ba7b7 100755
--- a/postgres-appliance/scripts/configure_spilo.py
+++ b/postgres-appliance/scripts/configure_spilo.py
@@ -787,7 +787,7 @@ def write_log_environment(placeholders):
     if not os.path.exists(log_env['LOG_ENV_DIR']):
         os.makedirs(log_env['LOG_ENV_DIR'])
 
-    tags = eval(os.getenv('LOG_S3_TAGS'))
+    tags = json.loads(os.getenv('LOG_S3_TAGS'))
     log_env['LOG_S3_TAGS'] = "&".join(f"{key}={os.getenv(value)}" for key, value in tags.items())
 
     for var in ('LOG_TMPDIR',
diff --git a/postgres-appliance/scripts/upload_pg_log_to_s3.py b/postgres-appliance/scripts/upload_pg_log_to_s3.py
index c008ee48..00cdb44d 100755
--- a/postgres-appliance/scripts/upload_pg_log_to_s3.py
+++ b/postgres-appliance/scripts/upload_pg_log_to_s3.py
@@ -3,6 +3,7 @@
 
 import boto3
 import os
+import json
 import logging
 import subprocess
 import sys
@@ -66,7 +67,7 @@ def upload_to_s3(local_file_path):
 
     try:
         bucket.upload_file(local_file_path, key_name, Config=config,
-                           ExtraArgs={'Tagging': eval(os.getenv('LOG_S3_TAGS'))})
+                           ExtraArgs={'Tagging': json.loads(os.getenv('LOG_S3_TAGS'))})
     except S3UploadFailedError as e:
         logger.exception('Failed to upload the %s to the bucket %s under the key %s. Exception: %r',
                          local_file_path, bucket_name, key_name, e)

From e20fd7b89718ee3c66b5e8e54fe331a56905c684 Mon Sep 17 00:00:00 2001
From: Felix Kunde
Date: Wed, 9 Oct 2024 14:21:59 +0200
Subject: [PATCH 3/3] remove json usage in upload script

---
 postgres-appliance/scripts/upload_pg_log_to_s3.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/postgres-appliance/scripts/upload_pg_log_to_s3.py b/postgres-appliance/scripts/upload_pg_log_to_s3.py
index 00cdb44d..8f7df9d8 100755
--- a/postgres-appliance/scripts/upload_pg_log_to_s3.py
+++ b/postgres-appliance/scripts/upload_pg_log_to_s3.py
@@ -3,7 +3,6 @@
 
 import boto3
 import os
-import json
 import logging
 import subprocess
 import sys
@@ -66,8 +65,7 @@ def upload_to_s3(local_file_path):
     config = TransferConfig(multipart_threshold=chunk_size, multipart_chunksize=chunk_size)
 
     try:
-        bucket.upload_file(local_file_path, key_name, Config=config,
-                           ExtraArgs={'Tagging': json.loads(os.getenv('LOG_S3_TAGS'))})
+        bucket.upload_file(local_file_path, key_name, Config=config, ExtraArgs={'Tagging': os.getenv('LOG_S3_TAGS')})
     except S3UploadFailedError as e:
         logger.exception('Failed to upload the %s to the bucket %s under the key %s. Exception: %r',
                          local_file_path, bucket_name, key_name, e)
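
For illustration, a minimal sketch of the tagging flow the series converges on: configure_spilo.py parses LOG_S3_TAGS as a JSON object mapping S3 tag keys to the names of environment variables that hold the tag values, builds the URL-query-style tagging string once, and copies it into the log environment; upload_pg_log_to_s3.py then passes that string verbatim to boto3 via ExtraArgs={'Tagging': ...}. The tag keys and environment values below are made-up examples, not Spilo defaults.

# Sketch of the LOG_S3_TAGS flow after PATCH 3/3 (illustrative values only).
import json
import os

# configure_spilo.py side: LOG_S3_TAGS maps S3 tag keys to the names of
# environment variables carrying the tag values.
os.environ['POD_NAMESPACE'] = 'default'        # assumed example value
os.environ['SCOPE'] = 'demo-cluster'           # assumed example value
os.environ['LOG_S3_TAGS'] = '{"Namespace": "POD_NAMESPACE", "ClusterName": "SCOPE"}'

tags = json.loads(os.environ['LOG_S3_TAGS'])
tagging = "&".join(f"{key}={os.getenv(value)}" for key, value in tags.items())
print(tagging)  # -> Namespace=default&ClusterName=demo-cluster

# upload_pg_log_to_s3.py side: the pre-built string is the URL-encoded form
# boto3 expects for the 'Tagging' upload argument, e.g.:
#
#   bucket.upload_file(local_file_path, key_name, Config=config,
#                      ExtraArgs={'Tagging': tagging})

Building the string once in configure_spilo.py means the upload script only reads LOG_S3_TAGS from its environment at run time, leaving no JSON parsing or eval in the upload path.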