diff --git a/postgres-appliance/scripts/configure_spilo.py b/postgres-appliance/scripts/configure_spilo.py
index fa2f3ca9..970ba7b7 100755
--- a/postgres-appliance/scripts/configure_spilo.py
+++ b/postgres-appliance/scripts/configure_spilo.py
@@ -787,6 +787,9 @@ def write_log_environment(placeholders):
     if not os.path.exists(log_env['LOG_ENV_DIR']):
         os.makedirs(log_env['LOG_ENV_DIR'])
 
+    tags = json.loads(os.getenv('LOG_S3_TAGS'))
+    log_env['LOG_S3_TAGS'] = "&".join(f"{key}={os.getenv(value)}" for key, value in tags.items())
+
     for var in ('LOG_TMPDIR',
                 'LOG_SHIP_HOURLY',
                 'LOG_AWS_REGION',
diff --git a/postgres-appliance/scripts/upload_pg_log_to_s3.py b/postgres-appliance/scripts/upload_pg_log_to_s3.py
index 39966c35..8f7df9d8 100755
--- a/postgres-appliance/scripts/upload_pg_log_to_s3.py
+++ b/postgres-appliance/scripts/upload_pg_log_to_s3.py
@@ -63,11 +63,9 @@ def upload_to_s3(local_file_path):
     chunk_size = 52428800  # 50 MiB
     config = TransferConfig(multipart_threshold=chunk_size, multipart_chunksize=chunk_size)
 
-    tags = eval(os.getenv('LOG_S3_TAGS'))
-    s3_tags_str = "&".join(f"{key}={os.getenv(value)}" for key, value in tags.items())
 
     try:
-        bucket.upload_file(local_file_path, key_name, Config=config, ExtraArgs={'Tagging': s3_tags_str})
+        bucket.upload_file(local_file_path, key_name, Config=config, ExtraArgs={'Tagging': os.getenv('LOG_S3_TAGS')})
     except S3UploadFailedError as e:
         logger.exception('Failed to upload the %s to the bucket %s under the key %s. Exception: %r',
                          local_file_path, bucket_name, key_name, e)
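
Note: the patch moves the tag-string construction from the upload script into configure_spilo.py and replaces eval() with json.loads(), so the uploader only reads the precomputed LOG_S3_TAGS value. A minimal sketch of that construction is below, assuming LOG_S3_TAGS is a JSON object mapping S3 tag keys to environment-variable names; the example values (POD_NAMESPACE, "default") are hypothetical.

    import json
    import os

    # Hypothetical example environment; in Spilo these come from the container.
    os.environ.setdefault('LOG_S3_TAGS', '{"Namespace": "POD_NAMESPACE"}')
    os.environ.setdefault('POD_NAMESPACE', 'default')

    # json.loads replaces the previous eval(), so LOG_S3_TAGS must be valid JSON.
    tags = json.loads(os.environ['LOG_S3_TAGS'])

    # Each JSON value names another environment variable; its value becomes the tag value.
    # The result is a URL-query-style tag set, the format boto3 accepts for ExtraArgs={'Tagging': ...}.
    tagging = "&".join(f"{key}={os.getenv(value)}" for key, value in tags.items())
    print(tagging)  # Namespace=default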