add toggle to enable grouping log by date
idanovinda committed Nov 3, 2023
1 parent d554db0 commit 9e03add
Showing 2 changed files with 5 additions and 0 deletions.
3 changes: 3 additions & 0 deletions postgres-appliance/scripts/configure_spilo.py
@@ -579,6 +579,7 @@ def get_placeholders(provider):
     placeholders.setdefault('CLONE_TARGET_TIME', '')
     placeholders.setdefault('CLONE_TARGET_INCLUSIVE', True)
 
+    placeholders.setdefault('LOG_GROUP_BY_DATE', False)
     placeholders.setdefault('LOG_SHIP_SCHEDULE', '1 0 * * *')
     placeholders.setdefault('LOG_S3_BUCKET', '')
     placeholders.setdefault('LOG_S3_ENDPOINT', '')
@@ -758,6 +759,8 @@ def write_log_environment(placeholders):
     log_env['LOG_AWS_REGION'] = aws_region
 
     log_s3_key = 'spilo/{LOG_BUCKET_SCOPE_PREFIX}{SCOPE}{LOG_BUCKET_SCOPE_SUFFIX}/log/'.format(**log_env)
+    if os.getenv('LOG_GROUP_BY_DATE'):
+        log_s3_key += '{DATE}/'
     log_s3_key += placeholders['instance_data']['id']
     log_env['LOG_S3_KEY'] = log_s3_key
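
For illustration, a minimal standalone sketch of how the resulting S3 key prefix changes when the toggle is set; the scope and instance id values below are hypothetical, not taken from the commit:

    import os

    # Hypothetical stand-ins for the real placeholder values.
    log_env = {'LOG_BUCKET_SCOPE_PREFIX': '', 'SCOPE': 'demo-cluster', 'LOG_BUCKET_SCOPE_SUFFIX': ''}
    instance_id = 'pod-0'  # placeholders['instance_data']['id'] in the real script

    log_s3_key = 'spilo/{LOG_BUCKET_SCOPE_PREFIX}{SCOPE}{LOG_BUCKET_SCOPE_SUFFIX}/log/'.format(**log_env)
    if os.getenv('LOG_GROUP_BY_DATE'):
        log_s3_key += '{DATE}/'  # kept as a literal placeholder, resolved at upload time
    log_s3_key += instance_id

    # With LOG_GROUP_BY_DATE set:  spilo/demo-cluster/log/{DATE}/pod-0
    # Without it:                  spilo/demo-cluster/log/pod-0
    print(log_s3_key)

Note that the condition tests os.getenv('LOG_GROUP_BY_DATE') directly, so any non-empty value of the variable (even 'false') enables the date grouping.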
2 changes: 2 additions & 0 deletions postgres-appliance/scripts/upload_pg_log_to_s3.py
@@ -48,6 +48,8 @@ def upload_to_s3(local_file_path):
     bucket = s3.Bucket(bucket_name)
 
     key_name = os.path.join(os.getenv('LOG_S3_KEY'), os.path.basename(local_file_path))
+    if os.getenv('LOG_GROUP_BY_DATE'):
+        key_name = key_name.format(**{'DATE': os.path.basename(local_file_path).split('.')[0]})
 
     chunk_size = 52428800  # 50 MiB
     config = TransferConfig(multipart_threshold=chunk_size, multipart_chunksize=chunk_size)
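
At upload time the literal {DATE} segment is filled in from the uploaded file's name. A sketch of that substitution, assuming for illustration that the archived log file is named after its date (the path and values below are hypothetical):

    import os

    os.environ['LOG_GROUP_BY_DATE'] = '1'
    os.environ['LOG_S3_KEY'] = 'spilo/demo-cluster/log/{DATE}/pod-0'  # as built in configure_spilo.py

    # Hypothetical archived log file, assumed to be named after its date.
    local_file_path = '/tmp/2023-11-03.csv.gz'

    key_name = os.path.join(os.getenv('LOG_S3_KEY'), os.path.basename(local_file_path))
    if os.getenv('LOG_GROUP_BY_DATE'):
        key_name = key_name.format(**{'DATE': os.path.basename(local_file_path).split('.')[0]})

    print(key_name)  # spilo/demo-cluster/log/2023-11-03/pod-0/2023-11-03.csv.gz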
