
Merge pull request #313 from aws-solutions/release/v1.4.1
Updated to v1.4.1
aijunpeng authored May 12, 2022
2 parents d9208dd + 65803bb commit d00d80b
Showing 2 changed files with 20 additions and 54 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -4,6 +4,10 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [1.4.1] - 2022-05-12
+### Fixed
+- Replaced the DescribeLogStreams API call used for getting the next sequence token with PutLogEvents API call to reduce the lambda execution time [#307](https://github.com/awslabs/aws-instance-scheduler/issues/307)
+
 ## [1.4.0] - 2021-04-26
 ### Added
 - Enable solution to be deployed as multiple stacks in the same account/region
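
The 1.4.1 entry above describes the change made to source/lambda/util/logger.py in this commit: instead of calling DescribeLogStreams up front to look up the upload sequence token, the logger now calls PutLogEvents directly and, when the token is rejected, reads the expected token from the error response. A minimal standalone sketch of that pattern follows; it assumes a plain boto3 CloudWatch Logs client, and the function name, retry count, and example values are illustrative only (the solution itself wraps its client with boto_retry, as the logger.py diff below shows).

import boto3

logs = boto3.client("logs")

def put_events_without_describe(group, stream, events, retries=5):
    # Send a batch of log events without a prior DescribeLogStreams lookup.
    kwargs = {"logGroupName": group, "logStreamName": stream, "logEvents": events}
    while retries > 0:
        try:
            return logs.put_log_events(**kwargs)
        except logs.exceptions.ResourceNotFoundException:
            # The stream does not exist yet: create it, then retry the put.
            retries -= 1
            logs.create_log_stream(logGroupName=group, logStreamName=stream)
        except logs.exceptions.InvalidSequenceTokenException as ex:
            # The rejected call already carries the token to use on the next attempt.
            retries -= 1
            kwargs["sequenceToken"] = ex.response.get("expectedSequenceToken")

# Example call (timestamps are milliseconds since the epoch; names are hypothetical):
# put_events_without_describe("/hypothetical/group", "hypothetical-stream",
#                             [{"timestamp": 1652313600000, "message": "hello"}])

In the common case this is a single API call per flush rather than a DescribeLogStreams round-trip followed by PutLogEvents, which is where the reduced Lambda execution time comes from.
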
70 changes: 16 additions & 54 deletions source/lambda/util/logger.py
@@ -180,7 +180,7 @@ def clear(self):
     @property
     def client(self):
         if self._client is None:
-            methods = ["describe_log_streams", "create_log_stream"]
+            methods = ["create_log_stream"]
             self._client = boto_retry.get_client_with_retries("logs", methods, context=self._context)
         return self._client
 
@@ -189,33 +189,6 @@ def flush(self):
         Writes all buffered messages to CloudWatch Stream
         :return:
         """
-        def get_next_log_token():
-
-            if self._log_sequence_token:
-                return self._log_sequence_token
-
-            resp = self.client.describe_log_streams_with_retries(logGroupName=self._loggroup, logStreamNamePrefix=self._logstream)
-            if "logStreams" in resp and len(resp["logStreams"]) > 0:
-                token = resp["logStreams"][0].get("uploadSequenceToken")
-                return token
-            try:
-                self.client.create_log_stream_with_retries(logGroupName=self._loggroup, logStreamName=self._logstream)
-                resp = self.client.describe_log_streams_with_retries(logGroupName=self._loggroup,
-                                                                     logStreamNamePrefix=self._logstream)
-            except Exception as e:
-                if type(e).__name__ != "ResourceAlreadyExistsException":
-                    raise e
-
-            retry_get_token_count = 0
-            while True:
-                if "logStreams" in resp and len(resp["logStreams"]) > 0:
-                    token = resp["logStreams"][0].get("uploadSequenceToken")
-                    return token
-                else:
-                    retry_get_token_count += 1
-                    if retry_get_token_count > 3:
-                        return None
-                    time.sleep(1)
 
         if len(self._buffer) == 0:
             return
@@ -226,29 +199,18 @@ def get_next_log_token():
             "logEvents": [{"timestamp": r[0], "message": r[1]} for r in self._buffer]
         }
 
-        next_token = None
-        try:
-            retries = 0
-            while True:
-                next_token = get_next_log_token()
-                if next_token is not None:
-                    put_event_args["sequenceToken"] = next_token
-                try:
-                    log_event_response = self.client.put_log_events(**put_event_args)
-                    self._log_sequence_token = log_event_response['nextSequenceToken']
-                    self._buffer = []
-                    self._cached_size = 0
-                    return
-                except Exception as ex:
-                    retries += 1
-                    if retries > 5:
-                        raise ex
-                    time.sleep(2)
-                    next_token = get_next_log_token()
-                    if next_token is not None:
-                        put_event_args["sequenceToken"] = next_token
-
-        except Exception as ex:
-            print("Error writing to logstream {} with token {} ({})".format(self._logstream, next_token, str(ex)))
-            for entry in self._buffer:
-                print (entry)
+        retries = 5
+        while retries > 0:
+            try:
+                self.client.put_log_events(**put_event_args)
+                self._buffer = []
+                self._cached_size = 0
+                return
+            except self.client.exceptions.ResourceNotFoundException:
+                retries -= 1
+                self.client.create_log_stream_with_retries(logGroupName=self._loggroup, logStreamName=self._logstream)
+            except self.client.exceptions.InvalidSequenceTokenException as ex:
+                retries -= 1
+                put_event_args["sequenceToken"] = ex.response.get("expectedSequenceToken")
+            except Exception:
+                return
