[AWS logs forwarder] Addressing closed PRs #900

Merged: 4 commits, Feb 11, 2025
14 changes: 2 additions & 12 deletions aws/logs_monitoring/README.md
@@ -25,8 +25,6 @@ For more information about sending AWS services logs with the Datadog Forwarder,

Datadog recommends using [CloudFormation](#cloudformation) to automatically install the Forwarder. You can also complete the setup process using [Terraform](#terraform) or [manually](#manual). Once installed, you can subscribe the Forwarder to log sources such as S3 buckets or CloudWatch log groups by [setting up triggers][4].
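For a CloudWatch log group, a trigger is a subscription filter that points the log group at the Forwarder's Lambda function. The following is a minimal boto3 sketch of that wiring; the region, account ID, log group, and function name are placeholders for illustration, not values from this repository:

```python
import boto3

# Placeholder names -- substitute your own log group and Forwarder ARN.
LOG_GROUP = "/aws/lambda/my-app"
FORWARDER_ARN = "arn:aws:lambda:us-east-1:123456789012:function:datadog-forwarder"

logs = boto3.client("logs")
lam = boto3.client("lambda")

# Allow CloudWatch Logs to invoke the Forwarder for this log group.
lam.add_permission(
    FunctionName=FORWARDER_ARN,
    StatementId="datadog-forwarder-my-app",
    Action="lambda:InvokeFunction",
    Principal="logs.amazonaws.com",
    SourceArn=f"arn:aws:logs:us-east-1:123456789012:log-group:{LOG_GROUP}:*",
)

# Subscribe the log group to the Forwarder; an empty pattern forwards everything.
logs.put_subscription_filter(
    logGroupName=LOG_GROUP,
    filterName="datadog-forwarder",
    filterPattern="",
    destinationArn=FORWARDER_ARN,
)
```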

{{< tabs >}}
{{% tab "CloudFormation" %}}

### CloudFormation

@@ -46,8 +44,6 @@ If you had previously enabled your AWS Integration using the [following CloudFor
[101]: https://docs.datadoghq.com/logs/guide/send-aws-services-logs-with-the-datadog-lambda-function/#set-up-triggers
[102]: https://github.com/DataDog/cloudformation-template/tree/master/aws

{{% /tab %}}
{{% tab "Terraform" %}}

### Terraform

@@ -91,8 +87,8 @@ resource "aws_cloudformation_stack" "datadog_forwarder" {
name = "datadog-forwarder"
capabilities = ["CAPABILITY_IAM", "CAPABILITY_NAMED_IAM", "CAPABILITY_AUTO_EXPAND"]
parameters = {
DdApiKeySecretArn = "REPLACE ME WITH THE SECRETS ARN",
DdSite = "<SITE>",
DdApiKeySecretArn = "REPLACE WITH DATADOG SECRETS ARN",
DdSite = "REPLACE WITH DATADOG SITE",
FunctionName = "datadog-forwarder"
}
template_url = "https://datadog-cloudformation-template.s3.amazonaws.com/aws/forwarder/latest.yaml"
@@ -106,9 +102,6 @@ resource "aws_cloudformation_stack" "datadog_forwarder" {
[103]: https://docs.datadoghq.com/logs/guide/send-aws-services-logs-with-the-datadog-lambda-function/#set-up-triggers
[104]: https://docs.datadoghq.com/getting_started/site/#access-the-datadog-site

{{% /tab %}}
{{% tab "Manual" %}}

### Manual

If you can't install the Forwarder using the provided CloudFormation template, you can install the Forwarder manually following the steps below. Feel free to open an issue or pull request to let us know if there is anything we can improve to make the template work for you.
@@ -131,9 +124,6 @@ aws lambda invoke --function-name <function-name> --payload '{"retry":"true"}' o
[103]: https://github.com/DataDog/datadog-serverless-functions/blob/029bd46e5c6d4e8b1ae647ed3b4d1917ac3cd793/aws/logs_monitoring/template.yaml#L680
[104]: https://docs.datadoghq.com/logs/guide/send-aws-services-logs-with-the-datadog-lambda-function/?tab=awsconsole#set-up-triggers

{{% /tab %}}
{{< /tabs >}}

### Upgrade to a new version

1. Find the [datadog-forwarder (if you didn't rename it)][5] CloudFormation stack. If you installed the Forwarder as part of the [Datadog AWS integration stack][6], make sure to update the nested Forwarder stack instead of the root stack.
22 changes: 14 additions & 8 deletions aws/logs_monitoring/steps/enums.py
@@ -5,6 +5,7 @@ class AwsEventSource(Enum):
APIGATEWAY = "apigateway"
APPSYNC = "appsync"
AWS = "aws"
BATCH = "batch"
BEDROCK = "bedrock"
CARBONBLACK = "carbonblack"
CLOUDFRONT = "cloudfront"
@@ -30,6 +31,7 @@ class AwsEventSource(Enum):
MSK = "msk"
MYSQL = "mysql"
NETWORKFIREWALL = "network-firewall"
OPENSEARCH = "opensearch"
POSTGRESQL = "postgresql"
REDSHIFT = "redshift"
ROUTE53 = "route53"
@@ -48,17 +50,17 @@ def __str__(self):
@staticmethod
def cloudwatch_sources():
return [
AwsEventSource.NETWORKFIREWALL,
AwsEventSource.ROUTE53,
AwsEventSource.VPC,
AwsEventSource.FARGATE,
AwsEventSource.BEDROCK,
AwsEventSource.CLOUDFRONT,
AwsEventSource.CLOUDTRAIL,
AwsEventSource.MSK,
AwsEventSource.ELASTICSEARCH,
AwsEventSource.FARGATE,
AwsEventSource.MSK,
AwsEventSource.NETWORKFIREWALL,
AwsEventSource.ROUTE53,
AwsEventSource.TRANSITGATEWAY,
AwsEventSource.VERIFIED_ACCESS,
AwsEventSource.BEDROCK,
AwsEventSource.CLOUDFRONT,
AwsEventSource.VPC,
]
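The reordered list is alphabetical by member name. A small self-check in that spirit, shown only as a sketch (adjust the import path to your checkout):

```python
# Sketch: guard against the list drifting out of alphabetical order again.
from steps.enums import AwsEventSource

names = [source.name for source in AwsEventSource.cloudwatch_sources()]
assert names == sorted(names), "cloudwatch_sources() should stay alphabetized"
```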


@@ -82,6 +84,7 @@ def __init__(self, string, event_source):
DOCDB = ("amazon_documentdb", AwsEventSource.DOCDB)
# e.g. AWSLogs/123456779121/elasticloadbalancing/us-east-1/2020/10/02/123456779121_elasticloadbalancing_us-east-1_app.alb.xxxxx.xx.xxx.xxx_x.log.gz
ELB = ("elasticloadbalancing", AwsEventSource.ELB)
GUARDDUTY = ("guardduty", AwsEventSource.GUARDDUTY)
KINESIS = ("amazon_kinesis", AwsEventSource.KINESIS)
MSK = ("amazon_msk", AwsEventSource.MSK)
NETWORKFIREWALL = ("network-firewall", AwsEventSource.NETWORKFIREWALL)
@@ -113,6 +116,7 @@ def __init__(self, string, event_source):
APIGATEWAY_3 = ("/aws/apigateway", AwsEventSource.APIGATEWAY)
# e.g. /aws/appsync/yourApiId
APPSYNC = ("/aws/appsync", AwsEventSource.APPSYNC)
BATCH = ("/aws/batch/job", AwsEventSource.BATCH)
BEDROCK = ("aws/bedrock/modelinvocations", AwsEventSource.BEDROCK)
# e.g. /aws/codebuild/my-project
CODEBUILD = ("/aws/codebuild", AwsEventSource.CODEBUILD)
@@ -128,7 +132,9 @@ def __init__(self, string, event_source):
# e.g. /aws/kinesisfirehose/dev
KINESIS = ("/aws/kinesis", AwsEventSource.KINESIS)
# e.g. /aws/lambda/helloDatadog
lAMBDA = ("/aws/lambda", AwsEventSource.LAMBDA)
LAMBDA = ("/aws/lambda", AwsEventSource.LAMBDA)
# e.g. /aws/opensearchservice/domains/my-cluster
OPENSEARCH = ("/aws/opensearchservice/domains/", AwsEventSource.OPENSEARCH)
# e.g. sns/us-east-1/123456779121/SnsTopicX
SNS = ("sns/", AwsEventSource.SNS)
SSM = ("/aws/ssm/", AwsEventSource.SSM)
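Each member above pairs a log-group or S3 key prefix with the AwsEventSource it maps to. The sketch below shows how such a table can be matched against an incoming log group name; the helper and its case-insensitive startswith matching are illustrative assumptions, not the Forwarder's actual parsing code:

```python
def resolve_source(log_group, prefix_members):
    """Return the event source for the first matching prefix, else None.

    Sketch only: assumes each enum member exposes `.string` and
    `.event_source`, mirroring the (string, event_source) tuples above,
    and that matching is case-insensitive.
    """
    normalized = log_group.lower()
    for member in prefix_members:
        if normalized.startswith(member.string.lower()):
            return member.event_source
    return None


# Under these assumptions, a log group like
# "/aws/OpenSearchService/domains/my-cluster" would resolve to
# AwsEventSource.OPENSEARCH via the "/aws/opensearchservice/domains/" prefix.
```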
Original file line number Diff line number Diff line change
@@ -11,7 +11,7 @@
},
"ddsource": "lambda",
"ddsourcecategory": "aws",
"ddtags": "forwardername:function_name,forwarder_memorysize:10,forwarder_version:4.0.2,env:none",
"ddtags": "forwardername:function_name,forwarder_memorysize:10,forwarder_version:<redacted>,env:none",
"host": "/aws/lambda/test-lambda-default-log-group",
"id": "37199773595581154154810589279545129148442535997644275712",
"lambda": {
Original file line number Diff line number Diff line change
@@ -11,7 +11,7 @@
},
"ddsource": "cloudwatch",
"ddsourcecategory": "aws",
"ddtags": "forwardername:function_name,forwarder_memorysize:10,forwarder_version:4.0.2,test_tag_key:test_tag_value",
"ddtags": "forwardername:function_name,forwarder_memorysize:10,forwarder_version:<redacted>,test_tag_key:test_tag_value",
"host": "/aws/rds/instance/datadog/postgresql",
"id": "31953106606966983378809025079804211143289615424298221568",
"message": "2021-01-02 03:04:05 UTC::@:[5306]:LOG: database system is ready to accept connections",
Original file line number Diff line number Diff line change
@@ -11,7 +11,7 @@
},
"ddsource": "stepfunction",
"ddsourcecategory": "aws",
"ddtags": "forwardername:function_name,forwarder_memorysize:10,forwarder_version:4.0.2,test_tag_key:test_tag_value,dd_step_functions_trace_enabled:true",
"ddtags": "forwardername:function_name,forwarder_memorysize:10,forwarder_version:<redacted>,test_tag_key:test_tag_value,dd_step_functions_trace_enabled:true",
"host": "arn:aws:states:us-east-1:12345678910:stateMachine:StepFunction2",
"id": "37199773595581154154810589279545129148442535997644275712",
"message": "{\"id\": \"1\",\"type\": \"ExecutionStarted\",\"details\": {\"input\": \"{}\",\"inputDetails\": {\"truncated\": \"false\"},\"roleArn\": \"arn:aws:iam::12345678910:role/service-role/StepFunctions-test-role-a0iurr4pt\"},\"previous_event_id\": \"0\",\"event_timestamp\": \"1716992192441\",\"execution_arn\": \"arn:aws:states:us-east-1:12345678910:execution:StepFunction2:ccccccc-d1da-4c38-b32c-2b6b07d713fa\",\"redrive_count\": \"0\"}",
Original file line number Diff line number Diff line change
@@ -11,7 +11,7 @@
},
"ddsource": "stepfunction",
"ddsourcecategory": "aws",
"ddtags": "forwardername:function_name,forwarder_memorysize:10,forwarder_version:4.0.2,test_tag_key:test_tag_value,dd_step_functions_trace_enabled:true",
"ddtags": "forwardername:function_name,forwarder_memorysize:10,forwarder_version:<redacted>,test_tag_key:test_tag_value,dd_step_functions_trace_enabled:true",
"host": "arn:aws:states:us-east-1:12345678910:stateMachine:StepFunction1",
"id": "37199773595581154154810589279545129148442535997644275712",
"message": "{\"id\": \"1\",\"type\": \"ExecutionStarted\",\"details\": {\"input\": \"{}\",\"inputDetails\": {\"truncated\": \"false\"},\"roleArn\": \"arn:aws:iam::12345678910:role/service-role/StepFunctions-test-role-a0iurr4pt\"},\"previous_event_id\": \"0\",\"event_timestamp\": \"1716992192441\",\"execution_arn\": \"arn:aws:states:us-east-1:12345678910:execution:StepFunction1:ccccccc-d1da-4c38-b32c-2b6b07d713fa\",\"redrive_count\": \"0\"}",
29 changes: 24 additions & 5 deletions aws/logs_monitoring/tests/test_awslogs_handler.py
@@ -5,7 +5,8 @@
import unittest
import sys
from unittest.mock import patch, MagicMock
from approvaltests.approvals import verify_as_json
from approvaltests.approvals import Options, verify_as_json
from approvaltests.scrubbers import create_regex_scrubber

sys.modules["trace_forwarder.connection"] = MagicMock()
sys.modules["datadog_lambda.wrapper"] = MagicMock()
@@ -36,6 +37,12 @@ class Context:


class TestAWSLogsHandler(unittest.TestCase):
def setUp(self):
self.scrubber = create_regex_scrubber(
r"forwarder_version:\d+\.\d+\.\d+",
"forwarder_version:<redacted>",
)

@patch("caching.cloudwatch_log_group_cache.CloudwatchLogGroupTagsCache.__init__")
def test_awslogs_handler_rds_postgresql(self, mock_cache_init):
event = {
@@ -75,7 +82,10 @@ def test_awslogs_handler_rds_postgresql(self, mock_cache_init):
)

awslogs_handler = AwsLogsHandler(context, cache_layer)
verify_as_json(list(awslogs_handler.handle(event)))
verify_as_json(
list(awslogs_handler.handle(event)),
options=Options().with_scrubber(self.scrubber),
)

@patch("caching.cloudwatch_log_group_cache.CloudwatchLogGroupTagsCache.__init__")
@patch("caching.cloudwatch_log_group_cache.send_forwarder_internal_metrics")
@@ -130,7 +140,10 @@ def test_awslogs_handler_step_functions_tags_added_properly(
cache_layer._cloudwatch_log_group_cache.get = MagicMock()

awslogs_handler = AwsLogsHandler(context, cache_layer)
verify_as_json(list(awslogs_handler.handle(event)))
verify_as_json(
list(awslogs_handler.handle(event)),
options=Options().with_scrubber(self.scrubber),
)

@patch("caching.cloudwatch_log_group_cache.CloudwatchLogGroupTagsCache.__init__")
@patch("caching.cloudwatch_log_group_cache.send_forwarder_internal_metrics")
@@ -185,7 +198,10 @@ def test_awslogs_handler_step_functions_customized_log_group(

awslogs_handler = AwsLogsHandler(context, cache_layer)
# for some reason, the call below is needed to update the context of the handler
verify_as_json(list(awslogs_handler.handle(eventFromCustomizedLogGroup)))
verify_as_json(
list(awslogs_handler.handle(eventFromCustomizedLogGroup)),
options=Options().with_scrubber(self.scrubber),
)

def test_awslogs_handler_lambda_log(self):
event = {
@@ -231,7 +247,10 @@ def test_awslogs_handler_lambda_log(self):
)

awslogs_handler = AwsLogsHandler(context, cache_layer)
verify_as_json(list(awslogs_handler.handle(event)))
verify_as_json(
list(awslogs_handler.handle(event)),
options=Options().with_scrubber(self.scrubber),
)

def test_process_lambda_logs(self):
# Non Lambda log
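The shared scrubber set up in `setUp` keeps the approved JSON stable across releases by masking the concrete Forwarder version in `ddtags` before comparison. A standalone illustration of the same substitution (the sample tag string is made up for this example, not taken from a real log):

```python
import re

from approvaltests.scrubbers import create_regex_scrubber

scrubber = create_regex_scrubber(
    r"forwarder_version:\d+\.\d+\.\d+", "forwarder_version:<redacted>"
)

# Illustrative tag string in the shape the Forwarder emits.
tags = "forwardername:function_name,forwarder_memorysize:10,forwarder_version:4.0.2,env:none"

print(scrubber(tags))
# -> forwardername:function_name,forwarder_memorysize:10,forwarder_version:<redacted>,env:none

# Equivalent plain re.sub, for reference:
print(re.sub(r"forwarder_version:\d+\.\d+\.\d+", "forwarder_version:<redacted>", tags))
```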
9 changes: 9 additions & 0 deletions aws/logs_monitoring/tests/test_parsing.py
@@ -295,6 +295,15 @@ def test_carbon_black_event(self):
str(AwsEventSource.CARBONBLACK),
)

def test_opensearch_event(self):
self.assertEqual(
parse_event_source(
{"awslogs": "logs"},
"/aws/OpenSearchService/domains/my-opensearch-cluster/ES_APPLICATION_LOGS",
),
str(AwsEventSource.OPENSEARCH),
)

def test_cloudwatch_source_if_none_found(self):
self.assertEqual(
parse_event_source({"awslogs": "logs"}, ""), str(AwsEventSource.CLOUDWATCH)