diff --git a/aws/logs_monitoring/README.md b/aws/logs_monitoring/README.md
index 3d1bc48ba..417213896 100644
--- a/aws/logs_monitoring/README.md
+++ b/aws/logs_monitoring/README.md
@@ -25,8 +25,6 @@ For more information about sending AWS services logs with the Datadog Forwarder,
 Datadog recommends using [CloudFormation](#cloudformation) to automatically install the Forwarder. You can also complete the setup process using [Terraform](#terraform) or [manually](#manual). Once installed, you can subscribe the Forwarder to log sources such as S3 buckets or CloudWatch log groups by [setting up triggers][4].
 
-{{< tabs >}}
-{{% tab "CloudFormation" %}}
 
 ### CloudFormation
 
@@ -46,8 +44,6 @@ If you had previously enabled your AWS Integration using the [following CloudFor
 [101]: https://docs.datadoghq.com/logs/guide/send-aws-services-logs-with-the-datadog-lambda-function/#set-up-triggers
 [102]: https://github.com/DataDog/cloudformation-template/tree/master/aws
 
-{{% /tab %}}
-{{% tab "Terraform" %}}
 
 ### Terraform
 
@@ -91,8 +87,8 @@ resource "aws_cloudformation_stack" "datadog_forwarder" {
   name = "datadog-forwarder"
   capabilities = ["CAPABILITY_IAM", "CAPABILITY_NAMED_IAM", "CAPABILITY_AUTO_EXPAND"]
   parameters = {
-    DdApiKeySecretArn = "REPLACE ME WITH THE SECRETS ARN",
-    DdSite = "",
+    DdApiKeySecretArn = "REPLACE WITH DATADOG SECRETS ARN",
+    DdSite = "REPLACE WITH DATADOG SITE",
     FunctionName = "datadog-forwarder"
   }
   template_url = "https://datadog-cloudformation-template.s3.amazonaws.com/aws/forwarder/latest.yaml"
@@ -106,9 +102,6 @@ resource "aws_cloudformation_stack" "datadog_forwarder" {
 [103]: https://docs.datadoghq.com/logs/guide/send-aws-services-logs-with-the-datadog-lambda-function/#set-up-triggers
 [104]: https://docs.datadoghq.com/getting_started/site/#access-the-datadog-site
 
-{{% /tab %}}
-{{% tab "Manual" %}}
-
 ### Manual
 
 If you can't install the Forwarder using the provided CloudFormation template, you can install the Forwarder manually following the steps below. Feel free to open an issue or pull request to let us know if there is anything we can improve to make the template work for you.
@@ -131,9 +124,6 @@ aws lambda invoke --function-name --payload '{"retry":"true"}' o
 [103]: https://github.com/DataDog/datadog-serverless-functions/blob/029bd46e5c6d4e8b1ae647ed3b4d1917ac3cd793/aws/logs_monitoring/template.yaml#L680
 [104]: https://docs.datadoghq.com/logs/guide/send-aws-services-logs-with-the-datadog-lambda-function/?tab=awsconsole#set-up-triggers
 
-{{% /tab %}}
-{{< /tabs >}}
-
 ### Upgrade to a new version
 
 1. Find the [datadog-forwarder (if you didn't rename it)][5] CloudFormation stack. If you installed the Forwarder as part of the [Datadog AWS integration stack][6], make sure to update the nested Forwarder stack instead of the root stack.
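Reviewer note: the new placeholder values map one-to-one onto plain CloudFormation parameters, so the Terraform example above can be cross-checked against a direct stack creation. A minimal boto3 sketch, not part of this PR; the secret ARN and site values below are illustrative placeholders (for `DdSite`, use your Datadog site, for example `datadoghq.com` or `datadoghq.eu`):

```python
# Sketch only: the boto3 equivalent of the aws_cloudformation_stack resource above.
import boto3

cloudformation = boto3.client("cloudformation")
cloudformation.create_stack(
    StackName="datadog-forwarder",
    TemplateURL="https://datadog-cloudformation-template.s3.amazonaws.com/aws/forwarder/latest.yaml",
    Capabilities=["CAPABILITY_IAM", "CAPABILITY_NAMED_IAM", "CAPABILITY_AUTO_EXPAND"],
    Parameters=[
        # Placeholder: replace with the ARN of the Secrets Manager secret holding your Datadog API key.
        {"ParameterKey": "DdApiKeySecretArn", "ParameterValue": "arn:aws:secretsmanager:us-east-1:123456789012:secret:DdApiKeySecret-example"},
        # Placeholder: replace with your Datadog site.
        {"ParameterKey": "DdSite", "ParameterValue": "datadoghq.com"},
        {"ParameterKey": "FunctionName", "ParameterValue": "datadog-forwarder"},
    ],
)
```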
diff --git a/aws/logs_monitoring/steps/enums.py b/aws/logs_monitoring/steps/enums.py
index a9bd61010..c4bab765f 100644
--- a/aws/logs_monitoring/steps/enums.py
+++ b/aws/logs_monitoring/steps/enums.py
@@ -5,6 +5,7 @@ class AwsEventSource(Enum):
     APIGATEWAY = "apigateway"
     APPSYNC = "appsync"
     AWS = "aws"
+    BATCH = "batch"
     BEDROCK = "bedrock"
     CARBONBLACK = "carbonblack"
     CLOUDFRONT = "cloudfront"
@@ -30,6 +31,7 @@ class AwsEventSource(Enum):
     MSK = "msk"
     MYSQL = "mysql"
     NETWORKFIREWALL = "network-firewall"
+    OPENSEARCH = "opensearch"
     POSTGRESQL = "postgresql"
     REDSHIFT = "redshift"
     ROUTE53 = "route53"
@@ -48,17 +50,17 @@ def __str__(self):
     @staticmethod
     def cloudwatch_sources():
         return [
-            AwsEventSource.NETWORKFIREWALL,
-            AwsEventSource.ROUTE53,
-            AwsEventSource.VPC,
-            AwsEventSource.FARGATE,
+            AwsEventSource.BEDROCK,
+            AwsEventSource.CLOUDFRONT,
             AwsEventSource.CLOUDTRAIL,
-            AwsEventSource.MSK,
             AwsEventSource.ELASTICSEARCH,
+            AwsEventSource.FARGATE,
+            AwsEventSource.MSK,
+            AwsEventSource.NETWORKFIREWALL,
+            AwsEventSource.ROUTE53,
             AwsEventSource.TRANSITGATEWAY,
             AwsEventSource.VERIFIED_ACCESS,
-            AwsEventSource.BEDROCK,
-            AwsEventSource.CLOUDFRONT,
+            AwsEventSource.VPC,
         ]
@@ -82,6 +84,7 @@ def __init__(self, string, event_source):
     DOCDB = ("amazon_documentdb", AwsEventSource.DOCDB)
     # e.g. AWSLogs/123456779121/elasticloadbalancing/us-east-1/2020/10/02/123456779121_elasticloadbalancing_us-east-1_app.alb.xxxxx.xx.xxx.xxx_x.log.gz
     ELB = ("elasticloadbalancing", AwsEventSource.ELB)
+    GUARDDUTY = ("guardduty", AwsEventSource.GUARDDUTY)
     KINESIS = ("amazon_kinesis", AwsEventSource.KINESIS)
     MSK = ("amazon_msk", AwsEventSource.MSK)
     NETWORKFIREWALL = ("network-firewall", AwsEventSource.NETWORKFIREWALL)
@@ -113,6 +116,7 @@ def __init__(self, string, event_source):
     APIGATEWAY_3 = ("/aws/apigateway", AwsEventSource.APIGATEWAY)
     # e.g. /aws/appsync/yourApiId
     APPSYNC = ("/aws/appsync", AwsEventSource.APPSYNC)
+    BATCH = ("/aws/batch/job", AwsEventSource.BATCH)
     BEDROCK = ("aws/bedrock/modelinvocations", AwsEventSource.BEDROCK)
     # e.g. /aws/codebuild/my-project
     CODEBUILD = ("/aws/codebuild", AwsEventSource.CODEBUILD)
@@ -128,7 +132,9 @@ def __init__(self, string, event_source):
     # e.g. /aws/kinesisfirehose/dev
     KINESIS = ("/aws/kinesis", AwsEventSource.KINESIS)
     # e.g. /aws/lambda/helloDatadog
-    lAMBDA = ("/aws/lambda", AwsEventSource.LAMBDA)
+    LAMBDA = ("/aws/lambda", AwsEventSource.LAMBDA)
+    # e.g. /aws/opensearchservice/domains/my-cluster
+    OPENSEARCH = ("/aws/opensearchservice/domains/", AwsEventSource.OPENSEARCH)
     # e.g. sns/us-east-1/123456779121/SnsTopicX
     SNS = ("sns/", AwsEventSource.SNS)
     SSM = ("/aws/ssm/", AwsEventSource.SSM)
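Reviewer note: the new `BATCH` and `OPENSEARCH` prefixes (and the `lAMBDA` → `LAMBDA` rename) feed the log-group-to-source lookup. The snippet below is an illustrative stand-in for that lookup, not the Forwarder's actual parser, which this diff does not touch; the lowercasing step is an assumption suggested by the new OpenSearch test, which passes a mixed-case log group name.

```python
# Illustrative only: mimics how the new CloudWatch log group prefixes are
# expected to resolve to a Datadog source. The real matching lives in the
# Forwarder's parsing step, not in this sketch.
CLOUDWATCH_PREFIXES = {
    "/aws/batch/job": "batch",
    "/aws/lambda": "lambda",
    "/aws/opensearchservice/domains/": "opensearch",
}


def guess_source(log_group: str) -> str:
    lowered = log_group.lower()  # assumption: log groups are lowercased before matching
    for prefix, source in CLOUDWATCH_PREFIXES.items():
        if lowered.startswith(prefix):
            return source
    return "cloudwatch"  # fallback, mirroring test_cloudwatch_source_if_none_found


print(guess_source("/aws/OpenSearchService/domains/my-opensearch-cluster/ES_APPLICATION_LOGS"))  # opensearch
print(guess_source("/aws/batch/job/my-job-queue"))  # batch
```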
diff --git a/aws/logs_monitoring/tests/approved_files/TestAWSLogsHandler.test_awslogs_handler_lambda_log.approved.json b/aws/logs_monitoring/tests/approved_files/TestAWSLogsHandler.test_awslogs_handler_lambda_log.approved.json
index 0af818ac4..da85111ba 100644
--- a/aws/logs_monitoring/tests/approved_files/TestAWSLogsHandler.test_awslogs_handler_lambda_log.approved.json
+++ b/aws/logs_monitoring/tests/approved_files/TestAWSLogsHandler.test_awslogs_handler_lambda_log.approved.json
@@ -11,7 +11,7 @@
     },
     "ddsource": "lambda",
     "ddsourcecategory": "aws",
-    "ddtags": "forwardername:function_name,forwarder_memorysize:10,forwarder_version:4.0.2,env:none",
+    "ddtags": "forwardername:function_name,forwarder_memorysize:10,forwarder_version:,env:none",
     "host": "/aws/lambda/test-lambda-default-log-group",
     "id": "37199773595581154154810589279545129148442535997644275712",
     "lambda": {
diff --git a/aws/logs_monitoring/tests/approved_files/TestAWSLogsHandler.test_awslogs_handler_rds_postgresql.approved.json b/aws/logs_monitoring/tests/approved_files/TestAWSLogsHandler.test_awslogs_handler_rds_postgresql.approved.json
index 04f1026fa..a72d147b8 100644
--- a/aws/logs_monitoring/tests/approved_files/TestAWSLogsHandler.test_awslogs_handler_rds_postgresql.approved.json
+++ b/aws/logs_monitoring/tests/approved_files/TestAWSLogsHandler.test_awslogs_handler_rds_postgresql.approved.json
@@ -11,7 +11,7 @@
     },
     "ddsource": "cloudwatch",
     "ddsourcecategory": "aws",
-    "ddtags": "forwardername:function_name,forwarder_memorysize:10,forwarder_version:4.0.2,test_tag_key:test_tag_value",
+    "ddtags": "forwardername:function_name,forwarder_memorysize:10,forwarder_version:,test_tag_key:test_tag_value",
     "host": "/aws/rds/instance/datadog/postgresql",
     "id": "31953106606966983378809025079804211143289615424298221568",
     "message": "2021-01-02 03:04:05 UTC::@:[5306]:LOG: database system is ready to accept connections",
diff --git a/aws/logs_monitoring/tests/approved_files/TestAWSLogsHandler.test_awslogs_handler_step_functions_customized_log_group.approved.json b/aws/logs_monitoring/tests/approved_files/TestAWSLogsHandler.test_awslogs_handler_step_functions_customized_log_group.approved.json
index 980bd178d..cdbcbb982 100644
--- a/aws/logs_monitoring/tests/approved_files/TestAWSLogsHandler.test_awslogs_handler_step_functions_customized_log_group.approved.json
+++ b/aws/logs_monitoring/tests/approved_files/TestAWSLogsHandler.test_awslogs_handler_step_functions_customized_log_group.approved.json
@@ -11,7 +11,7 @@
     },
     "ddsource": "stepfunction",
     "ddsourcecategory": "aws",
-    "ddtags": "forwardername:function_name,forwarder_memorysize:10,forwarder_version:4.0.2,test_tag_key:test_tag_value,dd_step_functions_trace_enabled:true",
+    "ddtags": "forwardername:function_name,forwarder_memorysize:10,forwarder_version:,test_tag_key:test_tag_value,dd_step_functions_trace_enabled:true",
     "host": "arn:aws:states:us-east-1:12345678910:stateMachine:StepFunction2",
     "id": "37199773595581154154810589279545129148442535997644275712",
     "message": "{\"id\": \"1\",\"type\": \"ExecutionStarted\",\"details\": {\"input\": \"{}\",\"inputDetails\": {\"truncated\": \"false\"},\"roleArn\": \"arn:aws:iam::12345678910:role/service-role/StepFunctions-test-role-a0iurr4pt\"},\"previous_event_id\": \"0\",\"event_timestamp\": \"1716992192441\",\"execution_arn\": \"arn:aws:states:us-east-1:12345678910:execution:StepFunction2:ccccccc-d1da-4c38-b32c-2b6b07d713fa\",\"redrive_count\": \"0\"}",
\"arn:aws:states:us-east-1:12345678910:execution:StepFunction2:ccccccc-d1da-4c38-b32c-2b6b07d713fa\",\"redrive_count\": \"0\"}", diff --git a/aws/logs_monitoring/tests/approved_files/TestAWSLogsHandler.test_awslogs_handler_step_functions_tags_added_properly.approved.json b/aws/logs_monitoring/tests/approved_files/TestAWSLogsHandler.test_awslogs_handler_step_functions_tags_added_properly.approved.json index c3cd9a973..b1365a054 100644 --- a/aws/logs_monitoring/tests/approved_files/TestAWSLogsHandler.test_awslogs_handler_step_functions_tags_added_properly.approved.json +++ b/aws/logs_monitoring/tests/approved_files/TestAWSLogsHandler.test_awslogs_handler_step_functions_tags_added_properly.approved.json @@ -11,7 +11,7 @@ }, "ddsource": "stepfunction", "ddsourcecategory": "aws", - "ddtags": "forwardername:function_name,forwarder_memorysize:10,forwarder_version:4.0.2,test_tag_key:test_tag_value,dd_step_functions_trace_enabled:true", + "ddtags": "forwardername:function_name,forwarder_memorysize:10,forwarder_version:,test_tag_key:test_tag_value,dd_step_functions_trace_enabled:true", "host": "arn:aws:states:us-east-1:12345678910:stateMachine:StepFunction1", "id": "37199773595581154154810589279545129148442535997644275712", "message": "{\"id\": \"1\",\"type\": \"ExecutionStarted\",\"details\": {\"input\": \"{}\",\"inputDetails\": {\"truncated\": \"false\"},\"roleArn\": \"arn:aws:iam::12345678910:role/service-role/StepFunctions-test-role-a0iurr4pt\"},\"previous_event_id\": \"0\",\"event_timestamp\": \"1716992192441\",\"execution_arn\": \"arn:aws:states:us-east-1:12345678910:execution:StepFunction1:ccccccc-d1da-4c38-b32c-2b6b07d713fa\",\"redrive_count\": \"0\"}", diff --git a/aws/logs_monitoring/tests/test_awslogs_handler.py b/aws/logs_monitoring/tests/test_awslogs_handler.py index e62c87c7f..1afe58bad 100644 --- a/aws/logs_monitoring/tests/test_awslogs_handler.py +++ b/aws/logs_monitoring/tests/test_awslogs_handler.py @@ -5,7 +5,8 @@ import unittest import sys from unittest.mock import patch, MagicMock -from approvaltests.approvals import verify_as_json +from approvaltests.approvals import Options, verify_as_json +from approvaltests.scrubbers import create_regex_scrubber sys.modules["trace_forwarder.connection"] = MagicMock() sys.modules["datadog_lambda.wrapper"] = MagicMock() @@ -36,6 +37,12 @@ class Context: class TestAWSLogsHandler(unittest.TestCase): + def setUp(self): + self.scrubber = create_regex_scrubber( + r"forwarder_version:\d+\.\d+\.\d+", + "forwarder_version:", + ) + @patch("caching.cloudwatch_log_group_cache.CloudwatchLogGroupTagsCache.__init__") def test_awslogs_handler_rds_postgresql(self, mock_cache_init): event = { @@ -75,7 +82,10 @@ def test_awslogs_handler_rds_postgresql(self, mock_cache_init): ) awslogs_handler = AwsLogsHandler(context, cache_layer) - verify_as_json(list(awslogs_handler.handle(event))) + verify_as_json( + list(awslogs_handler.handle(event)), + options=Options().with_scrubber(self.scrubber), + ) @patch("caching.cloudwatch_log_group_cache.CloudwatchLogGroupTagsCache.__init__") @patch("caching.cloudwatch_log_group_cache.send_forwarder_internal_metrics") @@ -130,7 +140,10 @@ def test_awslogs_handler_step_functions_tags_added_properly( cache_layer._cloudwatch_log_group_cache.get = MagicMock() awslogs_handler = AwsLogsHandler(context, cache_layer) - verify_as_json(list(awslogs_handler.handle(event))) + verify_as_json( + list(awslogs_handler.handle(event)), + options=Options().with_scrubber(self.scrubber), + ) 
@patch("caching.cloudwatch_log_group_cache.CloudwatchLogGroupTagsCache.__init__") @patch("caching.cloudwatch_log_group_cache.send_forwarder_internal_metrics") @@ -185,7 +198,10 @@ def test_awslogs_handler_step_functions_customized_log_group( awslogs_handler = AwsLogsHandler(context, cache_layer) # for some reasons, the below two are needed to update the context of the handler - verify_as_json(list(awslogs_handler.handle(eventFromCustomizedLogGroup))) + verify_as_json( + list(awslogs_handler.handle(eventFromCustomizedLogGroup)), + options=Options().with_scrubber(self.scrubber), + ) def test_awslogs_handler_lambda_log(self): event = { @@ -231,7 +247,10 @@ def test_awslogs_handler_lambda_log(self): ) awslogs_handler = AwsLogsHandler(context, cache_layer) - verify_as_json(list(awslogs_handler.handle(event))) + verify_as_json( + list(awslogs_handler.handle(event)), + options=Options().with_scrubber(self.scrubber), + ) def test_process_lambda_logs(self): # Non Lambda log diff --git a/aws/logs_monitoring/tests/test_parsing.py b/aws/logs_monitoring/tests/test_parsing.py index bdae1d53e..3afe25915 100644 --- a/aws/logs_monitoring/tests/test_parsing.py +++ b/aws/logs_monitoring/tests/test_parsing.py @@ -295,6 +295,15 @@ def test_carbon_black_event(self): str(AwsEventSource.CARBONBLACK), ) + def test_opensearch_event(self): + self.assertEqual( + parse_event_source( + {"awslogs": "logs"}, + "/aws/OpenSearchService/domains/my-opensearch-cluster/ES_APPLICATION_LOGS", + ), + str(AwsEventSource.OPENSEARCH), + ) + def test_cloudwatch_source_if_none_found(self): self.assertEqual( parse_event_source({"awslogs": "logs"}, ""), str(AwsEventSource.CLOUDWATCH)