diff --git a/.flake8 b/.flake8 index 111952283..e55e479dd 100644 --- a/.flake8 +++ b/.flake8 @@ -21,3 +21,6 @@ exclude = venv*/ target __pycache__ + mock_collector_service_pb2.py + mock_collector_service_pb2.pyi + mock_collector_service_pb2_grpc.py diff --git a/.isort.cfg b/.isort.cfg index 4b689e9f6..16f113e75 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -13,7 +13,7 @@ profile=black ; ) ; docs: https://github.com/timothycrosley/isort#multi-line-output-modes multi_line_output=3 -skip=target +skip=target,mock_collector_service_pb2_grpc.py,mock_collector_service_pb2.py,mock_collector_service_pb2.pyi skip_glob=**/gen/*,.venv*/*,venv*/*,.tox/* known_first_party=opentelemetry,amazon known_third_party=psutil,pytest diff --git a/.pylintrc b/.pylintrc index ac66fdbfb..94c9656e4 100644 --- a/.pylintrc +++ b/.pylintrc @@ -7,7 +7,7 @@ extension-pkg-whitelist=cassandra # Add list of files or directories to be excluded. They should be base names, not # paths. -ignore=CVS,gen,Dockerfile,docker-compose.yml,README.md,requirements.txt +ignore=CVS,gen,Dockerfile,docker-compose.yml,README.md,requirements.txt,mock_collector_service_pb2.py,mock_collector_service_pb2.pyi,mock_collector_service_pb2_grpc.py # Add files or directories matching the regex patterns to be excluded. The # regex matches against base names, not paths. diff --git a/contract-tests/README.md b/contract-tests/README.md new file mode 100644 index 000000000..f3f087dae --- /dev/null +++ b/contract-tests/README.md @@ -0,0 +1,42 @@ +# Introduction + +This directory contains contract tests that exist to prevent regressions. They cover: +* [OpenTelemetry semantic conventions](https://github.com/open-telemetry/semantic-conventions/). +* Application Signals-specific attributes. + +# How it works + +The tests here rely on the auto-instrumentation of a sample application, which sends telemetry signals to a mock collector. The tests use the data collected by the mock collector to perform assertions and validate that the contracts are being respected. + +# Types of tested frameworks + +The frameworks and libraries that are tested in the contract tests should fall into the following categories (more can be added on demand): +* http-servers - Applications meant to test HTTP servers (e.g. Django). +* http-clients - Applications meant to test HTTP clients (e.g. requests). +* aws-sdk - Applications meant to test the AWS SDK (e.g. botocore). +* database-clients - Applications meant to test database clients (e.g. psycopg2). + +When testing a framework, we will create a sample application. The sample applications are stored following this convention: `contract-tests/images/applications/`. + +# Adding tests for a new library or framework + +The steps to add a new test for a library or framework are: +* Create a sample application. + * The sample application should be created in `contract-tests/images/applications/`. + * Implement a `pyproject.toml` (to ensure code style checks run), `Dockerfile`, and `requirements.txt` file. See the `requests` application for an example of this. +* Add a test class for the sample application. + * The test class should be created in `contract-tests/tests/amazon/`. + * The test class should extend `ContractTestBase` (defined in `contract_test_base.py`). + +# How to run the tests locally + +Prerequisites: +* Have `docker` installed and running - verify by running the `docker` command.
+* Ensure the `aws_opentelemetry_distro` wheel file exists in the `aws-otel-python-instrumentation/dist` folder. + +From the `aws-otel-python-instrumentation` directory, execute: + +``` +./contract-tests/set-up-contract-tests.sh +pytest contract-tests/tests +``` \ No newline at end of file diff --git a/contract-tests/images/applications/requests/Dockerfile b/contract-tests/images/applications/requests/Dockerfile new file mode 100644 index 000000000..dcb068b84 --- /dev/null +++ b/contract-tests/images/applications/requests/Dockerfile @@ -0,0 +1,14 @@ +# Meant to be built from the aws-otel-python-instrumentation root directory (see contract-tests/set-up-contract-tests.sh). +# Assumes existence of dist/aws_opentelemetry_distro--py3-none-any.whl. +# Assumes filename of aws_opentelemetry_distro--py3-none-any.whl is passed in as "DISTRO" arg. +FROM public.ecr.aws/docker/library/python:3.11-slim +WORKDIR /requests +ARG DISTRO +COPY ./dist/$DISTRO /requests +COPY ./contract-tests/images/applications/requests /requests + +RUN pip install -r requirements.txt && pip install ${DISTRO} --force-reinstall +RUN opentelemetry-bootstrap -a install + +# Without `-u`, logs will be buffered and `wait_for_logs` will never return. +CMD ["opentelemetry-instrument", "python", "-u", "./requests_server.py"] \ No newline at end of file diff --git a/contract-tests/images/applications/requests/pyproject.toml b/contract-tests/images/applications/requests/pyproject.toml new file mode 100644 index 000000000..7e010fb84 --- /dev/null +++ b/contract-tests/images/applications/requests/pyproject.toml @@ -0,0 +1,6 @@ +[project] +name = "requests-server" +description = "Simple server that relies on requests library" +version = "1.0.0" +license = "Apache-2.0" +requires-python = ">=3.8" \ No newline at end of file diff --git a/contract-tests/images/applications/requests/requests_server.py b/contract-tests/images/applications/requests/requests_server.py new file mode 100644 index 000000000..af4b222b5 --- /dev/null +++ b/contract-tests/images/applications/requests/requests_server.py @@ -0,0 +1,62 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
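+# Sample application used by the requests contract tests. Requests whose path contains the "backend" network +# alias are answered directly with a canned status code (success -> 200, error -> 400, fault -> 500), while any +# other path is forwarded to the backend host with the `requests` library, which is the instrumented call under test.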
+# SPDX-License-Identifier: Apache-2.0 +import atexit +from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer +from threading import Thread + +from requests import Response, request +from typing_extensions import override + +_PORT: int = 8080 +_NETWORK_ALIAS: str = "backend" +_SUCCESS: str = "success" +_ERROR: str = "error" +_FAULT: str = "fault" + + +class RequestHandler(BaseHTTPRequestHandler): + @override + # pylint: disable=invalid-name + def do_GET(self): + self.handle_request("GET") + + @override + # pylint: disable=invalid-name + def do_POST(self): + self.handle_request("POST") + + def handle_request(self, method: str): + status_code: int + if self.in_path(_NETWORK_ALIAS): + if self.in_path(_SUCCESS): + status_code = 200 + elif self.in_path(_ERROR): + status_code = 400 + elif self.in_path(_FAULT): + status_code = 500 + else: + status_code = 404 + else: + url: str = f"http://{_NETWORK_ALIAS}:{_PORT}/{_NETWORK_ALIAS}{self.path}" + response: Response = request(method, url, timeout=20) + status_code = response.status_code + self.send_response_only(status_code) + self.end_headers() + + def in_path(self, sub_path: str): + return sub_path in self.path + + +def main() -> None: + server_address: tuple[str, int] = ("0.0.0.0", _PORT) + request_handler_class: type = RequestHandler + requests_server: ThreadingHTTPServer = ThreadingHTTPServer(server_address, request_handler_class) + atexit.register(requests_server.shutdown) + server_thread: Thread = Thread(target=requests_server.serve_forever) + server_thread.start() + print("Ready") + server_thread.join() + + +if __name__ == "__main__": + main() diff --git a/contract-tests/images/applications/requests/requirements.txt b/contract-tests/images/applications/requests/requirements.txt new file mode 100644 index 000000000..8bd8feb8c --- /dev/null +++ b/contract-tests/images/applications/requests/requirements.txt @@ -0,0 +1,4 @@ +opentelemetry-distro==0.43b0 +opentelemetry-exporter-otlp-proto-grpc==1.22.0 +typing-extensions==4.9.0 +requests==2.31.0 \ No newline at end of file diff --git a/contract-tests/images/mock-collector/Dockerfile b/contract-tests/images/mock-collector/Dockerfile new file mode 100644 index 000000000..6c0c96cc2 --- /dev/null +++ b/contract-tests/images/mock-collector/Dockerfile @@ -0,0 +1,8 @@ +FROM public.ecr.aws/docker/library/python:3.11-slim +WORKDIR /mock-collector +COPY . /mock-collector + +RUN pip install -r requirements.txt + +# Without `-u`, logs will be buffered and `wait_for_logs` will never return. +CMD ["python", "-u", "./mock_collector_server.py"] \ No newline at end of file diff --git a/contract-tests/images/mock-collector/README.md b/contract-tests/images/mock-collector/README.md new file mode 100644 index 000000000..75c6cb775 --- /dev/null +++ b/contract-tests/images/mock-collector/README.md @@ -0,0 +1,9 @@ +### Overview + +MockCollector mimics the behaviour of the actual OTEL collector, but stores export requests to be retrieved by contract tests. + +### Protos +To build protos: +1. Run `pip install grpcio grpcio-tools` +2. Change directory to `aws-otel-python-instrumentation/contract-tests/images/mock-collector/` +3. Run: `python -m grpc_tools.protoc -I./protos --python_out=. --pyi_out=. --grpc_python_out=. 
./protos/mock_collector_service.proto` \ No newline at end of file diff --git a/contract-tests/images/mock-collector/mock_collector_client.py b/contract-tests/images/mock-collector/mock_collector_client.py new file mode 100644 index 000000000..21389cead --- /dev/null +++ b/contract-tests/images/mock-collector/mock_collector_client.py @@ -0,0 +1,143 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +from datetime import datetime, timedelta +from logging import Logger, getLogger +from time import sleep +from typing import Callable, List, Set, TypeVar + +from google.protobuf.internal.containers import RepeatedScalarFieldContainer +from grpc import Channel, insecure_channel +from mock_collector_service_pb2 import ( + ClearRequest, + GetMetricsRequest, + GetMetricsResponse, + GetTracesRequest, + GetTracesResponse, +) +from mock_collector_service_pb2_grpc import MockCollectorServiceStub + +from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2 import ExportMetricsServiceRequest +from opentelemetry.proto.collector.trace.v1.trace_service_pb2 import ExportTraceServiceRequest +from opentelemetry.proto.metrics.v1.metrics_pb2 import Metric, ResourceMetrics, ScopeMetrics +from opentelemetry.proto.trace.v1.trace_pb2 import ResourceSpans, ScopeSpans, Span + +_logger: Logger = getLogger(__name__) +_TIMEOUT_DELAY: timedelta = timedelta(seconds=20) +_WAIT_INTERVAL_SEC: float = 0.1 +T: TypeVar = TypeVar("T") + + +class ResourceScopeSpan: + """Data class used to correlate resources, scope and telemetry signals. + + Correlate resource, scope and span + """ + + def __init__(self, resource_spans: ResourceSpans, scope_spans: ScopeSpans, span: Span): + self.resource_spans: ResourceSpans = resource_spans + self.scope_spans: ScopeSpans = scope_spans + self.span: Span = span + + +class ResourceScopeMetric: + """Data class used to correlate resources, scope and telemetry signals. + + Correlate resource, scope and metric + """ + + def __init__(self, resource_metrics: ResourceMetrics, scope_metrics: ScopeMetrics, metric: Metric): + self.resource_metrics: ResourceMetrics = resource_metrics + self.scope_metrics: ScopeMetrics = scope_metrics + self.metric: Metric = metric + + +class MockCollectorClient: + """The mock collector client is used to interact with the Mock collector image, used in the tests.""" + + def __init__(self, mock_collector_address: str, mock_collector_port: str): + channel: Channel = insecure_channel(f"{mock_collector_address}:{mock_collector_port}") + self.client: MockCollectorServiceStub = MockCollectorServiceStub(channel) + + def clear_signals(self) -> None: + """Clear all the signals in the backend collector""" + self.client.clear(ClearRequest()) + + def get_traces(self) -> List[ResourceScopeSpan]: + """Get all traces that are currently stored in the collector + + Returns: + List of `ResourceScopeSpan` which is essentially a flat list containing all the spans and their related + scope and resources. 
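+ The collector is polled via `_wait_for_content` until a non-zero number of traces has been exported and the + count is stable across consecutive polls; a `RuntimeError` is raised if that does not happen before the timeout.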
+ """ + + def get_export() -> List[ExportTraceServiceRequest]: + response: GetTracesResponse = self.client.get_traces(GetTracesRequest()) + serialized_traces: RepeatedScalarFieldContainer[bytes] = response.traces + return list(map(ExportTraceServiceRequest.FromString, serialized_traces)) + + def wait_condition(exported: List[ExportTraceServiceRequest], current: List[ExportTraceServiceRequest]) -> bool: + return 0 < len(exported) == len(current) + + exported_traces: List[ExportTraceServiceRequest] = _wait_for_content(get_export, wait_condition) + spans: List[ResourceScopeSpan] = [] + for exported_trace in exported_traces: + for resource_span in exported_trace.resource_spans: + for scope_span in resource_span.scope_spans: + for span in scope_span.spans: + spans.append(ResourceScopeSpan(resource_span, scope_span, span)) + return spans + + def get_metrics(self, present_metrics: Set[str]) -> List[ResourceScopeMetric]: + """Get all metrics that are currently stored in the mock collector. + + Returns: + List of `ResourceScopeMetric` which is a flat list containing all metrics and their related scope and + resources. + """ + + present_metrics_lower: Set[str] = {s.lower() for s in present_metrics} + + def get_export() -> List[ExportMetricsServiceRequest]: + response: GetMetricsResponse = self.client.get_metrics(GetMetricsRequest()) + serialized_metrics: RepeatedScalarFieldContainer[bytes] = response.metrics + return list(map(ExportMetricsServiceRequest.FromString, serialized_metrics)) + + def wait_condition( + exported: List[ExportMetricsServiceRequest], current: List[ExportMetricsServiceRequest] + ) -> bool: + received_metrics: Set[str] = set() + for exported_metric in current: + for resource_metric in exported_metric.resource_metrics: + for scope_metric in resource_metric.scope_metrics: + for metric in scope_metric.metrics: + received_metrics.add(metric.name.lower()) + return 0 < len(exported) == len(current) and present_metrics_lower.issubset(received_metrics) + + exported_metrics: List[ExportMetricsServiceRequest] = _wait_for_content(get_export, wait_condition) + metrics: List[ResourceScopeMetric] = [] + for exported_metric in exported_metrics: + for resource_metric in exported_metric.resource_metrics: + for scope_metric in resource_metric.scope_metrics: + for metric in scope_metric.metrics: + metrics.append(ResourceScopeMetric(resource_metric, scope_metric, metric)) + return metrics + + +def _wait_for_content(get_export: Callable[[], List[T]], wait_condition: Callable[[List[T], List[T]], bool]) -> List[T]: + # Verify that there is no more data to be received + deadline: datetime = datetime.now() + _TIMEOUT_DELAY + exported: List[T] = [] + + while deadline > datetime.now(): + try: + current_exported: List[T] = get_export() + if wait_condition(exported, current_exported): + return current_exported + exported = current_exported + + sleep(_WAIT_INTERVAL_SEC) + # pylint: disable=broad-exception-caught + except Exception: + _logger.exception("Error while reading content") + + raise RuntimeError("Timeout waiting for content") diff --git a/contract-tests/images/mock-collector/mock_collector_metrics_service.py b/contract-tests/images/mock-collector/mock_collector_metrics_service.py new file mode 100644 index 000000000..b20f6811b --- /dev/null +++ b/contract-tests/images/mock-collector/mock_collector_metrics_service.py @@ -0,0 +1,31 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +from queue import Queue +from typing import List + +from grpc import ServicerContext +from typing_extensions import override + +from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2 import ( + ExportMetricsServiceRequest, + ExportMetricsServiceResponse, +) +from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2_grpc import MetricsServiceServicer + + +class MockCollectorMetricsService(MetricsServiceServicer): + _export_requests: Queue = Queue(maxsize=-1) + + def get_requests(self) -> List[ExportMetricsServiceRequest]: + with self._export_requests.mutex: + return list(self._export_requests.queue) + + def clear_requests(self) -> None: + with self._export_requests.mutex: + self._export_requests.queue.clear() + + @override + # pylint: disable=invalid-name + def Export(self, request: ExportMetricsServiceRequest, context: ServicerContext) -> ExportMetricsServiceResponse: + self._export_requests.put(request) + return ExportMetricsServiceResponse() diff --git a/contract-tests/images/mock-collector/mock_collector_server.py b/contract-tests/images/mock-collector/mock_collector_server.py new file mode 100644 index 000000000..21d60a1e6 --- /dev/null +++ b/contract-tests/images/mock-collector/mock_collector_server.py @@ -0,0 +1,35 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import atexit +from concurrent.futures import ThreadPoolExecutor + +from grpc import server +from mock_collector_metrics_service import MockCollectorMetricsService +from mock_collector_service import MockCollectorService +from mock_collector_service_pb2_grpc import add_MockCollectorServiceServicer_to_server +from mock_collector_trace_service import MockCollectorTraceService + +from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2_grpc import add_MetricsServiceServicer_to_server +from opentelemetry.proto.collector.trace.v1.trace_service_pb2_grpc import add_TraceServiceServicer_to_server + + +def main() -> None: + mock_collector_server: server = server(thread_pool=ThreadPoolExecutor(max_workers=10)) + mock_collector_server.add_insecure_port("0.0.0.0:4315") + + trace_collector: MockCollectorTraceService = MockCollectorTraceService() + metrics_collector: MockCollectorMetricsService = MockCollectorMetricsService() + mock_collector: MockCollectorService = MockCollectorService(trace_collector, metrics_collector) + + add_TraceServiceServicer_to_server(trace_collector, mock_collector_server) + add_MetricsServiceServicer_to_server(metrics_collector, mock_collector_server) + add_MockCollectorServiceServicer_to_server(mock_collector, mock_collector_server) + + mock_collector_server.start() + atexit.register(mock_collector_server.stop, None) + print("Ready") + mock_collector_server.wait_for_termination(None) + + +if __name__ == "__main__": + main() diff --git a/contract-tests/images/mock-collector/mock_collector_service.py b/contract-tests/images/mock-collector/mock_collector_service.py new file mode 100644 index 000000000..bc5fe0e95 --- /dev/null +++ b/contract-tests/images/mock-collector/mock_collector_service.py @@ -0,0 +1,52 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +from typing import List + +from grpc import ServicerContext +from mock_collector_metrics_service import MockCollectorMetricsService +from mock_collector_service_pb2 import ( + ClearRequest, + ClearResponse, + GetMetricsRequest, + GetMetricsResponse, + GetTracesRequest, + GetTracesResponse, +) +from mock_collector_service_pb2_grpc import MockCollectorServiceServicer +from mock_collector_trace_service import MockCollectorTraceService +from typing_extensions import override + +from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2 import ExportMetricsServiceRequest +from opentelemetry.proto.collector.trace.v1.trace_service_pb2 import ExportTraceServiceRequest + + +class MockCollectorService(MockCollectorServiceServicer): + """Implements clear, get_traces, and get_metrics for the mock collector. + + Relies on metrics and trace collector services to collect the telemetry. + """ + + def __init__(self, trace_collector: MockCollectorTraceService, metrics_collector: MockCollectorMetricsService): + super().__init__() + self.trace_collector: MockCollectorTraceService = trace_collector + self.metrics_collector: MockCollectorMetricsService = metrics_collector + + @override + def clear(self, request: ClearRequest, context: ServicerContext) -> ClearResponse: + self.trace_collector.clear_requests() + self.metrics_collector.clear_requests() + return ClearResponse() + + @override + def get_traces(self, request: GetTracesRequest, context: ServicerContext) -> GetTracesResponse: + trace_requests: List[ExportTraceServiceRequest] = self.trace_collector.get_requests() + traces: List[bytes] = list(map(ExportTraceServiceRequest.SerializeToString, trace_requests)) + response: GetTracesResponse = GetTracesResponse(traces=traces) + return response + + @override + def get_metrics(self, request: GetMetricsRequest, context: ServicerContext) -> GetMetricsResponse: + metric_requests: List[ExportMetricsServiceRequest] = self.metrics_collector.get_requests() + metrics: List[bytes] = list(map(ExportMetricsServiceRequest.SerializeToString, metric_requests)) + response: GetMetricsResponse = GetMetricsResponse(metrics=metrics) + return response diff --git a/contract-tests/images/mock-collector/mock_collector_service_pb2.py b/contract-tests/images/mock-collector/mock_collector_service_pb2.py new file mode 100644 index 000000000..2e519e573 --- /dev/null +++ b/contract-tests/images/mock-collector/mock_collector_service_pb2.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: mock_collector_service.proto +# Protobuf Python Version: 4.25.0 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cmock_collector_service.proto\"\x0e\n\x0c\x43learRequest\"\x0f\n\rClearResponse\"\x12\n\x10GetTracesRequest\"#\n\x11GetTracesResponse\x12\x0e\n\x06traces\x18\x01 \x03(\x0c\"\x13\n\x11GetMetricsRequest\"%\n\x12GetMetricsResponse\x12\x0f\n\x07metrics\x18\x01 \x03(\x0c\x32\xb1\x01\n\x14MockCollectorService\x12(\n\x05\x63lear\x12\r.ClearRequest\x1a\x0e.ClearResponse\"\x00\x12\x35\n\nget_traces\x12\x11.GetTracesRequest\x1a\x12.GetTracesResponse\"\x00\x12\x38\n\x0bget_metrics\x12\x12.GetMetricsRequest\x1a\x13.GetMetricsResponse\"\x00\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'mock_collector_service_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_CLEARREQUEST']._serialized_start=32 + _globals['_CLEARREQUEST']._serialized_end=46 + _globals['_CLEARRESPONSE']._serialized_start=48 + _globals['_CLEARRESPONSE']._serialized_end=63 + _globals['_GETTRACESREQUEST']._serialized_start=65 + _globals['_GETTRACESREQUEST']._serialized_end=83 + _globals['_GETTRACESRESPONSE']._serialized_start=85 + _globals['_GETTRACESRESPONSE']._serialized_end=120 + _globals['_GETMETRICSREQUEST']._serialized_start=122 + _globals['_GETMETRICSREQUEST']._serialized_end=141 + _globals['_GETMETRICSRESPONSE']._serialized_start=143 + _globals['_GETMETRICSRESPONSE']._serialized_end=180 + _globals['_MOCKCOLLECTORSERVICE']._serialized_start=183 + _globals['_MOCKCOLLECTORSERVICE']._serialized_end=360 +# @@protoc_insertion_point(module_scope) diff --git a/contract-tests/images/mock-collector/mock_collector_service_pb2.pyi b/contract-tests/images/mock-collector/mock_collector_service_pb2.pyi new file mode 100644 index 000000000..76bd1d24c --- /dev/null +++ b/contract-tests/images/mock-collector/mock_collector_service_pb2.pyi @@ -0,0 +1,34 @@ +from google.protobuf.internal import containers as _containers +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from typing import ClassVar as _ClassVar, Iterable as _Iterable, Optional as _Optional + +DESCRIPTOR: _descriptor.FileDescriptor + +class ClearRequest(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + +class ClearResponse(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + +class GetTracesRequest(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + +class GetTracesResponse(_message.Message): + __slots__ = ("traces",) + TRACES_FIELD_NUMBER: _ClassVar[int] + traces: _containers.RepeatedScalarFieldContainer[bytes] + def __init__(self, traces: _Optional[_Iterable[bytes]] = ...) -> None: ... + +class GetMetricsRequest(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + +class GetMetricsResponse(_message.Message): + __slots__ = ("metrics",) + METRICS_FIELD_NUMBER: _ClassVar[int] + metrics: _containers.RepeatedScalarFieldContainer[bytes] + def __init__(self, metrics: _Optional[_Iterable[bytes]] = ...) 
-> None: ... diff --git a/contract-tests/images/mock-collector/mock_collector_service_pb2_grpc.py b/contract-tests/images/mock-collector/mock_collector_service_pb2_grpc.py new file mode 100644 index 000000000..d25f8fa6b --- /dev/null +++ b/contract-tests/images/mock-collector/mock_collector_service_pb2_grpc.py @@ -0,0 +1,138 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +import mock_collector_service_pb2 as mock__collector__service__pb2 + + +class MockCollectorServiceStub(object): + """Service definition for mock collector + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.clear = channel.unary_unary( + '/MockCollectorService/clear', + request_serializer=mock__collector__service__pb2.ClearRequest.SerializeToString, + response_deserializer=mock__collector__service__pb2.ClearResponse.FromString, + ) + self.get_traces = channel.unary_unary( + '/MockCollectorService/get_traces', + request_serializer=mock__collector__service__pb2.GetTracesRequest.SerializeToString, + response_deserializer=mock__collector__service__pb2.GetTracesResponse.FromString, + ) + self.get_metrics = channel.unary_unary( + '/MockCollectorService/get_metrics', + request_serializer=mock__collector__service__pb2.GetMetricsRequest.SerializeToString, + response_deserializer=mock__collector__service__pb2.GetMetricsResponse.FromString, + ) + + +class MockCollectorServiceServicer(object): + """Service definition for mock collector + """ + + def clear(self, request, context): + """Clears all traces and metrics captured by mock collector, so it can be used for multiple tests. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def get_traces(self, request, context): + """Returns traces exported to mock collector + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def get_metrics(self, request, context): + """Returns metrics exported to mock collector + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_MockCollectorServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'clear': grpc.unary_unary_rpc_method_handler( + servicer.clear, + request_deserializer=mock__collector__service__pb2.ClearRequest.FromString, + response_serializer=mock__collector__service__pb2.ClearResponse.SerializeToString, + ), + 'get_traces': grpc.unary_unary_rpc_method_handler( + servicer.get_traces, + request_deserializer=mock__collector__service__pb2.GetTracesRequest.FromString, + response_serializer=mock__collector__service__pb2.GetTracesResponse.SerializeToString, + ), + 'get_metrics': grpc.unary_unary_rpc_method_handler( + servicer.get_metrics, + request_deserializer=mock__collector__service__pb2.GetMetricsRequest.FromString, + response_serializer=mock__collector__service__pb2.GetMetricsResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'MockCollectorService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. 
+class MockCollectorService(object): + """Service definition for mock collector + """ + + @staticmethod + def clear(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/MockCollectorService/clear', + mock__collector__service__pb2.ClearRequest.SerializeToString, + mock__collector__service__pb2.ClearResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def get_traces(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/MockCollectorService/get_traces', + mock__collector__service__pb2.GetTracesRequest.SerializeToString, + mock__collector__service__pb2.GetTracesResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def get_metrics(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/MockCollectorService/get_metrics', + mock__collector__service__pb2.GetMetricsRequest.SerializeToString, + mock__collector__service__pb2.GetMetricsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/contract-tests/images/mock-collector/mock_collector_trace_service.py b/contract-tests/images/mock-collector/mock_collector_trace_service.py new file mode 100644 index 000000000..68ed9a035 --- /dev/null +++ b/contract-tests/images/mock-collector/mock_collector_trace_service.py @@ -0,0 +1,31 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
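+# In-memory OTLP TraceService used by the mock collector: exported ExportTraceServiceRequests are stored on a +# queue so contract tests can later read (`get_requests`) or reset (`clear_requests`) the captured spans.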
+# SPDX-License-Identifier: Apache-2.0 +from queue import Queue +from typing import List + +from grpc import ServicerContext +from typing_extensions import override + +from opentelemetry.proto.collector.trace.v1.trace_service_pb2 import ( + ExportTraceServiceRequest, + ExportTraceServiceResponse, +) +from opentelemetry.proto.collector.trace.v1.trace_service_pb2_grpc import TraceServiceServicer + + +class MockCollectorTraceService(TraceServiceServicer): + _export_requests: Queue = Queue(maxsize=-1) + + def get_requests(self) -> List[ExportTraceServiceRequest]: + with self._export_requests.mutex: + return list(self._export_requests.queue) + + def clear_requests(self) -> None: + with self._export_requests.mutex: + self._export_requests.queue.clear() + + @override + # pylint: disable=invalid-name + def Export(self, request: ExportTraceServiceRequest, context: ServicerContext) -> ExportTraceServiceResponse: + self._export_requests.put(request) + return ExportTraceServiceResponse() diff --git a/contract-tests/images/mock-collector/protos/mock_collector_service.proto b/contract-tests/images/mock-collector/protos/mock_collector_service.proto new file mode 100644 index 000000000..d7187c44d --- /dev/null +++ b/contract-tests/images/mock-collector/protos/mock_collector_service.proto @@ -0,0 +1,35 @@ +syntax = "proto3"; + +// Service definition for mock collector +service MockCollectorService { + // Clears all traces and metrics captured by mock collector, so it can be used for multiple tests. + rpc clear (ClearRequest) returns (ClearResponse) {} + + // Returns traces exported to mock collector + rpc get_traces (GetTracesRequest) returns (GetTracesResponse) {} + + // Returns metrics exported to mock collector + rpc get_metrics (GetMetricsRequest) returns (GetMetricsResponse) {} +} + +// Empty request for clear rpc. +message ClearRequest {} + +// Empty response for clear rpc. +message ClearResponse {} + +// Empty request for get traces rpc. +message GetTracesRequest {} + +// Response for get traces rpc - all traces in byte form. +message GetTracesResponse{ + repeated bytes traces = 1; +} + +// Empty request for get metrics rpc. +message GetMetricsRequest {} + +// Response for get metrics rpc - all metrics in byte form. 
+message GetMetricsResponse { + repeated bytes metrics = 1; +} \ No newline at end of file diff --git a/contract-tests/images/mock-collector/pyproject.toml b/contract-tests/images/mock-collector/pyproject.toml new file mode 100644 index 000000000..80b42ad7e --- /dev/null +++ b/contract-tests/images/mock-collector/pyproject.toml @@ -0,0 +1,24 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "mock-collector" +description = "Mock Collector used by contract tests for AWS OTEL Python Instrumentation" +version = "1.0.0" +license = "Apache-2.0" +requires-python = ">=3.8" + +dependencies = [ + "grpcio ~= 1.60.0", + "opentelemetry-proto==1.22.0", + "opentelemetry-sdk==1.22.0", + "protobuf==4.25.2", + "typing-extensions==4.9.0" +] + +[tool.hatch.build.targets.sdist] +include = ["*.py"] + +[tool.hatch.build.targets.wheel] +include = ["*.py"] diff --git a/contract-tests/images/mock-collector/requirements.txt b/contract-tests/images/mock-collector/requirements.txt new file mode 100644 index 000000000..c499e7423 --- /dev/null +++ b/contract-tests/images/mock-collector/requirements.txt @@ -0,0 +1,5 @@ +grpcio==1.60.1 +opentelemetry-proto==1.22.0 +opentelemetry-sdk==1.22.0 +protobuf==4.25.2 +typing-extensions==4.9.0 \ No newline at end of file diff --git a/contract-tests/set-up-contract-tests.sh b/contract-tests/set-up-contract-tests.sh new file mode 100755 index 000000000..6a62073e6 --- /dev/null +++ b/contract-tests/set-up-contract-tests.sh @@ -0,0 +1,54 @@ +#!/bin/bash +# Check that the script is being run from the aws-otel-python-instrumentation dir +current_path=`pwd` +current_dir="${current_path##*/}" +if [ "$current_dir" != "aws-otel-python-instrumentation" ]; then + echo "Please run from aws-otel-python-instrumentation dir" + exit 1 +fi + +# Remove old whl files (excluding distro whl) +rm -rf dist/mock_collector* +rm -rf dist/contract_tests* + +# Create mock-collector image +cd contract-tests/images/mock-collector +docker build . -t aws-appsignals-mock-collector-python +if [ $? -ne 0 ]; then + echo "Docker build for mock collector failed" + exit 1 +fi + +# Find and store aws_opentelemetry_distro whl file +cd ../../../dist +DISTRO=(aws_opentelemetry_distro-*-py3-none-any.whl) +if [ "$DISTRO" = "aws_opentelemetry_distro-*-py3-none-any.whl" ]; then + echo "Could not find aws_opentelemetry_distro whl file in dist dir." + exit 1 +fi + +# Create application images +cd .. +for dir in contract-tests/images/applications/* +do + application="${dir##*/}" + docker build . -t aws-appsignals-tests-${application}-app -f ${dir}/Dockerfile --build-arg="DISTRO=${DISTRO}" + if [ $? -ne 0 ]; then + echo "Docker build for ${application} application failed" + exit 1 + fi +done + +# Build and install mock-collector +cd contract-tests/images/mock-collector +python3 -m build --outdir ../../../dist +cd ../../../dist +pip install mock_collector-1.0.0-py3-none-any.whl --force-reinstall + +# Build and install contract-tests +cd ../contract-tests/tests +python3 -m build --outdir ../../dist +cd ../../dist +# --force-reinstall causes `ERROR: No matching distribution found for mock-collector==1.0.0`, but uninstalling and reinstalling works pretty reliably.
+pip uninstall contract-tests -y +pip install contract_tests-1.0.0-py3-none-any.whl \ No newline at end of file diff --git a/contract-tests/tests/pyproject.toml b/contract-tests/tests/pyproject.toml new file mode 100644 index 000000000..fc85fd2b8 --- /dev/null +++ b/contract-tests/tests/pyproject.toml @@ -0,0 +1,34 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "contract-tests" +description = "Contract tests for AWS OTEL Python Instrumentation" +version = "1.0.0" +license = "Apache-2.0" +requires-python = ">=3.8" + +dependencies = [ + "opentelemetry-proto==1.22.0", + "opentelemetry-sdk==1.22.0", + "testcontainers==3.7.1", + "grpcio==1.60.0", + "docker==7.0.0", + "mock-collector==1.0.0" +] + +[project.optional-dependencies] +test = [] + +[tool.hatch.build.targets.sdist] +include = ["/test"] + +[tool.hatch.build.targets.wheel] +packages = ["test/amazon"] + +[tool.pytest.ini_options] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s)" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" \ No newline at end of file diff --git a/contract-tests/tests/test/amazon/base/contract_test_base.py b/contract-tests/tests/test/amazon/base/contract_test_base.py new file mode 100644 index 000000000..ee3ef1e05 --- /dev/null +++ b/contract-tests/tests/test/amazon/base/contract_test_base.py @@ -0,0 +1,115 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +from logging import INFO, Logger, getLogger +from typing import Dict, List +from unittest import TestCase + +from docker import DockerClient +from docker.models.networks import Network, NetworkCollection +from mock_collector_client import MockCollectorClient +from testcontainers.core.container import DockerContainer +from testcontainers.core.waiting_utils import wait_for_logs +from typing_extensions import override + +_logger: Logger = getLogger(__name__) +_logger.setLevel(INFO) + +_MOCK_COLLECTOR_ALIAS: str = "collector" +_MOCK_COLLECTOR_NAME: str = "aws-appsignals-mock-collector-python" +_MOCK_COLLECTOR_PORT: int = 4315 +_NETWORK_NAME: str = "aws-appsignals-network" + +_MOCK_COLLECTOR: DockerContainer = ( + DockerContainer(_MOCK_COLLECTOR_NAME).with_exposed_ports(_MOCK_COLLECTOR_PORT).with_name(_MOCK_COLLECTOR_NAME) +) +_NETWORK: Network = NetworkCollection(client=DockerClient()).create(_NETWORK_NAME) + + +class ContractTestBase(TestCase): + """Base class for implementing a contract test. + + This class will create all the boilerplate necessary to run a contract test. It will: 1.Create a mock collector + container that receives telemetry data of the application being tested. 2. Create an application container which + will be used to exercise the library under test. + + Several methods are provided that can be overridden to customize the test scenario. 
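+ For example, `get_application_image_name` must be overridden to return the name of the application image built + by set-up-contract-tests.sh, and `get_application_extra_environment_variables` or + `get_application_network_aliases` can be overridden to further customize the application container.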
+ """ + + _mock_collector_client: MockCollectorClient + _application: DockerContainer + + @classmethod + @override + def setUpClass(cls) -> None: + _MOCK_COLLECTOR.start() + wait_for_logs(_MOCK_COLLECTOR, "Ready", timeout=20) + _NETWORK.connect(_MOCK_COLLECTOR_NAME, aliases=[_MOCK_COLLECTOR_ALIAS]) + + @classmethod + @override + def tearDownClass(cls) -> None: + _logger.info("MockCollector stdout") + _logger.info(_MOCK_COLLECTOR.get_logs()[0].decode()) + _logger.info("MockCollector stderr") + _logger.info(_MOCK_COLLECTOR.get_logs()[1].decode()) + _MOCK_COLLECTOR.stop() + _NETWORK.remove() + + @override + def setUp(self) -> None: + self._application: DockerContainer = ( + DockerContainer(self.get_application_image_name()) + .with_exposed_ports(self.get_application_port()) + .with_env("OTEL_METRIC_EXPORT_INTERVAL", "100") + .with_env("OTEL_SMP_ENABLED", "true") + .with_env("OTEL_METRICS_EXPORTER", "none") + .with_env("OTEL_BSP_SCHEDULE_DELAY", "1") + .with_env("OTEL_AWS_SMP_EXPORTER_ENDPOINT", f"http://collector:{_MOCK_COLLECTOR_PORT}") + .with_env("OTEL_EXPORTER_OTLP_TRACES_ENDPOINT", f"http://collector:{_MOCK_COLLECTOR_PORT}") + .with_env("OTEL_RESOURCE_ATTRIBUTES", self.get_application_otel_resource_attributes()) + .with_env("OTEL_TRACES_SAMPLER", "always_on") + .with_name(self.get_application_image_name()) + ) + + extra_env: Dict[str, str] = self.get_application_extra_environment_variables() + for key in extra_env: + self._application.with_env(key, extra_env.get(key)) + self._application.start() + wait_for_logs(self._application, self.get_application_wait_pattern(), timeout=20) + _NETWORK.connect(self.get_application_image_name(), aliases=self.get_application_network_aliases()) + + self._mock_collector_client: MockCollectorClient = MockCollectorClient( + _MOCK_COLLECTOR.get_container_host_ip(), _MOCK_COLLECTOR.get_exposed_port(_MOCK_COLLECTOR_PORT) + ) + + @override + def tearDown(self) -> None: + _logger.info("Application stdout") + _logger.info(self._application.get_logs()[0].decode()) + _logger.info("Application stderr") + _logger.info(self._application.get_logs()[1].decode()) + self._application.stop() + self._mock_collector_client.clear_signals() + + # pylint: disable=no-self-use + # Methods that should be overridden in subclasses + def get_application_port(self) -> int: + return 8080 + + def get_application_extra_environment_variables(self) -> Dict[str, str]: + return {} + + def get_application_network_aliases(self) -> List[str]: + return [] + + def get_application_image_name(self) -> str: + return None + + def get_application_wait_pattern(self) -> str: + return "Ready" + + def get_application_otel_service_name(self) -> str: + return self.get_application_image_name() + + def get_application_otel_resource_attributes(self) -> str: + return "service.name=" + self.get_application_otel_service_name() diff --git a/contract-tests/tests/test/amazon/requests/requests_test.py b/contract-tests/tests/test/amazon/requests/requests_test.py new file mode 100644 index 000000000..1b182a86b --- /dev/null +++ b/contract-tests/tests/test/amazon/requests/requests_test.py @@ -0,0 +1,193 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +from typing import Dict, List + +from mock_collector_client import ResourceScopeMetric, ResourceScopeSpan +from requests import Response, request +from typing_extensions import override + +from amazon.base.contract_test_base import ContractTestBase +from amazon.utils.app_signals_constants import ( + AWS_LOCAL_OPERATION, + AWS_LOCAL_SERVICE, + AWS_REMOTE_OPERATION, + AWS_REMOTE_SERVICE, + AWS_SPAN_KIND, + ERROR_METRIC, + FAULT_METRIC, + LATENCY_METRIC, +) +from opentelemetry.proto.common.v1.common_pb2 import AnyValue, KeyValue +from opentelemetry.proto.metrics.v1.metrics_pb2 import ExponentialHistogramDataPoint, Metric +from opentelemetry.proto.trace.v1.trace_pb2 import Span +from opentelemetry.semconv.trace import SpanAttributes + + +class RequestsTest(ContractTestBase): + @override + def get_application_image_name(self) -> str: + return "aws-appsignals-tests-requests-app" + + @override + def get_application_network_aliases(self) -> List[str]: + """ + This will be the target hostname of the clients making http requests in the application image, so that they + don't use localhost. + """ + return ["backend"] + + @override + def get_application_extra_environment_variables(self) -> Dict[str, str]: + """ + This does not appear to do anything, as it does not seem that OTEL supports peer service for Python. Keeping + for consistency with Java contract tests at this time. + """ + return {"OTEL_INSTRUMENTATION_COMMON_PEER_SERVICE_MAPPING": "backend=backend:8080"} + + def test_success(self) -> None: + self.do_test_requests("success", "GET", 200, 0, 0) + + def test_error(self) -> None: + self.do_test_requests("error", "GET", 400, 1, 0) + + def test_fault(self) -> None: + self.do_test_requests("fault", "GET", 500, 0, 1) + + def test_success_post(self) -> None: + self.do_test_requests("success/postmethod", "POST", 200, 0, 0) + + def test_error_post(self) -> None: + self.do_test_requests("error/postmethod", "POST", 400, 1, 0) + + def test_fault_post(self) -> None: + self.do_test_requests("fault/postmethod", "POST", 500, 0, 1) + + def do_test_requests( + self, path: str, method: str, status_code: int, expected_error: int, expected_fault: int + ) -> None: + address: str = self._application.get_container_host_ip() + port: str = self._application.get_exposed_port(self.get_application_port()) + url: str = f"http://{address}:{port}/{path}" + response: Response = request(method, url, timeout=20) + + self.assertEqual(status_code, response.status_code) + + resource_scope_spans: List[ResourceScopeSpan] = self._mock_collector_client.get_traces() + self._assert_aws_span_attributes(resource_scope_spans, method, path) + self._assert_semantic_conventions_span_attributes(resource_scope_spans, method, path, status_code) + + metrics: List[ResourceScopeMetric] = self._mock_collector_client.get_metrics( + {LATENCY_METRIC, ERROR_METRIC, FAULT_METRIC} + ) + self._assert_metric_attributes(metrics, method, path, LATENCY_METRIC, 5000) + self._assert_metric_attributes(metrics, method, path, ERROR_METRIC, expected_error) + self._assert_metric_attributes(metrics, method, path, FAULT_METRIC, expected_fault) + + def _assert_aws_span_attributes( + self, resource_scope_spans: List[ResourceScopeSpan], method: str, path: str + ) -> None: + target_spans: List[Span] = [] + for resource_scope_span in resource_scope_spans: + # pylint: disable=no-member + if resource_scope_span.span.kind == Span.SPAN_KIND_CLIENT: + target_spans.append(resource_scope_span.span) + + self.assertEqual(len(target_spans), 1) 
+ self._assert_aws_attributes(target_spans[0].attributes, method, path) + + def _assert_aws_attributes(self, attributes_list: List[KeyValue], method: str, endpoint: str) -> None: + attributes_dict: Dict[str, AnyValue] = self._get_attributes_dict(attributes_list) + self._assert_str_attribute(attributes_dict, AWS_LOCAL_SERVICE, self.get_application_otel_service_name()) + # InternalOperation as OTEL does not instrument the basic server we are using, so the client span is a local + # root. + self._assert_str_attribute(attributes_dict, AWS_LOCAL_OPERATION, "InternalOperation") + # TODO: This should be "backend:8080", but isn't because requests instrumentation is not populating peer + # attributes + self._assert_str_attribute(attributes_dict, AWS_REMOTE_SERVICE, "UnknownRemoteService") + self._assert_str_attribute(attributes_dict, AWS_REMOTE_OPERATION, f"{method} /backend") + # See comment above AWS_LOCAL_OPERATION + self._assert_str_attribute(attributes_dict, AWS_SPAN_KIND, "LOCAL_ROOT") + + def _get_attributes_dict(self, attributes_list: List[KeyValue]) -> Dict[str, AnyValue]: + attributes_dict: Dict[str, AnyValue] = {} + for attribute in attributes_list: + key: str = attribute.key + value: AnyValue = attribute.value + if key in attributes_dict: + old_value: AnyValue = attributes_dict[key] + self.fail(f"Attribute {key} unexpectedly duplicated. Value 1: {old_value} Value 2: {value}") + attributes_dict[key] = value + return attributes_dict + + def _assert_str_attribute(self, attributes_dict: Dict[str, AnyValue], key: str, expected_value: str): + self.assertIn(key, attributes_dict) + actual_value: AnyValue = attributes_dict[key] + self.assertIsNotNone(actual_value) + self.assertEqual(expected_value, actual_value.string_value) + + def _assert_int_attribute(self, attributes_dict: Dict[str, AnyValue], key: str, expected_value: int) -> None: + actual_value: AnyValue = attributes_dict[key] + self.assertIsNotNone(actual_value) + self.assertEqual(expected_value, actual_value.int_value) + + def _assert_semantic_conventions_span_attributes( + self, resource_scope_spans: List[ResourceScopeSpan], method: str, path: str, status_code: int + ) -> None: + target_spans: List[Span] = [] + for resource_scope_span in resource_scope_spans: + # pylint: disable=no-member + if resource_scope_span.span.kind == Span.SPAN_KIND_CLIENT: + target_spans.append(resource_scope_span.span) + + self.assertEqual(len(target_spans), 1) + self.assertEqual(target_spans[0].name, method) + self._assert_semantic_conventions_attributes(target_spans[0].attributes, method, path, status_code) + + def _assert_semantic_conventions_attributes( + self, attributes_list: List[KeyValue], method: str, endpoint: str, status_code: int + ) -> None: + attributes_dict: Dict[str, AnyValue] = self._get_attributes_dict(attributes_list) + # TODO: requests instrumentation is not populating net peer attributes + # self._assert_str_attribute(attributes_dict, SpanAttributes.NET_PEER_NAME, "backend") + # self._assert_int_attribute(attributes_dict, SpanAttributes.NET_PEER_PORT, 8080) + self._assert_int_attribute(attributes_dict, SpanAttributes.HTTP_STATUS_CODE, status_code) + self._assert_str_attribute(attributes_dict, SpanAttributes.HTTP_URL, f"http://backend:8080/backend/{endpoint}") + self._assert_str_attribute(attributes_dict, SpanAttributes.HTTP_METHOD, method) + # TODO: request instrumentation is not respecting PEER_SERVICE + # self._assert_str_attribute(attributes_dict, SpanAttributes.PEER_SERVICE, "backend:8080") + + def _assert_metric_attributes( + self, + 
resource_scope_metrics: List[ResourceScopeMetric], + method: str, + path: str, + metric_name: str, + expected_sum: int, + ) -> None: + target_metrics: List[Metric] = [] + for resource_scope_metric in resource_scope_metrics: + if resource_scope_metric.metric.name.lower() == metric_name.lower(): + target_metrics.append(resource_scope_metric.metric) + + self.assertEqual(len(target_metrics), 1) + target_metric: Metric = target_metrics[0] + dp_list: List[ExponentialHistogramDataPoint] = target_metric.exponential_histogram.data_points + + self.assertEqual(len(dp_list), 2) + dp: ExponentialHistogramDataPoint = dp_list[0] + if len(dp_list[1].attributes) > len(dp_list[0].attributes): + dp = dp_list[1] + attribute_dict: Dict[str, AnyValue] = self._get_attributes_dict(dp.attributes) + self._assert_str_attribute(attribute_dict, AWS_LOCAL_SERVICE, self.get_application_otel_service_name()) + # See comment on AWS_LOCAL_OPERATION in _assert_aws_attributes + self._assert_str_attribute(attribute_dict, AWS_LOCAL_OPERATION, "InternalOperation") + # See comment on AWS_REMOTE_SERVICE in _assert_aws_attributes + self._assert_str_attribute(attribute_dict, AWS_REMOTE_SERVICE, "UnknownRemoteService") + self._assert_str_attribute(attribute_dict, AWS_REMOTE_OPERATION, f"{method} /backend") + self._assert_str_attribute(attribute_dict, AWS_SPAN_KIND, "CLIENT") + + actual_sum: float = dp.sum + if metric_name == LATENCY_METRIC: + self.assertTrue(0 < actual_sum < expected_sum) + else: + self.assertEqual(actual_sum, expected_sum) diff --git a/contract-tests/tests/test/amazon/utils/app_signals_constants.py b/contract-tests/tests/test/amazon/utils/app_signals_constants.py new file mode 100644 index 000000000..9cf9ce029 --- /dev/null +++ b/contract-tests/tests/test/amazon/utils/app_signals_constants.py @@ -0,0 +1,18 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Constants for attributes and metric names defined in AppSignals. +""" + +# Metric names +LATENCY_METRIC: str = "latency" +ERROR_METRIC: str = "error" +FAULT_METRIC: str = "fault" + +# Attribute names +AWS_LOCAL_SERVICE: str = "aws.local.service" +AWS_LOCAL_OPERATION: str = "aws.local.operation" +AWS_REMOTE_SERVICE: str = "aws.remote.service" +AWS_REMOTE_OPERATION: str = "aws.remote.operation" +AWS_REMOTE_TARGET: str = "aws.remote.target" +AWS_SPAN_KIND: str = "aws.span.kind" diff --git a/pyproject.toml b/pyproject.toml index 55ec8d784..e752714b8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,2 +1,3 @@ [tool.black] line-length = 120 +extend-exclude = '''.*pb2.*''' # Exclude grpc-created files