Commit
Merge pull request #23 from monocle2ai/kshitiz/infra_azure_component
Added attributes for azure infra components
kshitiz-okahu authored Aug 12, 2024
2 parents ed593f9 + 3790081 commit fde3add
Showing 7 changed files with 51 additions and 5 deletions.
3 changes: 2 additions & 1 deletion pyproject.toml
@@ -42,7 +42,8 @@ dev = [
'pytest==8.0.0',
'llama-index==0.10.30',
'llama-index-embeddings-huggingface==0.2.0',
'llama-index-vector-stores-chroma==0.1.9'
'llama-index-vector-stores-chroma==0.1.9',
'parameterized==0.9.0'
]

[project.urls]
16 changes: 16 additions & 0 deletions src/monocle_apptrace/constants.py
@@ -0,0 +1,16 @@
# Azure environment constants
AZURE_ML_ENDPOINT_ENV_NAME = "AZUREML_ENTRY_SCRIPT"
AZURE_FUNCTION_WORKER_ENV_NAME = "FUNCTIONS_WORKER_RUNTIME"
AZURE_APP_SERVICE_ENV_NAME = "WEBSITE_SITE_NAME"

# Azure naming reference can be found here
# https://learn.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/resource-abbreviations
AZURE_FUNCTION_NAME = "azure.func"
AZURE_APP_SERVICE_NAME = "azure.asp"
AZURE_ML_SERVICE_NAME = "azure.mlw"

azure_service_map = {
AZURE_ML_ENDPOINT_ENV_NAME: AZURE_ML_SERVICE_NAME,
AZURE_APP_SERVICE_ENV_NAME: AZURE_APP_SERVICE_NAME,
AZURE_FUNCTION_WORKER_ENV_NAME: AZURE_FUNCTION_NAME
}
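The azure_service_map above pairs each marker environment variable with its abbreviated service name. As an illustration only (the shipped update_span_with_infra_name in utils.py below uses explicit if/elif checks rather than this map), the hosting service could be resolved like this; detect_azure_service is a hypothetical helper, not part of the PR:

# Illustrative sketch: resolve the hosting Azure service by scanning the
# environment for any of the known marker variables from constants.py.
import os
from typing import Optional

from monocle_apptrace.constants import azure_service_map


def detect_azure_service() -> Optional[str]:
    for env_name, service_name in azure_service_map.items():
        if env_name in os.environ:
            return service_name  # e.g. "azure.func" under Azure Functions
    return None
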
11 changes: 11 additions & 0 deletions src/monocle_apptrace/utils.py
@@ -1,6 +1,9 @@
import logging
import json
from importlib import import_module
import os
from opentelemetry.trace import Span
from monocle_apptrace.constants import AZURE_APP_SERVICE_ENV_NAME, AZURE_APP_SERVICE_NAME, AZURE_FUNCTION_NAME, AZURE_FUNCTION_WORKER_ENV_NAME, AZURE_ML_ENDPOINT_ENV_NAME, AZURE_ML_SERVICE_NAME

def set_span_attribute(span, name, value):
if value is not None:
@@ -60,3 +63,11 @@ def load_wrapper_from_config(config_file_path: str, module_name: str = None):
def get_wrapper_method(package_name: str, method_name: str):
wrapper_module = import_module("monocle_apptrace." + package_name)
return getattr(wrapper_module, method_name)

def update_span_with_infra_name(span: Span, span_key: str):
if AZURE_FUNCTION_WORKER_ENV_NAME in os.environ:
span.set_attribute(span_key, AZURE_FUNCTION_NAME)
elif AZURE_APP_SERVICE_ENV_NAME in os.environ:
span.set_attribute(span_key, AZURE_APP_SERVICE_NAME)
elif AZURE_ML_ENDPOINT_ENV_NAME in os.environ:
span.set_attribute(span_key, AZURE_ML_SERVICE_NAME)
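For context, a minimal usage sketch of the new helper: it sets the given span attribute only when one of the Azure marker variables is present, so spans from non-Azure environments are left untouched. The tracer setup below is assumed (plain opentelemetry-sdk) and is not part of this diff:

# Hypothetical caller, for illustration only.
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider

from monocle_apptrace.utils import update_span_with_infra_name

trace.set_tracer_provider(TracerProvider())
tracer = trace.get_tracer(__name__)

with tracer.start_as_current_span("workflow") as span:
    # Writes e.g. infra_service_name="azure.asp" when WEBSITE_SITE_NAME is set.
    update_span_with_infra_name(span, "infra_service_name")
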
7 changes: 5 additions & 2 deletions src/monocle_apptrace/wrap_common.py
Expand Up @@ -4,7 +4,7 @@
from urllib.parse import urlparse

from opentelemetry.trace import Span, Tracer
from monocle_apptrace.utils import resolve_from_alias, with_tracer_wrapper
from monocle_apptrace.utils import resolve_from_alias, update_span_with_infra_name, with_tracer_wrapper

logger = logging.getLogger(__name__)
WORKFLOW_TYPE_KEY = "workflow_type"
@@ -16,6 +16,7 @@
RESPONSE = "response"
TAGS = "tags"
CONTEXT_PROPERTIES_KEY = "workflow_context_properties"
INFRA_SERVICE_KEY = "infra_service_name"



@@ -60,7 +61,9 @@ def pre_task_processing(to_wrap, instance, args, span):
if is_root_span(span):
update_span_with_prompt_input(to_wrap=to_wrap, wrapped_args=args, span=span)

#capture the tags attribute of the instance if present, else ignore
update_span_with_infra_name(span, INFRA_SERVICE_KEY)

#capture the tags attribute of the instance if present, else ignore
try:
update_tags(instance, span)
except AttributeError:
3 changes: 2 additions & 1 deletion tests/langchain_sample.py
@@ -86,7 +86,8 @@ def format_docs(docs):
# "workflow_input": "What is Task Decomposition?",
# "workflow_name": "langchain_app_1",
# "workflow_output": "Task decomposition is a technique where complex tasks are broken down into smaller and simpler steps to enhance model performance. This process allows agents to tackle difficult tasks by transforming them into more manageable components. Task decomposition can be achieved through various methods such as using prompting techniques, task-specific instructions, or human inputs.",
# "workflow_type": "workflow.langchain"
# "workflow_type": "workflow.langchain",
# "infra_service_name": "azure.func"
# },
# "events": []
# },
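The expected output above includes infra_service_name set to azure.func, which assumes the sample process looks like it is running under Azure Functions. One way to reproduce that locally (an assumption, not something the sample itself does) is to set the Functions marker variable before the chain runs; detection only checks that the variable exists, not its value:

# Assumed local setup for reproducing the "azure.func" attribute.
import os

os.environ["FUNCTIONS_WORKER_RUNTIME"] = "python"
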
15 changes: 14 additions & 1 deletion tests/langchain_test.py
@@ -8,6 +8,7 @@
import unittest
from unittest.mock import ANY, MagicMock, patch

import pytest
import requests
from dummy_class import DummyClass
from embeddings_wrapper import HuggingFaceEmbeddings
@@ -17,13 +18,15 @@
from langchain_community.vectorstores import faiss
from langchain_core.messages.ai import AIMessage
from langchain_core.runnables import RunnablePassthrough
from monocle_apptrace.constants import AZURE_APP_SERVICE_ENV_NAME, AZURE_APP_SERVICE_NAME, AZURE_FUNCTION_NAME, AZURE_FUNCTION_WORKER_ENV_NAME, AZURE_ML_ENDPOINT_ENV_NAME, AZURE_ML_SERVICE_NAME
from monocle_apptrace.instrumentor import (
MonocleInstrumentor,
set_context_properties,
setup_monocle_telemetry,
)
from monocle_apptrace.wrap_common import (
CONTEXT_PROPERTIES_KEY,
INFRA_SERVICE_KEY,
PROMPT_INPUT_KEY,
PROMPT_OUTPUT_KEY,
QUERY,
@@ -37,6 +40,7 @@
from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter

from fake_list_llm import FakeListLLM
from parameterized import parameterized

logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
@@ -104,10 +108,17 @@ def setUp(self):
def tearDown(self) -> None:
return super().tearDown()

@parameterized.expand([
("1", AZURE_ML_ENDPOINT_ENV_NAME, AZURE_ML_SERVICE_NAME),
("2", AZURE_FUNCTION_WORKER_ENV_NAME, AZURE_FUNCTION_NAME),
("3", AZURE_APP_SERVICE_ENV_NAME, AZURE_APP_SERVICE_NAME),
])
@patch.object(requests.Session, 'post')
def test_llm_chain(self, mock_post):
def test_llm_chain(self, test_name, test_input_infra, test_output_infra, mock_post):

try:

os.environ[test_input_infra] = "1"
context_key = "context_key_1"
context_value = "context_value_1"
set_context_properties({context_key: context_value})
@@ -148,11 +159,13 @@ def get_event_attributes(events, key):
assert input_event_attributes[QUERY] == query
assert output_event_attributes[RESPONSE] == TestHandler.ragText
assert root_span_attributes[f"{CONTEXT_PROPERTIES_KEY}.{context_key}"] == context_value
assert root_span_attributes[INFRA_SERVICE_KEY] == test_output_infra

for spanObject in dataJson['batch']:
assert not spanObject["context"]["span_id"].startswith("0x")
assert not spanObject["context"]["trace_id"].startswith("0x")
finally:
os.environ.pop(test_input_infra)
try:
if(self.instrumentor is not None):
self.instrumentor.uninstrument()
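The @parameterized.expand decorator above turns the single test into three cases, one per Azure environment. A standalone sketch of the same pattern (a hypothetical extra test, not part of this diff) that checks the env-var to service-name mapping declared in constants.py:

# Minimal self-contained example of the parameterized pattern used above.
import unittest

from parameterized import parameterized

from monocle_apptrace.constants import azure_service_map


class AzureServiceMapTest(unittest.TestCase):
    @parameterized.expand([
        ("ml", "AZUREML_ENTRY_SCRIPT", "azure.mlw"),
        ("func", "FUNCTIONS_WORKER_RUNTIME", "azure.func"),
        ("app", "WEBSITE_SITE_NAME", "azure.asp"),
    ])
    def test_env_to_service_name(self, _name, env_name, expected_service):
        self.assertEqual(azure_service_map[env_name], expected_service)


if __name__ == "__main__":
    unittest.main()
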
1 change: 1 addition & 0 deletions tox.ini
@@ -27,6 +27,7 @@ deps =
llama-index==0.10.30
llama-index-embeddings-huggingface==0.2.0
llama-index-vector-stores-chroma==0.1.9
parameterized==0.9.0
commands =
pytest -vv
