Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Metamodel entity changes #51

Merged
merged 10 commits into from
Oct 16, 2024
8 changes: 4 additions & 4 deletions src/monocle_apptrace/haystack/__init__.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@

import os
import logging
from monocle_apptrace.utils import get_wrapper_methods_config

logger = logging.getLogger(__name__)

# Base directory of the monocle_apptrace package (one level above this module);
# used both to locate the metamodel map and as the root for attribute configs.
parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))

# Wrapper-method configuration for Haystack instrumentation, loaded from the
# metamodel map. Attribute accessor configs referenced by the map are resolved
# relative to the package root.
HAYSTACK_METHODS = get_wrapper_methods_config(
    wrapper_methods_config_path=os.path.join(parent_dir, 'metamodel', 'maps', 'haystack_methods.json'),
    attributes_config_base_path=parent_dir)
4 changes: 2 additions & 2 deletions src/monocle_apptrace/haystack/wrap_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from opentelemetry.instrumentation.utils import (
_SUPPRESS_INSTRUMENTATION_KEY,
)
from monocle_apptrace.wrap_common import PROMPT_INPUT_KEY, PROMPT_OUTPUT_KEY, WORKFLOW_TYPE_MAP, with_tracer_wrapper, CONTEXT_INPUT_KEY
from monocle_apptrace.wrap_common import PROMPT_INPUT_KEY, PROMPT_OUTPUT_KEY, WORKFLOW_TYPE_MAP, with_tracer_wrapper, DATA_INPUT_KEY
from monocle_apptrace.utils import set_embedding_model, set_attribute

logger = logging.getLogger(__name__)
Expand All @@ -20,7 +20,7 @@ def wrap(tracer, to_wrap, wrapped, instance, args, kwargs):
workflow_input = get_workflow_input(args, inputs)
embedding_model = get_embedding_model(instance)
set_embedding_model(embedding_model)
set_attribute(CONTEXT_INPUT_KEY, workflow_input)
set_attribute(DATA_INPUT_KEY, workflow_input)


with tracer.start_as_current_span(f"{name}.workflow") as span:
Expand Down
20 changes: 8 additions & 12 deletions src/monocle_apptrace/instrumentor.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@


import logging
from typing import Collection,List
from typing import Collection, List
from wrapt import wrap_function_wrapper
from opentelemetry.trace import get_tracer
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
Expand All @@ -11,17 +9,16 @@
from opentelemetry.sdk.resources import SERVICE_NAME, Resource
from opentelemetry import trace
from opentelemetry.context import get_value, attach, set_value
from monocle_apptrace.utils import process_wrapper_method_config
from monocle_apptrace.wrap_common import SESSION_PROPERTIES_KEY
from monocle_apptrace.wrapper import INBUILT_METHODS_LIST, WrapperMethod
from monocle_apptrace.exporters.file_exporter import FileSpanExporter


logger = logging.getLogger(__name__)

_instruments = ()

class MonocleInstrumentor(BaseInstrumentor):

workflow_name: str = ""
user_wrapper_methods: list[WrapperMethod] = []
instrumented_method_list: list[object] = []
Expand All @@ -37,7 +34,7 @@ def instrumentation_dependencies(self) -> Collection[str]:

def _instrument(self, **kwargs):
tracer_provider = kwargs.get("tracer_provider")
tracer = get_tracer(instrumenting_module_name= __name__, tracer_provider= tracer_provider)
tracer = get_tracer(instrumenting_module_name=__name__, tracer_provider=tracer_provider)

user_method_list = [
{
Expand All @@ -46,8 +43,9 @@ def _instrument(self, **kwargs):
"method": method.method,
"span_name": method.span_name,
"wrapper": method.wrapper,
"output_processor": method.output_processor
} for method in self.user_wrapper_methods]

process_wrapper_method_config(user_method_list)
final_method_list = user_method_list + INBUILT_METHODS_LIST

for wrapped_method in final_method_list:
Expand All @@ -69,7 +67,6 @@ def _instrument(self, **kwargs):
object:{wrap_object},
method:{wrap_method}""")


def _uninstrument(self, **kwargs):
for wrapped_method in self.instrumented_method_list:
try:
Expand Down Expand Up @@ -102,16 +99,15 @@ def setup_monocle_telemetry(
processor.on_start = on_processor_start
if not is_proxy_provider:
tracer_provider_default.add_span_processor(processor)
else :
else:
trace_provider.add_span_processor(processor)
if is_proxy_provider :
if is_proxy_provider:
trace.set_tracer_provider(trace_provider)
instrumentor = MonocleInstrumentor(user_wrapper_methods=wrapper_methods or [])
# instrumentor.app_name = workflow_name
if not instrumentor.is_instrumented_by_opentelemetry:
instrumentor.instrument()


def on_processor_start(span: Span, parent_context):
context_properties = get_value(SESSION_PROPERTIES_KEY)
if context_properties is not None:
Expand All @@ -121,4 +117,4 @@ def on_processor_start(span: Span, parent_context):
)

def set_context_properties(properties: dict) -> None:
    """Attach session properties to the current OpenTelemetry context.

    The properties are stored under SESSION_PROPERTIES_KEY so that
    on_processor_start can copy them onto every span as it begins.

    Args:
        properties: Key/value pairs to propagate onto spans for this session.
    """
    # Single attach; the diff render had duplicated this line (old line
    # lacking a trailing newline plus the re-added new line).
    attach(set_value(SESSION_PROPERTIES_KEY, properties))
9 changes: 6 additions & 3 deletions src/monocle_apptrace/langchain/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
import os
from monocle_apptrace.utils import get_wrapper_methods_config

# Base directory of the monocle_apptrace package (one level above this module);
# used both to locate the metamodel map and as the root for attribute configs.
parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))

# Wrapper-method configuration for LangChain instrumentation. NOTE(review):
# the map file was renamed from 'lang_chain_methods.json' to
# 'langchain_methods.json' in this change — confirm the file on disk matches.
LANGCHAIN_METHODS = get_wrapper_methods_config(
    wrapper_methods_config_path=os.path.join(parent_dir, 'metamodel', 'maps', 'langchain_methods.json'),
    attributes_config_base_path=parent_dir)


15 changes: 8 additions & 7 deletions src/monocle_apptrace/llamaindex/__init__.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,16 @@

#pylint: disable=protected-access
# pylint: disable=protected-access
import os
from monocle_apptrace.utils import load_wrapper_from_config
from monocle_apptrace.utils import get_wrapper_methods_config


def get_llm_span_name_for_openai(instance):
    """Return the span name to use for a llama-index OpenAI LLM instance.

    Distinguishes Azure OpenAI clients from plain OpenAI clients by probing
    the private `_is_azure_client` predicate on the instance.

    Args:
        instance: An LLM client object; may or may not expose `_is_azure_client`.

    Returns:
        "llamaindex.azure_openai" when the instance reports it is an Azure
        client, otherwise "llamaindex.openai".
    """
    # hasattr + callable guard keeps this safe for client objects that do not
    # implement the private predicate at all.
    if (hasattr(instance, "_is_azure_client")
            and callable(getattr(instance, "_is_azure_client"))
            and instance._is_azure_client()):
        return "llamaindex.azure_openai"
    return "llamaindex.openai"

# Base directory of the monocle_apptrace package (one level above this module);
# used both to locate the metamodel map and as the root for attribute configs.
parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))

# Wrapper-method configuration for llama-index instrumentation. NOTE(review):
# the map file was renamed from 'llama_index_methods.json' to
# 'llamaindex_methods.json' in this change — confirm the file on disk matches.
LLAMAINDEX_METHODS = get_wrapper_methods_config(
    wrapper_methods_config_path=os.path.join(parent_dir, 'metamodel', 'maps', 'llamaindex_methods.json'),
    attributes_config_base_path=parent_dir)
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
{
"type": "inference",
"attributes": [
[
{
"_comment": "provider type ,name , deployment , inference_endpoint",
"attribute": "type",
"accessor": "lambda instance,args:'inference.azure_oai'"
},
{
"attribute": "provider_name",
"accessor": "lambda instance,args:args['provider_name']"
},
{
"attribute": "deployment",
"accessor": "lambda instance,args: resolve_from_alias(instance.__dict__, ['engine', 'azure_deployment', 'deployment_name', 'deployment_id', 'deployment'])"
},
{
"attribute": "inference_endpoint",
"accessor": "lambda instance,args: resolve_from_alias(instance.__dict__, ['azure_endpoint', 'api_base'])"
}
],
[
{
"_comment": "LLM Model",
"attribute": "name",
"accessor": "lambda instance,args: resolve_from_alias(instance.__dict__, ['model', 'model_name'])"
},
{
"attribute": "type",
"accessor": "lambda instance,args: 'model.llm'"
},
{
"attribute": "model_name",
"accessor": "lambda instance,args: resolve_from_alias(instance.__dict__, ['model', 'model_name'])"
}
]
]
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
{
"type": "inference",
"attributes": [
[
{
"_comment": "provider type ,name , deployment , inference_endpoint",
"attribute": "type",
"accessor": "lambda instance,args:'inference.azure_oai'"
},
{
"attribute": "provider_name",
"accessor": "lambda instance,args:args['provider_name']"
},
{
"attribute": "deployment",
"accessor": "lambda instance,args: resolve_from_alias(instance.__dict__, ['engine', 'azure_deployment', 'deployment_name', 'deployment_id', 'deployment'])"
},
{
"attribute": "inference_endpoint",
"accessor": "lambda instance,args: resolve_from_alias(instance.__dict__, ['azure_endpoint', 'api_base'])"
}
],
[
{
"_comment": "LLM Model",
"attribute": "name",
"accessor": "lambda instance,args: resolve_from_alias(instance.__dict__, ['model', 'model_name'])"
},
{
"attribute": "type",
"accessor": "lambda instance,args: 'model.llm'"
},
{
"attribute": "model_name",
"accessor": "lambda instance,args: resolve_from_alias(instance.__dict__, ['model', 'model_name'])"
}
]
]
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
{
"type": "retrieval",
"attributes": [
[
{
"_comment": "vector store name and type",
"attribute": "name",
"accessor": "lambda instance,args: type(instance.vectorstore).__name__"
},
{
"attribute": "type",
"accessor": "lambda instance,args: 'vectorstore.'+type(instance.vectorstore).__name__"
},
{
"attribute": "embedding_model_name",
"accessor": "lambda instance,args: instance.vectorstore.embeddings.model"
}
],
[
{
"_comment": "embedding model name and type",
"attribute": "name",
"accessor": "lambda instance,args: instance.vectorstore.embeddings.model"
},
{
"attribute": "type",
"accessor": "lambda instance ,args: 'model.embedding'"
},
{
"attribute": "model_name",
"accessor": "lambda instance,args: instance.vectorstore.embeddings.model"
}
]
]
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
{
"type": "retrieval",
"attributes": [
[
{
"_comment": "vector store name and type",
"attribute": "name",
"accessor": "lambda instance,args: type(instance._vector_store).__name__"
},
{
"attribute": "type",
"accessor": "lambda instance,args: 'vectorstore.'+type(instance._vector_store).__name__"
},
{
"attribute": "embedding_model_name",
"accessor": "lambda instance,args: instance._embed_model.model_name"
}
],
[
{
"_comment": "embedding model name and type",
"attribute": "name",
"accessor": "lambda instance,args: instance._embed_model.model_name"
},
{
"attribute": "type",
"accessor": "lambda instance ,args: 'model.embedding'"
},
{
"attribute": "model_name",
"accessor": "lambda instance,args: instance._embed_model.model_name"
}
]
]
}
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
{
{
"wrapper_methods" : [
{
"package": "langchain.prompts.base",
Expand All @@ -19,7 +19,8 @@
"object": "BaseChatModel",
"method": "invoke",
"wrapper_package": "wrap_common",
"wrapper_method": "llm_wrapper"
"wrapper_method": "llm_wrapper",
"output_processor": ["metamodel/maps/attributes/inference/langchain_entities.json"]
},
{
"package": "langchain.chat_models.base",
Expand Down Expand Up @@ -47,7 +48,9 @@
"object": "BaseRetriever",
"method": "invoke",
"wrapper_package": "wrap_common",
"wrapper_method": "task_wrapper"
"wrapper_method": "task_wrapper",
"output_processor": ["metamodel/maps/attributes/retrieval/langchain_entities.json"]

},
{
"package": "langchain_core.retrievers",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@
"method": "retrieve",
"span_name": "llamaindex.retrieve",
"wrapper_package": "wrap_common",
"wrapper_method": "task_wrapper"
"wrapper_method": "task_wrapper",
"output_processor": ["metamodel/maps/attributes/retrieval/llamaindex_entities.json"]
},
{
"package": "llama_index.core.indices.base_retriever",
Expand Down Expand Up @@ -55,8 +56,7 @@
"span_name": "llamaindex.openai",
"wrapper_package": "wrap_common",
"wrapper_method": "llm_wrapper",
"span_name_getter_package" : "llamaindex",
"span_name_getter_mothod" : "get_llm_span_name_for_openai"
"output_processor": ["metamodel/maps/attributes/inference/llamaindex_entities.json"]
},
{
"package": "llama_index.llms.openai.base",
Expand Down
Loading
Loading