Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Metamodel entity changes #51

Merged
merged 10 commits into from
Oct 16, 2024
4 changes: 2 additions & 2 deletions src/monocle_apptrace/haystack/wrap_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from opentelemetry.instrumentation.utils import (
_SUPPRESS_INSTRUMENTATION_KEY,
)
from monocle_apptrace.wrap_common import PROMPT_INPUT_KEY, PROMPT_OUTPUT_KEY, WORKFLOW_TYPE_MAP, with_tracer_wrapper, CONTEXT_INPUT_KEY
from monocle_apptrace.wrap_common import PROMPT_INPUT_KEY, PROMPT_OUTPUT_KEY, WORKFLOW_TYPE_MAP, with_tracer_wrapper, DATA_INPUT_KEY
from monocle_apptrace.utils import set_embedding_model, set_attribute

logger = logging.getLogger(__name__)
Expand All @@ -20,7 +20,7 @@ def wrap(tracer, to_wrap, wrapped, instance, args, kwargs):
workflow_input = get_workflow_input(args, inputs)
embedding_model = get_embedding_model(instance)
set_embedding_model(embedding_model)
set_attribute(CONTEXT_INPUT_KEY, workflow_input)
set_attribute(DATA_INPUT_KEY, workflow_input)


with tracer.start_as_current_span(f"{name}.workflow") as span:
Expand Down
1 change: 1 addition & 0 deletions src/monocle_apptrace/instrumentor.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ def _instrument(self, **kwargs):
"method": method.method,
"span_name": method.span_name,
"wrapper": method.wrapper,
"output_processor":method.output_processor
} for method in self.user_wrapper_methods]

final_method_list = user_method_list + INBUILT_METHODS_LIST
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
{
"type": "inference",
"attributes": [
[
{
"_comment": "provider type ,name , deployment , inference_endpoint",
"attribute": "type",
"accessor": "lambda instance,args:'inference.azure_oai'"
},
{
"attribute": "provider_name",
"accessor": "lambda instance,args:args['provider_name']"
},
{
"attribute": "deployment",
"accessor": "lambda instance,args: resolve_from_alias(instance.__dict__, ['engine', 'azure_deployment', 'deployment_name', 'deployment_id', 'deployment'])"
},
{
"attribute": "inference_endpoint",
"accessor": "lambda instance,args: resolve_from_alias(instance.__dict__, ['azure_endpoint', 'api_base'])"
}
],
[
{
"_comment": "LLM Model",
"attribute": "name",
"accessor": "lambda instance,args: resolve_from_alias(instance.__dict__, ['model', 'model_name'])"
},
{
"attribute": "type",
"accessor": "lambda instance,args: 'model.llm'"
},
{
"attribute": "model_name",
"accessor": "lambda instance,args: resolve_from_alias(instance.__dict__, ['model', 'model_name'])"
}
]
]
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
{
"type": "inference",
"attributes": [
[
{
"_comment": "provider type ,name , deployment , inference_endpoint",
"attribute": "type",
"accessor": "lambda instance,args:'inference.azure_oai'"
},
{
"attribute": "provider_name",
"accessor": "lambda instance,args:args['provider_name']"
},
{
"attribute": "deployment",
"accessor": "lambda instance,args: resolve_from_alias(instance.__dict__, ['engine', 'azure_deployment', 'deployment_name', 'deployment_id', 'deployment'])"
},
{
"attribute": "inference_endpoint",
"accessor": "lambda instance,args: resolve_from_alias(instance.__dict__, ['azure_endpoint', 'api_base'])"
}
],
[
{
"_comment": "LLM Model",
"attribute": "name",
"accessor": "lambda instance,args: resolve_from_alias(instance.__dict__, ['model', 'model_name'])"
},
{
"attribute": "type",
"accessor": "lambda instance,args: 'model.llm'"
},
{
"attribute": "model_name",
"accessor": "lambda instance,args: resolve_from_alias(instance.__dict__, ['model', 'model_name'])"
}
]
]
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
{
"type": "retrieval",
"attributes": [
[
{
"_comment": "vector store name and type",
"attribute": "name",
"accessor": "lambda instance,args: type(instance.vectorstore).__name__"
},
{
"attribute": "type",
"accessor": "lambda instance,args: 'vectorstore.'+type(instance.vectorstore).__name__"
},
{
"attribute": "embedding_model_name",
"accessor": "lambda instance,args: instance.vectorstore.embeddings.model"
}
],
[
{
"_comment": "embedding model name and type",
"attribute": "name",
"accessor": "lambda instance,args: instance.vectorstore.embeddings.model"
},
{
"attribute": "type",
"accessor": "lambda instance ,args: 'model.embedding'"
},
{
"attribute": "model_name",
"accessor": "lambda instance,args: instance.vectorstore.embeddings.model"
}
]
]
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
{
"type": "retrieval",
"attributes": [
[
{
"_comment": "vector store name and type",
"attribute": "name",
"accessor": "lambda instance,args: type(instance._vector_store).__name__"
},
{
"attribute": "type",
"accessor": "lambda instance,args: 'vectorstore.'+type(instance._vector_store).__name__"
},
{
"attribute": "embedding_model_name",
"accessor": "lambda instance,args: instance._embed_model.model_name"
}
],
[
{
"_comment": "embedding model name and type",
"attribute": "name",
"accessor": "lambda instance,args: instance._embed_model.model_name"
},
{
"attribute": "type",
"accessor": "lambda instance ,args: 'model.embedding'"
},
{
"attribute": "model_name",
"accessor": "lambda instance,args: instance._embed_model.model_name"
}
]
]
}
9 changes: 6 additions & 3 deletions src/monocle_apptrace/metamodel/maps/lang_chain_methods.json
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
{
{
"wrapper_methods" : [
{
"package": "langchain.prompts.base",
Expand All @@ -19,7 +19,8 @@
"object": "BaseChatModel",
"method": "invoke",
"wrapper_package": "wrap_common",
"wrapper_method": "llm_wrapper"
"wrapper_method": "llm_wrapper",
"output_processor": ["metamodel/maps/attributes/inference/langchain_entities.json"]
},
{
"package": "langchain.chat_models.base",
Expand Down Expand Up @@ -47,7 +48,9 @@
"object": "BaseRetriever",
"method": "invoke",
"wrapper_package": "wrap_common",
"wrapper_method": "task_wrapper"
"wrapper_method": "task_wrapper",
"output_processor": ["metamodel/maps/attributes/retrieval/langchain_entities.json"]

},
{
"package": "langchain_core.retrievers",
Expand Down
6 changes: 3 additions & 3 deletions src/monocle_apptrace/metamodel/maps/llama_index_methods.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@
"method": "retrieve",
"span_name": "llamaindex.retrieve",
"wrapper_package": "wrap_common",
"wrapper_method": "task_wrapper"
"wrapper_method": "task_wrapper",
"output_processor": ["metamodel/maps/attributes/retrieval/llamaindex_entities.json"]
},
{
"package": "llama_index.core.indices.base_retriever",
Expand Down Expand Up @@ -55,8 +56,7 @@
"span_name": "llamaindex.openai",
"wrapper_package": "wrap_common",
"wrapper_method": "llm_wrapper",
"span_name_getter_package" : "llamaindex",
"span_name_getter_mothod" : "get_llm_span_name_for_openai"
"output_processor": ["metamodel/maps/attributes/inference/llamaindex_entities.json"]
},
{
"package": "llama_index.llms.openai.base",
Expand Down
27 changes: 26 additions & 1 deletion src/monocle_apptrace/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from opentelemetry.trace import Span
from opentelemetry.context import attach, set_value, get_value
from monocle_apptrace.constants import azure_service_map, aws_service_map

from json.decoder import JSONDecodeError
embedding_model_context = {}

def set_span_attribute(span, name, value):
Expand Down Expand Up @@ -48,6 +48,28 @@ def resolve_from_alias(my_map, alias):
if i in my_map.keys():
return my_map[i]
return None
def load_output_processor(wrapper_method):
    """Load the output processor from a file if the file path is provided and valid.

    Mutates ``wrapper_method`` in place: the ``output_processor`` entry is
    expected to be a one-element list holding a file path (relative to this
    module, or absolute); it is replaced with the parsed JSON content of that
    file. On any failure the entry is left untouched and an error is logged
    instead of raising, matching the best-effort style of the rest of the
    function.
    """
    logger = logging.getLogger()
    # Guard: the original code indexed [0] unconditionally, which raises
    # IndexError when the configured list is empty (or the value is falsy).
    processor_paths = wrapper_method.get("output_processor") or []
    if not processor_paths:
        logger.error("Invalid or missing output processor file path.")
        return
    output_processor_file_path = processor_paths[0]
    logger.info(f'Output processor file path is: {output_processor_file_path}')

    if isinstance(output_processor_file_path, str) and output_processor_file_path:
        # Resolve the path relative to this module so the metamodel JSON
        # files ship alongside the package (os.path.join passes an absolute
        # path through unchanged).
        current_dir = os.path.dirname(os.path.abspath(__file__))
        absolute_file_path = os.path.join(current_dir, output_processor_file_path)
        logger.info(f'Absolute file path is: {absolute_file_path}')
        try:
            with open(absolute_file_path, encoding='UTF-8') as op_file:
                wrapper_method["output_processor"] = json.load(op_file)
            logger.info('Output processor loaded successfully.')
        except FileNotFoundError:
            logger.error(f"Error: File not found at {absolute_file_path}.")
        except JSONDecodeError:
            logger.error(f"Error: Invalid JSON content in the file {absolute_file_path}.")
        except Exception as e:
            # Deliberate catch-all: a bad metamodel file must not abort
            # instrumentation setup.
            logger.error(f"Error: An unexpected error occurred: {e}")
    else:
        logger.error("Invalid or missing output processor file path.")

def load_wrapper_from_config(config_file_path: str, module_name: str = None):
wrapper_methods = []
Expand All @@ -61,6 +83,9 @@ def load_wrapper_from_config(config_file_path: str, module_name: str = None):
wrapper_method["span_name_getter"] = get_wrapper_method(
wrapper_method["span_name_getter_package"],
wrapper_method["span_name_getter_method"])
if "output_processor" in wrapper_method:
load_output_processor(wrapper_method)

return wrapper_methods

def get_wrapper_method(package_name: str, method_name: str):
Expand Down
Loading
Loading