diff --git a/src/monocle_apptrace/haystack/__init__.py b/src/monocle_apptrace/haystack/__init__.py index 971b500..521dae0 100644 --- a/src/monocle_apptrace/haystack/__init__.py +++ b/src/monocle_apptrace/haystack/__init__.py @@ -1,28 +1,9 @@ - +import os import logging -from monocle_apptrace.haystack.wrap_openai import wrap_openai -from monocle_apptrace.haystack.wrap_pipeline import wrap as wrap_pipeline +from monocle_apptrace.utils import load_wrapper_from_config logger = logging.getLogger(__name__) - -HAYSTACK_METHODS = [ - { - "package": "haystack.components.generators.openai", - "object": "OpenAIGenerator", - "method": "run", - "wrapper": wrap_openai, - }, - { - "package": "haystack.components.generators.chat.openai", - "object": "OpenAIChatGenerator", - "method": "run", - "wrapper": wrap_openai, - }, - { - "package": "haystack.core.pipeline.pipeline", - "object": "Pipeline", - "method": "run", - "wrapper": wrap_pipeline, - }, -] +parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) +HAYSTACK_METHODS = load_wrapper_from_config( + os.path.join(parent_dir, 'wrapper_config', 'haystack_methods.json')) diff --git a/src/monocle_apptrace/langchain/__init__.py b/src/monocle_apptrace/langchain/__init__.py index 6e1ef2c..61364c0 100644 --- a/src/monocle_apptrace/langchain/__init__.py +++ b/src/monocle_apptrace/langchain/__init__.py @@ -1,95 +1,6 @@ +import os +from monocle_apptrace.utils import load_wrapper_from_config - -from monocle_apptrace.wrap_common import allm_wrapper, atask_wrapper, llm_wrapper, task_wrapper - -LANGCHAIN_METHODS = [ - { - "package": "langchain.prompts.base", - "object": "BasePromptTemplate", - "method": "invoke", - "wrapper": task_wrapper, - }, - { - "package": "langchain.prompts.base", - "object": "BasePromptTemplate", - "method": "ainvoke", - "wrapper": atask_wrapper, - }, - { - "package": "langchain.chat_models.base", - "object": "BaseChatModel", - "method": "invoke", - "wrapper": llm_wrapper, - }, - { - "package": 
"langchain.chat_models.base", - "object": "BaseChatModel", - "method": "ainvoke", - "wrapper": allm_wrapper, - }, - { - "package": "langchain_core.language_models.llms", - "object": "LLM", - "method": "_generate", - "wrapper": llm_wrapper, - }, - { - "package": "langchain_core.language_models.llms", - "object": "LLM", - "method": "_agenerate", - "wrapper": llm_wrapper, - }, - { - "package": "langchain_core.retrievers", - "object": "BaseRetriever", - "method": "invoke", - "wrapper": task_wrapper, - }, - { - "package": "langchain_core.retrievers", - "object": "BaseRetriever", - "method": "ainvoke", - "wrapper": atask_wrapper, - }, - { - "package": "langchain.schema", - "object": "BaseOutputParser", - "method": "invoke", - "wrapper": task_wrapper, - }, - { - "package": "langchain.schema", - "object": "BaseOutputParser", - "method": "ainvoke", - "wrapper": atask_wrapper, - }, - { - "package": "langchain.schema.runnable", - "object": "RunnableSequence", - "method": "invoke", - "span_name": "langchain.workflow", - "wrapper": task_wrapper, - }, - { - "package": "langchain.schema.runnable", - "object": "RunnableSequence", - "method": "ainvoke", - "span_name": "langchain.workflow", - "wrapper": atask_wrapper, - }, - { - "package": "langchain.schema.runnable", - "object": "RunnableParallel", - "method": "invoke", - "span_name": "langchain.workflow", - "wrapper": task_wrapper, - }, - { - "package": "langchain.schema.runnable", - "object": "RunnableParallel", - "method": "ainvoke", - "span_name": "langchain.workflow", - "wrapper": atask_wrapper, - }, - -] +parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) +LANGCHAIN_METHODS = load_wrapper_from_config( + os.path.join(parent_dir, 'wrapper_config', 'lang_chain_methods.json')) diff --git a/src/monocle_apptrace/llamaindex/__init__.py b/src/monocle_apptrace/llamaindex/__init__.py index fde9707..fca7a47 100644 --- a/src/monocle_apptrace/llamaindex/__init__.py +++ b/src/monocle_apptrace/llamaindex/__init__.py 
@@ -1,6 +1,6 @@ - -from monocle_apptrace.wrap_common import allm_wrapper, atask_wrapper, llm_wrapper, task_wrapper +import os +from monocle_apptrace.utils import load_wrapper_from_config def get_llm_span_name_for_openai(instance): if (hasattr(instance, "_is_azure_client") @@ -9,63 +9,6 @@ def get_llm_span_name_for_openai(instance): return "llamaindex.azure_openai" return "llamaindex.openai" -LLAMAINDEX_METHODS = [ - { - "package": "llama_index.core.indices.base_retriever", - "object": "BaseRetriever", - "method": "retrieve", - "span_name": "llamaindex.retrieve", - "wrapper": task_wrapper - }, - { - "package": "llama_index.core.indices.base_retriever", - "object": "BaseRetriever", - "method": "aretrieve", - "span_name": "llamaindex.retrieve", - "wrapper": atask_wrapper - }, - { - "package": "llama_index.core.base.base_query_engine", - "object": "BaseQueryEngine", - "method": "query", - "span_name": "llamaindex.query", - "wrapper": task_wrapper, - }, - { - "package": "llama_index.core.base.base_query_engine", - "object": "BaseQueryEngine", - "method": "aquery", - "span_name": "llamaindex.query", - "wrapper": atask_wrapper, - }, - { - "package": "llama_index.core.llms.custom", - "object": "CustomLLM", - "method": "chat", - "span_name": "llamaindex.llmchat", - "wrapper": task_wrapper, - }, - { - "package": "llama_index.core.llms.custom", - "object": "CustomLLM", - "method": "achat", - "span_name": "llamaindex.llmchat", - "wrapper": atask_wrapper, - }, - { - "package": "llama_index.llms.openai.base", - "object": "OpenAI", - "method": "chat", - "span_name": "llamaindex.openai", - "span_name_getter" : get_llm_span_name_for_openai, - "wrapper": llm_wrapper, - }, - { - "package": "llama_index.llms.openai.base", - "object": "OpenAI", - "method": "achat", - "span_name": "llamaindex.openai", - "wrapper": allm_wrapper, - } -] - +parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) +LLAMAINDEX_METHODS = load_wrapper_from_config( + os.path.join(parent_dir, 
'wrapper_config', 'llama_index_methods.json')) diff --git a/src/monocle_apptrace/utils.py b/src/monocle_apptrace/utils.py index dd8427f..bc8f0af 100644 --- a/src/monocle_apptrace/utils.py +++ b/src/monocle_apptrace/utils.py @@ -1,6 +1,7 @@ - - import logging +import json +from importlib import import_module + logger = logging.getLogger(__name__) class Config: @@ -51,3 +52,21 @@ def resolve_from_alias(map, alias): if i in map.keys(): return map[i] return None + +def load_wrapper_from_config(config_file_path:str, module_name:str=None): + wrapper_methods = [] + with open(config_file_path) as config_file: + json_data = json.load(config_file) + wrapper_methods = json_data["wrapper_methods"] + for wrapper_method in wrapper_methods: + wrapper_method["wrapper"] = get_wrapper_method( + wrapper_method["wrapper_package"], wrapper_method["wrapper_method"]) + if "span_name_getter_method" in wrapper_method : + wrapper_method["span_name_getter"] = get_wrapper_method( + wrapper_method["span_name_getter_package"], + wrapper_method["span_name_getter_method"]) + return wrapper_methods + +def get_wrapper_method(package_name: str, method_name: str): + wrapper_module = import_module("monocle_apptrace." 
+ package_name) + return getattr(wrapper_module, method_name) diff --git a/src/monocle_apptrace/wrapper_config/haystack_methods.json b/src/monocle_apptrace/wrapper_config/haystack_methods.json new file mode 100644 index 0000000..e02b21c --- /dev/null +++ b/src/monocle_apptrace/wrapper_config/haystack_methods.json @@ -0,0 +1,25 @@ +{ +"wrapper_methods" : [ + { + "package": "haystack.components.generators.openai", + "object": "OpenAIGenerator", + "method": "run", + "wrapper_package": "haystack.wrap_openai", + "wrapper_method": "wrap_openai" + }, + { + "package": "haystack.components.generators.chat.openai", + "object": "OpenAIChatGenerator", + "method": "run", + "wrapper_package": "haystack.wrap_openai", + "wrapper_method": "wrap_openai" + }, + { + "package": "haystack.core.pipeline.pipeline", + "object": "Pipeline", + "method": "run", + "wrapper_package": "haystack.wrap_pipeline", + "wrapper_method": "wrap" + } +] +} diff --git a/src/monocle_apptrace/wrapper_config/lang_chain_methods.json b/src/monocle_apptrace/wrapper_config/lang_chain_methods.json new file mode 100644 index 0000000..8de8764 --- /dev/null +++ b/src/monocle_apptrace/wrapper_config/lang_chain_methods.json @@ -0,0 +1,106 @@ +{ +"wrapper_methods" : [ + { + "package": "langchain.prompts.base", + "object": "BasePromptTemplate", + "method": "invoke", + "wrapper_package": "wrap_common", + "wrapper_method": "task_wrapper" + }, + { + "package": "langchain.prompts.base", + "object": "BasePromptTemplate", + "method": "ainvoke", + "wrapper_package": "wrap_common", + "wrapper_method": "atask_wrapper" + }, + { + "package": "langchain.chat_models.base", + "object": "BaseChatModel", + "method": "invoke", + "wrapper_package": "wrap_common", + "wrapper_method": "llm_wrapper" + }, + { + "package": "langchain.chat_models.base", + "object": "BaseChatModel", + "method": "ainvoke", + "wrapper_package": "wrap_common", + "wrapper_method": "allm_wrapper" + }, + { + "package": "langchain_core.language_models.llms", + 
"object": "LLM", + "method": "_generate", + "wrapper_package": "wrap_common", + "wrapper_method": "llm_wrapper" + }, + { + "package": "langchain_core.language_models.llms", + "object": "LLM", + "method": "_agenerate", + "wrapper_package": "wrap_common", + "wrapper_method": "llm_wrapper" + }, + { + "package": "langchain_core.retrievers", + "object": "BaseRetriever", + "method": "invoke", + "wrapper_package": "wrap_common", + "wrapper_method": "task_wrapper" + }, + { + "package": "langchain_core.retrievers", + "object": "BaseRetriever", + "method": "ainvoke", + "wrapper_package": "wrap_common", + "wrapper_method": "atask_wrapper" + }, + { + "package": "langchain.schema", + "object": "BaseOutputParser", + "method": "invoke", + "wrapper_package": "wrap_common", + "wrapper_method": "task_wrapper" + }, + { + "package": "langchain.schema", + "object": "BaseOutputParser", + "method": "ainvoke", + "wrapper_package": "wrap_common", + "wrapper_method": "atask_wrapper" + }, + { + "package": "langchain.schema.runnable", + "object": "RunnableSequence", + "method": "invoke", + "span_name": "langchain.workflow", + "wrapper_package": "wrap_common", + "wrapper_method": "task_wrapper" + }, + { + "package": "langchain.schema.runnable", + "object": "RunnableSequence", + "method": "ainvoke", + "span_name": "langchain.workflow", + "wrapper_package": "wrap_common", + "wrapper_method": "atask_wrapper" + }, + { + "package": "langchain.schema.runnable", + "object": "RunnableParallel", + "method": "invoke", + "span_name": "langchain.workflow", + "wrapper_package": "wrap_common", + "wrapper_method": "task_wrapper" + }, + { + "package": "langchain.schema.runnable", + "object": "RunnableParallel", + "method": "ainvoke", + "span_name": "langchain.workflow", + "wrapper_package": "wrap_common", + "wrapper_method": "atask_wrapper" + } +] +} diff --git a/src/monocle_apptrace/wrapper_config/llama_index_methods.json b/src/monocle_apptrace/wrapper_config/llama_index_methods.json new file mode 100644 
index 0000000..000b540 --- /dev/null +++ b/src/monocle_apptrace/wrapper_config/llama_index_methods.json @@ -0,0 +1,70 @@ +{ +"wrapper_methods" : [ + { + "package": "llama_index.core.indices.base_retriever", + "object": "BaseRetriever", + "method": "retrieve", + "span_name": "llamaindex.retrieve", + "wrapper_package": "wrap_common", + "wrapper_method": "task_wrapper" + }, + { + "package": "llama_index.core.indices.base_retriever", + "object": "BaseRetriever", + "method": "aretrieve", + "span_name": "llamaindex.retrieve", + "wrapper_package": "wrap_common", + "wrapper_method": "atask_wrapper" + }, + { + "package": "llama_index.core.base.base_query_engine", + "object": "BaseQueryEngine", + "method": "query", + "span_name": "llamaindex.query", + "wrapper_package": "wrap_common", + "wrapper_method": "task_wrapper" + }, + { + "package": "llama_index.core.base.base_query_engine", + "object": "BaseQueryEngine", + "method": "aquery", + "span_name": "llamaindex.query", + "wrapper_package": "wrap_common", + "wrapper_method": "atask_wrapper" + }, + { + "package": "llama_index.core.llms.custom", + "object": "CustomLLM", + "method": "chat", + "span_name": "llamaindex.llmchat", + "wrapper_package": "wrap_common", + "wrapper_method": "task_wrapper" + }, + { + "package": "llama_index.core.llms.custom", + "object": "CustomLLM", + "method": "achat", + "span_name": "llamaindex.llmchat", + "wrapper_package": "wrap_common", + "wrapper_method": "atask_wrapper" + }, + { + "package": "llama_index.llms.openai.base", + "object": "OpenAI", + "method": "chat", + "span_name": "llamaindex.openai", + "wrapper_package": "wrap_common", + "wrapper_method": "llm_wrapper", + "span_name_getter_package" : "llamaindex", + "span_name_getter_method" : "get_llm_span_name_for_openai" + }, + { + "package": "llama_index.llms.openai.base", + "object": "OpenAI", + "method": "achat", + "span_name": "llamaindex.openai", + "wrapper_package": "wrap_common", + "wrapper_method": "allm_wrapper" + } +] +}