Skip to content

Commit

Permalink
Add framework for reading wrapper methods from configuration. Move th…
Browse files Browse the repository at this point in the history
…e out of box wrapper methods into config files

Signed-off-by: Prasad Mujumdar <[email protected]>
  • Loading branch information
prasad-okahu committed Jul 24, 2024
1 parent 6584869 commit d9933b5
Show file tree
Hide file tree
Showing 7 changed files with 214 additions and 182 deletions.
29 changes: 5 additions & 24 deletions src/monocle_apptrace/haystack/__init__.py
Original file line number Diff line number Diff line change
@@ -1,28 +1,9 @@


import os
import logging
from monocle_apptrace.haystack.wrap_openai import wrap_openai
from monocle_apptrace.haystack.wrap_pipeline import wrap as wrap_pipeline
from monocle_apptrace.utils import load_wrapper_from_config

logger = logging.getLogger(__name__)

HAYSTACK_METHODS = [
{
"package": "haystack.components.generators.openai",
"object": "OpenAIGenerator",
"method": "run",
"wrapper": wrap_openai,
},
{
"package": "haystack.components.generators.chat.openai",
"object": "OpenAIChatGenerator",
"method": "run",
"wrapper": wrap_openai,
},
{
"package": "haystack.core.pipeline.pipeline",
"object": "Pipeline",
"method": "run",
"wrapper": wrap_pipeline,
},
]
parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
HAYSTACK_METHODS = load_wrapper_from_config(
os.path.join(parent_dir, 'wrapper_config', 'haystack_methods.json'))
99 changes: 5 additions & 94 deletions src/monocle_apptrace/langchain/__init__.py
Original file line number Diff line number Diff line change
@@ -1,95 +1,6 @@
import os
from monocle_apptrace.utils import load_wrapper_from_config


from monocle_apptrace.wrap_common import allm_wrapper, atask_wrapper, llm_wrapper, task_wrapper

LANGCHAIN_METHODS = [
{
"package": "langchain.prompts.base",
"object": "BasePromptTemplate",
"method": "invoke",
"wrapper": task_wrapper,
},
{
"package": "langchain.prompts.base",
"object": "BasePromptTemplate",
"method": "ainvoke",
"wrapper": atask_wrapper,
},
{
"package": "langchain.chat_models.base",
"object": "BaseChatModel",
"method": "invoke",
"wrapper": llm_wrapper,
},
{
"package": "langchain.chat_models.base",
"object": "BaseChatModel",
"method": "ainvoke",
"wrapper": allm_wrapper,
},
{
"package": "langchain_core.language_models.llms",
"object": "LLM",
"method": "_generate",
"wrapper": llm_wrapper,
},
{
"package": "langchain_core.language_models.llms",
"object": "LLM",
"method": "_agenerate",
"wrapper": llm_wrapper,
},
{
"package": "langchain_core.retrievers",
"object": "BaseRetriever",
"method": "invoke",
"wrapper": task_wrapper,
},
{
"package": "langchain_core.retrievers",
"object": "BaseRetriever",
"method": "ainvoke",
"wrapper": atask_wrapper,
},
{
"package": "langchain.schema",
"object": "BaseOutputParser",
"method": "invoke",
"wrapper": task_wrapper,
},
{
"package": "langchain.schema",
"object": "BaseOutputParser",
"method": "ainvoke",
"wrapper": atask_wrapper,
},
{
"package": "langchain.schema.runnable",
"object": "RunnableSequence",
"method": "invoke",
"span_name": "langchain.workflow",
"wrapper": task_wrapper,
},
{
"package": "langchain.schema.runnable",
"object": "RunnableSequence",
"method": "ainvoke",
"span_name": "langchain.workflow",
"wrapper": atask_wrapper,
},
{
"package": "langchain.schema.runnable",
"object": "RunnableParallel",
"method": "invoke",
"span_name": "langchain.workflow",
"wrapper": task_wrapper,
},
{
"package": "langchain.schema.runnable",
"object": "RunnableParallel",
"method": "ainvoke",
"span_name": "langchain.workflow",
"wrapper": atask_wrapper,
},

]
parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
LANGCHAIN_METHODS = load_wrapper_from_config(
os.path.join(parent_dir, 'wrapper_config', 'lang_chain_methods.json'))
67 changes: 5 additions & 62 deletions src/monocle_apptrace/llamaindex/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@


from monocle_apptrace.wrap_common import allm_wrapper, atask_wrapper, llm_wrapper, task_wrapper
import os
from monocle_apptrace.utils import load_wrapper_from_config

def get_llm_span_name_for_openai(instance):
if (hasattr(instance, "_is_azure_client")
Expand All @@ -9,63 +9,6 @@ def get_llm_span_name_for_openai(instance):
return "llamaindex.azure_openai"
return "llamaindex.openai"

LLAMAINDEX_METHODS = [
{
"package": "llama_index.core.indices.base_retriever",
"object": "BaseRetriever",
"method": "retrieve",
"span_name": "llamaindex.retrieve",
"wrapper": task_wrapper
},
{
"package": "llama_index.core.indices.base_retriever",
"object": "BaseRetriever",
"method": "aretrieve",
"span_name": "llamaindex.retrieve",
"wrapper": atask_wrapper
},
{
"package": "llama_index.core.base.base_query_engine",
"object": "BaseQueryEngine",
"method": "query",
"span_name": "llamaindex.query",
"wrapper": task_wrapper,
},
{
"package": "llama_index.core.base.base_query_engine",
"object": "BaseQueryEngine",
"method": "aquery",
"span_name": "llamaindex.query",
"wrapper": atask_wrapper,
},
{
"package": "llama_index.core.llms.custom",
"object": "CustomLLM",
"method": "chat",
"span_name": "llamaindex.llmchat",
"wrapper": task_wrapper,
},
{
"package": "llama_index.core.llms.custom",
"object": "CustomLLM",
"method": "achat",
"span_name": "llamaindex.llmchat",
"wrapper": atask_wrapper,
},
{
"package": "llama_index.llms.openai.base",
"object": "OpenAI",
"method": "chat",
"span_name": "llamaindex.openai",
"span_name_getter" : get_llm_span_name_for_openai,
"wrapper": llm_wrapper,
},
{
"package": "llama_index.llms.openai.base",
"object": "OpenAI",
"method": "achat",
"span_name": "llamaindex.openai",
"wrapper": allm_wrapper,
}
]

parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
LLAMAINDEX_METHODS = load_wrapper_from_config(
os.path.join(parent_dir, 'wrapper_config', 'llama_index_methods.json'))
26 changes: 24 additions & 2 deletions src/monocle_apptrace/utils.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@


import logging
import json
from importlib import import_module

logger = logging.getLogger(__name__)

class Config:
Expand Down Expand Up @@ -51,3 +52,24 @@ def resolve_from_alias(map, alias):
if i in map.keys():
return map[i]
return None

def load_wrapper_from_config(config_file_path: str, module_name: str = None):
    """Load wrapper-method descriptors from a JSON configuration file.

    Reads the ``wrapper_methods`` list from the JSON file at
    *config_file_path* and, for each entry, resolves the dotted-path
    strings under ``"wrapper"`` (and the optional ``"span_name_getter"``)
    into actual callables via :func:`get_wrapper_method`.

    Args:
        config_file_path: Path to the JSON config file to read.
        module_name: Unused; retained for backward compatibility with
            existing callers.

    Returns:
        list[dict]: The wrapper-method dicts with callables resolved
        in place.

    Raises:
        OSError: If the config file cannot be opened.
        KeyError: If the JSON document has no ``"wrapper_methods"`` key.
    """
    # Explicit encoding so JSON config parses identically on all platforms.
    with open(config_file_path, encoding="utf-8") as config_file:
        wrapper_methods = json.load(config_file)["wrapper_methods"]
    for wrapper_method in wrapper_methods:
        # Replace the dotted-string references with the imported callables.
        wrapper_method["wrapper"] = get_wrapper_method(
            wrapper_method["wrapper"])
        if "span_name_getter" in wrapper_method:
            wrapper_method["span_name_getter"] = get_wrapper_method(
                wrapper_method["span_name_getter"])
    return wrapper_methods

def get_wrapper_method(method_signature: str):
    """Resolve a dotted path, relative to ``monocle_apptrace``, to a callable.

    For example ``"wrap_common.task_wrapper"`` resolves to the attribute
    ``task_wrapper`` of the module ``monocle_apptrace.wrap_common``.  A
    bare name (no dots) resolves against the ``monocle_apptrace`` package
    itself.

    Args:
        method_signature: Dotted module path plus attribute name, relative
            to the ``monocle_apptrace`` package.

    Returns:
        The callable (or other attribute) the signature refers to.

    Raises:
        ImportError: If the target module cannot be imported.
        AttributeError: If the module has no such attribute.
    """
    # rpartition splits off the final attribute name; module_path is ""
    # when the signature has no dots.
    module_path, _, attr_name = method_signature.rpartition(".")
    full_module = "monocle_apptrace" + ("." + module_path if module_path else "")
    return getattr(import_module(full_module), attr_name)
22 changes: 22 additions & 0 deletions src/monocle_apptrace/wrapper_config/haystack_methods.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
{
"wrapper_methods" : [
{
"package": "haystack.components.generators.openai",
"object": "OpenAIGenerator",
"method": "run",
"wrapper": "haystack.wrap_openai.wrap_openai"
},
{
"package": "haystack.components.generators.chat.openai",
"object": "OpenAIChatGenerator",
"method": "run",
"wrapper": "haystack.wrap_openai.wrap_openai"
},
{
"package": "haystack.core.pipeline.pipeline",
"object": "Pipeline",
"method": "run",
"wrapper": "haystack.wrap_pipeline.wrap"
}
]
}
92 changes: 92 additions & 0 deletions src/monocle_apptrace/wrapper_config/lang_chain_methods.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
{
"wrapper_methods" : [
{
"package": "langchain.prompts.base",
"object": "BasePromptTemplate",
"method": "invoke",
"wrapper": "wrap_common.task_wrapper"
},
{
"package": "langchain.prompts.base",
"object": "BasePromptTemplate",
"method": "ainvoke",
"wrapper": "wrap_common.atask_wrapper"
},
{
"package": "langchain.chat_models.base",
"object": "BaseChatModel",
"method": "invoke",
"wrapper": "wrap_common.llm_wrapper"
},
{
"package": "langchain.chat_models.base",
"object": "BaseChatModel",
"method": "ainvoke",
"wrapper": "wrap_common.allm_wrapper"
},
{
"package": "langchain_core.language_models.llms",
"object": "LLM",
"method": "_generate",
"wrapper": "wrap_common.llm_wrapper"
},
{
"package": "langchain_core.language_models.llms",
"object": "LLM",
"method": "_agenerate",
"wrapper": "wrap_common.llm_wrapper"
},
{
"package": "langchain_core.retrievers",
"object": "BaseRetriever",
"method": "invoke",
"wrapper": "wrap_common.task_wrapper"
},
{
"package": "langchain_core.retrievers",
"object": "BaseRetriever",
"method": "ainvoke",
"wrapper": "wrap_common.atask_wrapper"
},
{
"package": "langchain.schema",
"object": "BaseOutputParser",
"method": "invoke",
"wrapper": "wrap_common.task_wrapper"
},
{
"package": "langchain.schema",
"object": "BaseOutputParser",
"method": "ainvoke",
"wrapper": "wrap_common.atask_wrapper"
},
{
"package": "langchain.schema.runnable",
"object": "RunnableSequence",
"method": "invoke",
"span_name": "langchain.workflow",
"wrapper": "wrap_common.task_wrapper"
},
{
"package": "langchain.schema.runnable",
"object": "RunnableSequence",
"method": "ainvoke",
"span_name": "langchain.workflow",
"wrapper": "wrap_common.atask_wrapper"
},
{
"package": "langchain.schema.runnable",
"object": "RunnableParallel",
"method": "invoke",
"span_name": "langchain.workflow",
"wrapper": "wrap_common.task_wrapper"
},
{
"package": "langchain.schema.runnable",
"object": "RunnableParallel",
"method": "ainvoke",
"span_name": "langchain.workflow",
"wrapper": "wrap_common.atask_wrapper"
}
]
}
Loading

0 comments on commit d9933b5

Please sign in to comment.