
Commit 582c795

Add framework for reading wrapper methods from configuration. Move the out of box wrapper methods into config files (#20)

* Add framework for reading wrapper methods from configuration. Move the out of box wrapper methods into config files

Signed-off-by: Prasad Mujumdar <[email protected]>
Signed-off-by: prasad-okahu <[email protected]>

* fixed haystack method name typo

Signed-off-by: prasad-okahu <[email protected]>

* Updated the wrapper method format

Signed-off-by: prasad-okahu <[email protected]>

---------

Signed-off-by: Prasad Mujumdar <[email protected]>
Signed-off-by: prasad-okahu <[email protected]>
Signed-off-by: Kshitiz Vijayvargiya <[email protected]>
prasad-okahu authored and kshitiz-okahu committed Aug 12, 2024
1 parent 6584869 commit 582c795
Showing 7 changed files with 237 additions and 182 deletions.
29 changes: 5 additions & 24 deletions src/monocle_apptrace/haystack/__init__.py
@@ -1,28 +1,9 @@


import os
import logging
from monocle_apptrace.haystack.wrap_openai import wrap_openai
from monocle_apptrace.haystack.wrap_pipeline import wrap as wrap_pipeline
from monocle_apptrace.utils import load_wrapper_from_config

logger = logging.getLogger(__name__)

HAYSTACK_METHODS = [
{
"package": "haystack.components.generators.openai",
"object": "OpenAIGenerator",
"method": "run",
"wrapper": wrap_openai,
},
{
"package": "haystack.components.generators.chat.openai",
"object": "OpenAIChatGenerator",
"method": "run",
"wrapper": wrap_openai,
},
{
"package": "haystack.core.pipeline.pipeline",
"object": "Pipeline",
"method": "run",
"wrapper": wrap_pipeline,
},
]
parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
HAYSTACK_METHODS = load_wrapper_from_config(
os.path.join(parent_dir, 'wrapper_config', 'haystack_methods.json'))
99 changes: 5 additions & 94 deletions src/monocle_apptrace/langchain/__init__.py
@@ -1,95 +1,6 @@
import os
from monocle_apptrace.utils import load_wrapper_from_config


from monocle_apptrace.wrap_common import allm_wrapper, atask_wrapper, llm_wrapper, task_wrapper

LANGCHAIN_METHODS = [
{
"package": "langchain.prompts.base",
"object": "BasePromptTemplate",
"method": "invoke",
"wrapper": task_wrapper,
},
{
"package": "langchain.prompts.base",
"object": "BasePromptTemplate",
"method": "ainvoke",
"wrapper": atask_wrapper,
},
{
"package": "langchain.chat_models.base",
"object": "BaseChatModel",
"method": "invoke",
"wrapper": llm_wrapper,
},
{
"package": "langchain.chat_models.base",
"object": "BaseChatModel",
"method": "ainvoke",
"wrapper": allm_wrapper,
},
{
"package": "langchain_core.language_models.llms",
"object": "LLM",
"method": "_generate",
"wrapper": llm_wrapper,
},
{
"package": "langchain_core.language_models.llms",
"object": "LLM",
"method": "_agenerate",
"wrapper": llm_wrapper,
},
{
"package": "langchain_core.retrievers",
"object": "BaseRetriever",
"method": "invoke",
"wrapper": task_wrapper,
},
{
"package": "langchain_core.retrievers",
"object": "BaseRetriever",
"method": "ainvoke",
"wrapper": atask_wrapper,
},
{
"package": "langchain.schema",
"object": "BaseOutputParser",
"method": "invoke",
"wrapper": task_wrapper,
},
{
"package": "langchain.schema",
"object": "BaseOutputParser",
"method": "ainvoke",
"wrapper": atask_wrapper,
},
{
"package": "langchain.schema.runnable",
"object": "RunnableSequence",
"method": "invoke",
"span_name": "langchain.workflow",
"wrapper": task_wrapper,
},
{
"package": "langchain.schema.runnable",
"object": "RunnableSequence",
"method": "ainvoke",
"span_name": "langchain.workflow",
"wrapper": atask_wrapper,
},
{
"package": "langchain.schema.runnable",
"object": "RunnableParallel",
"method": "invoke",
"span_name": "langchain.workflow",
"wrapper": task_wrapper,
},
{
"package": "langchain.schema.runnable",
"object": "RunnableParallel",
"method": "ainvoke",
"span_name": "langchain.workflow",
"wrapper": atask_wrapper,
},

]
parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
LANGCHAIN_METHODS = load_wrapper_from_config(
os.path.join(parent_dir, 'wrapper_config', 'lang_chain_methods.json'))
67 changes: 5 additions & 62 deletions src/monocle_apptrace/llamaindex/__init__.py
@@ -1,6 +1,6 @@


from monocle_apptrace.wrap_common import allm_wrapper, atask_wrapper, llm_wrapper, task_wrapper
import os
from monocle_apptrace.utils import load_wrapper_from_config

def get_llm_span_name_for_openai(instance):
if (hasattr(instance, "_is_azure_client")
@@ -9,63 +9,6 @@ def get_llm_span_name_for_openai(instance):
return "llamaindex.azure_openai"
return "llamaindex.openai"

LLAMAINDEX_METHODS = [
{
"package": "llama_index.core.indices.base_retriever",
"object": "BaseRetriever",
"method": "retrieve",
"span_name": "llamaindex.retrieve",
"wrapper": task_wrapper
},
{
"package": "llama_index.core.indices.base_retriever",
"object": "BaseRetriever",
"method": "aretrieve",
"span_name": "llamaindex.retrieve",
"wrapper": atask_wrapper
},
{
"package": "llama_index.core.base.base_query_engine",
"object": "BaseQueryEngine",
"method": "query",
"span_name": "llamaindex.query",
"wrapper": task_wrapper,
},
{
"package": "llama_index.core.base.base_query_engine",
"object": "BaseQueryEngine",
"method": "aquery",
"span_name": "llamaindex.query",
"wrapper": atask_wrapper,
},
{
"package": "llama_index.core.llms.custom",
"object": "CustomLLM",
"method": "chat",
"span_name": "llamaindex.llmchat",
"wrapper": task_wrapper,
},
{
"package": "llama_index.core.llms.custom",
"object": "CustomLLM",
"method": "achat",
"span_name": "llamaindex.llmchat",
"wrapper": atask_wrapper,
},
{
"package": "llama_index.llms.openai.base",
"object": "OpenAI",
"method": "chat",
"span_name": "llamaindex.openai",
"span_name_getter" : get_llm_span_name_for_openai,
"wrapper": llm_wrapper,
},
{
"package": "llama_index.llms.openai.base",
"object": "OpenAI",
"method": "achat",
"span_name": "llamaindex.openai",
"wrapper": allm_wrapper,
}
]

parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
LLAMAINDEX_METHODS = load_wrapper_from_config(
os.path.join(parent_dir, 'wrapper_config', 'llama_index_methods.json'))
23 changes: 21 additions & 2 deletions src/monocle_apptrace/utils.py
@@ -1,6 +1,7 @@


import logging
import json
from importlib import import_module

logger = logging.getLogger(__name__)

class Config:
@@ -51,3 +52,21 @@ def resolve_from_alias(map, alias):
if i in map.keys():
return map[i]
return None

def load_wrapper_from_config(config_file_path:str, module_name:str=None):
    wrapper_methods = []
    with open(config_file_path) as config_file:
        json_data = json.load(config_file)
        wrapper_methods = json_data["wrapper_methods"]
        for wrapper_method in wrapper_methods:
            wrapper_method["wrapper"] = get_wrapper_method(
                wrapper_method["wrapper_package"], wrapper_method["wrapper_method"])
            if "span_name_getter_method" in wrapper_method:
                wrapper_method["span_name_getter"] = get_wrapper_method(
                    wrapper_method["span_name_getter_package"],
                    wrapper_method["span_name_getter_method"])
        return wrapper_methods

def get_wrapper_method(package_name: str, method_name: str):
    wrapper_module = import_module("monocle_apptrace." + package_name)
    return getattr(wrapper_module, method_name)
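
As a quick illustration of what the new loader returns, here is a minimal sketch (not part of this commit) that loads one of the new config files and checks that every wrapper_package/wrapper_method pair has been resolved into a callable; the installed-package path is an assumption.

import os

from monocle_apptrace import utils

# Locate the packaged config directory; assumes the wrapper_config folder
# ships next to utils.py, as this commit lays it out.
config_dir = os.path.join(os.path.dirname(os.path.abspath(utils.__file__)), "wrapper_config")
entries = utils.load_wrapper_from_config(os.path.join(config_dir, "lang_chain_methods.json"))

for entry in entries:
    # load_wrapper_from_config keeps the package/object/method strings from
    # the JSON file and adds a resolved callable under the "wrapper" key.
    assert callable(entry["wrapper"])
    print(entry["package"], entry["object"], entry["method"], entry["wrapper"].__name__)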
25 changes: 25 additions & 0 deletions src/monocle_apptrace/wrapper_config/haystack_methods.json
@@ -0,0 +1,25 @@
{
"wrapper_methods" : [
{
"package": "haystack.components.generators.openai",
"object": "OpenAIGenerator",
"method": "run",
"wrapper_package": "haystack.wrap_openai",
"wrapper_method": "wrap_openai"
},
{
"package": "haystack.components.generators.chat.openai",
"object": "OpenAIChatGenerator",
"method": "run",
"wrapper_package": "haystack.wrap_openai",
"wrapper_method": "wrap_openai"
},
{
"package": "haystack.core.pipeline.pipeline",
"object": "Pipeline",
"method": "run",
"wrapper_package": "haystack.wrap_pipeline",
"wrapper_method": "wrap"
}
]
}
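
For context, a hedged sketch of how these resolved entries could be applied at runtime. The instrumentation side is not part of this diff, so the use of wrapt.wrap_function_wrapper and the assumption that each wrapper follows wrapt's (wrapped, instance, args, kwargs) convention are illustrative only.

import wrapt

from monocle_apptrace.haystack import HAYSTACK_METHODS  # loaded from haystack_methods.json

for to_wrap in HAYSTACK_METHODS:
    # Patch e.g. Pipeline.run in haystack.core.pipeline.pipeline with the
    # callable that load_wrapper_from_config resolved from the JSON entry.
    wrapt.wrap_function_wrapper(
        to_wrap["package"],
        f'{to_wrap["object"]}.{to_wrap["method"]}',
        to_wrap["wrapper"],
    )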
106 changes: 106 additions & 0 deletions src/monocle_apptrace/wrapper_config/lang_chain_methods.json
@@ -0,0 +1,106 @@
{
"wrapper_methods" : [
{
"package": "langchain.prompts.base",
"object": "BasePromptTemplate",
"method": "invoke",
"wrapper_package": "wrap_common",
"wrapper_method": "task_wrapper"
},
{
"package": "langchain.prompts.base",
"object": "BasePromptTemplate",
"method": "ainvoke",
"wrapper_package": "wrap_common",
"wrapper_method": "atask_wrapper"
},
{
"package": "langchain.chat_models.base",
"object": "BaseChatModel",
"method": "invoke",
"wrapper_package": "wrap_common",
"wrapper_method": "llm_wrapper"
},
{
"package": "langchain.chat_models.base",
"object": "BaseChatModel",
"method": "ainvoke",
"wrapper_package": "wrap_common",
"wrapper_method": "allm_wrapper"
},
{
"package": "langchain_core.language_models.llms",
"object": "LLM",
"method": "_generate",
"wrapper_package": "wrap_common",
"wrapper_method": "llm_wrapper"
},
{
"package": "langchain_core.language_models.llms",
"object": "LLM",
"method": "_agenerate",
"wrapper_package": "wrap_common",
"wrapper_method": "llm_wrapper"
},
{
"package": "langchain_core.retrievers",
"object": "BaseRetriever",
"method": "invoke",
"wrapper_package": "wrap_common",
"wrapper_method": "task_wrapper"
},
{
"package": "langchain_core.retrievers",
"object": "BaseRetriever",
"method": "ainvoke",
"wrapper_package": "wrap_common",
"wrapper_method": "atask_wrapper"
},
{
"package": "langchain.schema",
"object": "BaseOutputParser",
"method": "invoke",
"wrapper_package": "wrap_common",
"wrapper_method": "task_wrapper"
},
{
"package": "langchain.schema",
"object": "BaseOutputParser",
"method": "ainvoke",
"wrapper_package": "wrap_common",
"wrapper_method": "atask_wrapper"
},
{
"package": "langchain.schema.runnable",
"object": "RunnableSequence",
"method": "invoke",
"span_name": "langchain.workflow",
"wrapper_package": "wrap_common",
"wrapper_method": "task_wrapper"
},
{
"package": "langchain.schema.runnable",
"object": "RunnableSequence",
"method": "ainvoke",
"span_name": "langchain.workflow",
"wrapper_package": "wrap_common",
"wrapper_method": "atask_wrapper"
},
{
"package": "langchain.schema.runnable",
"object": "RunnableParallel",
"method": "invoke",
"span_name": "langchain.workflow",
"wrapper_package": "wrap_common",
"wrapper_method": "task_wrapper"
},
{
"package": "langchain.schema.runnable",
"object": "RunnableParallel",
"method": "ainvoke",
"span_name": "langchain.workflow",
"wrapper_package": "wrap_common",
"wrapper_method": "atask_wrapper"
}
]
}
src/monocle_apptrace/wrapper_config/llama_index_methods.json (diff not rendered)
