diff --git a/Monocle_User_Guide.md b/Monocle_User_Guide.md new file mode 100644 index 0000000..61af885 --- /dev/null +++ b/Monocle_User_Guide.md @@ -0,0 +1,77 @@ +# Monocle User Guide + +## Monocle Concepts +### Traces +Traces are the full view of a single end-to-end application KPI eg Chatbot application to provide a response to end user’s question. Traces consist of various metadata about the application run including status, start time, duration, input/outputs etc. It also includes a list of individual steps aka “spans” with details about that step. +It’s typically the workflow code components of an application that generate the traces for application runs. +### Spans +Spans are the individual steps executed by the application to perform a GenAI related task eg app retrieving vectors from DB, app querying LLM for inference etc. The span includes the type of operation, start time, duration and metadata relevant to that step eg Model name, parameters and model endpoint/server for an inference request. +It’s typically the workflow code components of an application that generate the spans for application runs. + +## Setup Monocle +- You can download Monocle library releases from Pypi +``` + > python3 -m pip install pipenv + > pip install monocle-observability +``` +- You can locally build and install Monocle library from source +``` +> pip install . 
+> pip install -e ".[dev]" + +> python3 -m pip install pipenv +> pipenv install build + +## Examples +### Enable Monocle tracing in your application +```python +from monocle_apptrace.instrumentor import setup_monocle_telemetry +from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter +from langchain.chains import LLMChain +from langchain_openai import OpenAI +from langchain.prompts import PromptTemplate + +# Call the setup Monocle telemetry method +setup_monocle_telemetry(workflow_name = "simple_math_app", + span_processors=[BatchSpanProcessor(ConsoleSpanExporter())]) + +llm = OpenAI() +prompt = PromptTemplate.from_template("1 + {number} = ") + +chain = LLMChain(llm=llm, prompt=prompt) +chain.invoke({"number":2}) + +# Request callbacks: Finally, let's use the request `callbacks` to achieve the same result +chain = LLMChain(llm=llm, prompt=prompt) +chain.invoke({"number":2}, {"callbacks":[handler]}) + +``` + +### Monitoring custom methods with Monocle + +```python +from monocle_apptrace.wrapper import WrapperMethod,task_wrapper,atask_wrapper +from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter + +# extend the default wrapped methods list as follows +app_name = "simple_math_app" +setup_monocle_telemetry( + workflow_name=app_name, + span_processors=[BatchSpanProcessor(ConsoleSpanExporter())], + wrapper_methods=[ + WrapperMethod( + package="langchain.schema.runnable", + object="RunnableParallel", + method="invoke", + span_name="langchain.workflow", + wrapper=task_wrapper), + WrapperMethod( + package="langchain.schema.runnable", + object="RunnableParallel", + method="ainvoke", + span_name="langchain.workflow", + wrapper=atask_wrapper) + ]) + +``` \ No newline at end of file diff --git a/Monocle_committer_guide.md b/Monocle_committer_guide.md new file mode 100644 index 0000000..8c919f4 --- /dev/null +++ b/Monocle_committer_guide.md @@ -0,0 +1,26 @@ +# Monocle Committer Guide +This document provides details for 
Monocle committer tasks + +## Build and publishing python packages +### Building the package + +``` +> python3 -m build +``` +### Publishing the package + +``` +> python3 -m pip install --upgrade twine +> python3 -m twine upload --repository testpypi dist/* +``` +### Installing the package + +The steps to set the credential can be found here: +https://packaging.python.org/en/latest/specifications/pypirc/ + +After setup of credentials, follow the commands below to install the package from testpypi: + +``` +> python3 -m pip install pipenv +> pipenv install monocle-observability +``` diff --git a/Monocle_contributor_guide.md b/Monocle_contributor_guide.md new file mode 100644 index 0000000..78a0d59 --- /dev/null +++ b/Monocle_contributor_guide.md @@ -0,0 +1 @@ +Coming soon ... \ No newline at end of file diff --git a/src/monocle_apptrace/README.md b/src/monocle_apptrace/README.md new file mode 100644 index 0000000..61af885 --- /dev/null +++ b/src/monocle_apptrace/README.md @@ -0,0 +1,77 @@ +# Monocle User Guide + +## Monocle Concepts +### Traces +Traces are the full view of a single end-to-end application KPI eg Chatbot application to provide a response to end user’s question. Traces consist of various metadata about the application run including status, start time, duration, input/outputs etc. It also includes a list of individual steps aka “spans” with details about that step. +It’s typically the workflow code components of an application that generate the traces for application runs. +### Spans +Spans are the individual steps executed by the application to perform a GenAI related task eg app retrieving vectors from DB, app querying LLM for inference etc. The span includes the type of operation, start time, duration and metadata relevant to that step eg Model name, parameters and model endpoint/server for an inference request. +It’s typically the workflow code components of an application that generate the spans for application runs. 
+ +## Setup Monocle +- You can download Monocle library releases from Pypi +``` + > python3 -m pip install pipenv + > pip install monocle-observability +``` +- You can locally build and install Monocle library from source +``` +> pip install . +> pip install -e ".[dev]" + +> python3 -m pip install pipenv +> pipenv install build +``` + +## Examples +### Enable Monocle tracing in your application +```python +from monocle_apptrace.instrumentor import setup_monocle_telemetry +from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter +from langchain.chains import LLMChain +from langchain_openai import OpenAI +from langchain.prompts import PromptTemplate + +# Call the setup Monocle telemetry method +setup_monocle_telemetry(workflow_name = "simple_math_app", + span_processors=[BatchSpanProcessor(ConsoleSpanExporter())]) + +llm = OpenAI() +prompt = PromptTemplate.from_template("1 + {number} = ") + +chain = LLMChain(llm=llm, prompt=prompt) +chain.invoke({"number":2}) + +# Request callbacks: Finally, let's use the request `callbacks` to achieve the same result +chain = LLMChain(llm=llm, prompt=prompt) +chain.invoke({"number":2}, {"callbacks":[handler]}) + +``` + +### Monitoring custom methods with Monocle + +```python +from monocle_apptrace.wrapper import WrapperMethod,task_wrapper,atask_wrapper +from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter + +# extend the default wrapped methods list as follows +app_name = "simple_math_app" +setup_monocle_telemetry( + workflow_name=app_name, + span_processors=[BatchSpanProcessor(ConsoleSpanExporter())], + wrapper_methods=[ + WrapperMethod( + package="langchain.schema.runnable", + object="RunnableParallel", + method="invoke", + span_name="langchain.workflow", + wrapper=task_wrapper), + WrapperMethod( + package="langchain.schema.runnable", + object="RunnableParallel", + method="ainvoke", + span_name="langchain.workflow", + wrapper=atask_wrapper) + ]) + +``` \ No newline at end of 
file diff --git a/src/monocle_apptrace/pyproject.toml b/src/monocle_apptrace/pyproject.toml new file mode 100644 index 0000000..9ced244 --- /dev/null +++ b/src/monocle_apptrace/pyproject.toml @@ -0,0 +1,76 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "monocle-apptrace" +version = "0.0.1" +authors = [ + { name="Monocle", email="monocle@okahu.ai" }, +] +description = "Monocle genAI tracing" +readme = "README.md" +requires-python = ">=3.8" +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", +] +dependencies = [ + 'requests', + 'wrapt>=1.14.0', + 'opentelemetry-api>=1.21.0', + 'opentelemetry-sdk>=1.21.0', + 'opentelemetry-instrumentation', +] + +[tool.pytest.ini_options] +pythonpath = [ + "src" +] + +[project.optional-dependencies] +dev = [ + 'langchain-openai==0.0.5', + 'numpy==1.26.4', + 'types-requests==2.31.0.20240106', + 'InstructorEmbedding==1.0.1', + 'sentence-transformers==2.6.1', + 'faiss-cpu==1.7.4', + 'pytest==8.0.0', + 'llama-index==0.10.30', + 'llama-index-embeddings-huggingface==0.2.0' +] + +[project.urls] +Homepage = "https://github.com/monocle2ai/monocle" +Issues = "https://github.com/monocle2ai/monocle/issues" + +[tool.hatch.build.targets.wheel] +packages = ["src/monocle_apptrace"] + +[tool.hatch.build.targets.sdist] +ignore-vcs = true + +[tool.hatch.build] +exclude = [ + "/.*", + "*.txt", + "Pipfile", + "/data", + "/docs", + "/tests", + "*.yml", +] + +[tool.pylint] +max-line-length = 120 +disable = [ + "C0103", # (invalid-name) + "C0114", # (missing-module-docstring) + "C0115", # (missing-class-docstring) + "C0116", # (missing-function-docstring) + "R0903", # (too-few-public-methods) + "R0913", # (too-many-arguments) + "W0105", # (pointless-string-statement) +]