
Commit

get example working, iron out kinks
WillBeebe committed Jul 2, 2024
1 parent 9211c6f commit 97e1d34
Showing 9 changed files with 2,005 additions and 26 deletions.
1,949 changes: 1,949 additions & 0 deletions examples/0-simple/poetry.lock

Large diffs are not rendered by default.

3 changes: 2 additions & 1 deletion examples/0-simple/pyproject.toml
@@ -9,7 +9,8 @@ packages = [

[tool.poetry.dependencies]
python = "^3.11"
ada = "^0.1.0"
# ada-python = "^0.2.0"
ada-python = {path = "../../", develop = true}

[tool.poetry.dev-dependencies]
pytest = "^8.1.1"
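The example's dependency on the published package is replaced with a Poetry path dependency on the repository root; develop = true installs it in editable mode, so changes under src/ are picked up by the example without republishing. The newly added examples/0-simple/poetry.lock (1,949 lines, listed above) presumably comes from re-locking against this local dependency.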
9 changes: 6 additions & 3 deletions examples/0-simple/src/main.py
@@ -1,6 +1,9 @@
from ada.abcs import AnthropicLLM
from ada.agents import Ada
from abcs.anthropic import AnthropicLLM
from agents.ada import Ada

agent = Ada(client=AnthropicLLM())
response = agent.generate_text("Name five fruit that start with the letter a.")
prompt = "Name five fruit that start with the letter a."

print(prompt)
response = agent.generate_text(prompt)
print(response.content)
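The example now imports the flat top-level packages that the root pyproject.toml ships from src/ (abcs, agents) instead of an ada.* namespace, and it echoes the prompt before printing the model's reply.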
21 changes: 20 additions & 1 deletion poetry.lock

Some generated files are not rendered by default.

6 changes: 4 additions & 2 deletions pyproject.toml
@@ -1,13 +1,14 @@
[tool.poetry]
name = "ada-python"
version = "0.2.1"
description = ""
version = "0.2.2"
description = "Ada, making LLMs easier to work with."
authors = ["Will Beebe"]
packages = [
{include = "abcs", from="src"},
{include = "agents", from="src"},
{include = "tools", from="src"},
{include = "storage", from="src"},
# {include = "metrics", from="src"},
{include = "data", from="."}
]

@@ -23,6 +24,7 @@ groq = "^0.9.0"
yfinance = "^0.2.38"
openai-multi-tool-use-parallel-patch = "^0.2.0"
ollama = "^0.2.1"
neo4j = "^5.22.0"

[tool.poetry.dev-dependencies]
pytest = "^8.1.1"
7 changes: 4 additions & 3 deletions src/metrics/main.py → snippets/metrics.py
@@ -1,9 +1,10 @@

from opentelemetry import metrics
from opentelemetry.exporter.prometheus import PrometheusMetricReader
from opentelemetry.sdk.metrics import Counter, MeterProvider, ObservableGauge
from opentelemetry.sdk.metrics.export import InMemoryMetricReader
from prometheus_client import start_http_server
from opentelemetry.sdk.metrics import MeterProvider

# from opentelemetry.sdk.metrics.export import InMemoryMetricReader
# from prometheus_client import start_http_server

prometheus_exporter = PrometheusMetricReader()
metrics.set_meter_provider(MeterProvider(metric_readers=[prometheus_exporter]))
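The metrics bootstrap moves out of the package (src/metrics/main.py → snippets/metrics.py) and its unused imports are commented out, which is why src/agents/agent.py below stops importing from metrics.main. As a hedged sketch (the meter and counter names are assumptions; the original definitions are not shown in this diff), counters like the ones agent.py used to import could be created through the registered meter provider:

from opentelemetry import metrics
from opentelemetry.exporter.prometheus import PrometheusMetricReader
from opentelemetry.sdk.metrics import MeterProvider

# Register the Prometheus reader as the global meter provider, as snippets/metrics.py does.
prometheus_exporter = PrometheusMetricReader()
metrics.set_meter_provider(MeterProvider(metric_readers=[prometheus_exporter]))

# Hypothetical counters; names and descriptions are illustrative only.
meter = metrics.get_meter("ada")
generate_text_counter = meter.create_counter(
    "generate_text_calls", description="Calls to Agent.generate_text"
)
call_tool_counter = meter.create_counter(
    "call_tool_calls", description="Calls to Agent.call_tool"
)
generate_text_counter.add(1)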
1 change: 1 addition & 0 deletions src/abcs/anthropic.py
@@ -132,6 +132,7 @@ def _translate_response(self, response) -> PromptResponse:
content = response.content[0].text
return PromptResponse(
content=content,
raw_response=response,
error={},
usage=UsageStats(
input_tokens=response.usage.input_tokens,
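With raw_response carried on PromptResponse, callers can reach provider-specific fields without changing the shared model. A minimal sketch, assuming the agent from examples/0-simple and that the Anthropic Messages response exposes stop_reason:

response = agent.generate_text("Name five fruit that start with the letter a.")
print(response.content)                   # normalized text from the shared PromptResponse
print(response.usage.input_tokens)        # usage stats populated in _translate_response
print(response.raw_response.stop_reason)  # the raw Anthropic message, now preserved as-is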
35 changes: 19 additions & 16 deletions src/agents/agent.py
@@ -2,7 +2,8 @@

from abcs.llm import LLM
from abcs.models import PromptResponse
from metrics.main import call_tool_counter, generate_text_counter

# from metrics.main import call_tool_counter, generate_text_counter
from storage.storage_manager import StorageManager
from tools.tool_manager import ToolManager

@@ -32,7 +33,7 @@ def get_history(self):
return []

def generate_text(self, prompt: str) -> PromptResponse:
generate_text_counter.add(1)
# generate_text_counter.add(1)
logger.debug("Generating text for prompt: '%s'", prompt)
past_messages = []
if self.storage_manager is not None:
@@ -41,11 +42,11 @@ def generate_text(self, prompt: str) -> PromptResponse:
# past_messages = self.storage_manager.get_past_messages_callback()
# else:
past_messages = self.storage_manager.get_past_messages()
logger.info("Fetched %d past messages", len(past_messages))
logger.debug("Fetched %d past messages", len(past_messages))
# todo: push down to core llm class, leave for now while scripting

try:
logger.info("passing %d past messages", len(past_messages))
logger.debug("passing %d past messages", len(past_messages))
if self.storage_manager is not None:
self.storage_manager.store_message("user", prompt)
response = self.client.generate_text(prompt, past_messages, self.tools)
@@ -56,17 +57,18 @@

if self.storage_manager is not None:
try:
translated = self.translate_response(response)
self.storage_manager.store_message("assistant", translated.content)
# translated = self._translate_response(response)
self.storage_manager.store_message("assistant", response.content)
except Exception as e:
logger.error("Error storing messages: %s", e, exc_info=True)
raise e

logger.debug("Generated response: %s", response)
return self.translate_response(response)
# logger.debug("Generated response: %s", response)
# return self._translate_response(response)
return response

def call_tool(self, past_messages, tool_msg, tools) -> str:
call_tool_counter.add(1)
# call_tool_counter.add(1)
logger.debug("Calling tool with message: %s", tool_msg)
try:
if len(tools) == 0:
@@ -79,10 +81,11 @@
logger.error("Error calling tool: %s", e, exc_info=True)
raise e

def translate_response(self, response) -> PromptResponse:
try:
translated_response = self.client.translate_response(response)
return translated_response
except Exception as e:
logger.error("Error translating response: %s", e, exc_info=True)
raise e
def _translate_response(self, response) -> PromptResponse:
pass
# try:
# translated_response = self.client._translate_response(response)
# return translated_response
# except Exception as e:
# logger.error("Error translating response: %s", e, exc_info=True)
# raise e
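The agent no longer re-translates the client's result: generate_text stores response.content and returns the response as-is, so each LLM client is expected to hand back an already-translated PromptResponse (as AnthropicLLM._translate_response does above). A hedged sketch of that contract with a toy client; the UsageStats import location and constructor arguments are assumptions based only on the fields visible in src/abcs/anthropic.py:

from abcs.models import PromptResponse, UsageStats  # UsageStats location assumed

class EchoLLM:
    """Toy client for illustration only; returns a pre-translated PromptResponse."""
    def generate_text(self, prompt, past_messages, tools) -> PromptResponse:
        return PromptResponse(
            content=f"echo: {prompt}",
            raw_response=None,
            error={},
            usage=UsageStats(input_tokens=0, output_tokens=0),
        )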
Empty file added src/metrics/.gitkeep
Empty file.

