-
Notifications
You must be signed in to change notification settings - Fork 18
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
13 changed files
with
844 additions
and
22 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,27 @@ | ||
import os
import asyncio

from anthropic import AsyncAnthropic

import lunary

client = AsyncAnthropic(
    api_key=os.environ.get("ANTHROPIC_API_KEY"),  # This is the default and can be omitted
)
lunary.monitor(client)  # instrument the client so calls are logged to Lunary


async def main() -> None:
    """Stream a short completion from Claude and drain the event stream."""
    stream = await client.messages.create(
        max_tokens=1024,
        messages=[
            {
                "role": "user",
                "content": "Hello, Claude",
            }
        ],
        model="claude-3-opus-20240229",
        # BUG FIX: without stream=True the API returns one complete Message,
        # which is not iterable; request an event stream instead (matches the
        # sync streaming example in this commit).
        stream=True,
    )
    # BUG FIX: the async client yields an async iterator — `for` would raise;
    # drain it with `async for`.
    async for event in stream:
        pass


asyncio.run(main())
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,26 @@ | ||
import os
import asyncio

from anthropic import AsyncAnthropic

import lunary

# The SDK falls back to the ANTHROPIC_API_KEY env var, so passing it is optional.
client = AsyncAnthropic(api_key=os.environ.get("ANTHROPIC_API_KEY"))
lunary.monitor(client)  # forward request/response telemetry to Lunary


async def main() -> None:
    """Send a single user turn to Claude and print the reply content."""
    prompt = {"role": "user", "content": "Hello, Claude"}
    message = await client.messages.create(
        model="claude-3-opus-20240229",
        max_tokens=1024,
        messages=[prompt],
    )
    print(message.content)


asyncio.run(main())
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,22 @@ | ||
import os

from anthropic import Anthropic

import lunary

client = Anthropic(
    api_key=os.environ.get("ANTHROPIC_API_KEY"),
)
lunary.monitor(client)  # instrument the client so calls are logged to Lunary


message = client.messages.create(
    max_tokens=1024,
    messages=[
        {
            "role": "user",
            "content": "Hello, Claude",
        }
    ],
    model="claude-3-opus-20240229",
)

# BUG FIX: the committed line was truncated mid-expression ("message.ro"),
# a syntax error. Completed to the attribute the prefix most plausibly
# named; NOTE(review): confirm whether `message.content` was intended,
# as in the sibling examples.
print(message.role)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,26 @@ | ||
import os

from anthropic import Anthropic

import lunary

# API key is read from the environment; the SDK would also pick this up by default.
client = Anthropic(api_key=os.environ.get("ANTHROPIC_API_KEY"))
lunary.monitor(client)


# Request a server-sent event stream instead of one complete message.
stream = client.messages.create(
    model="claude-3-opus-20240229",
    max_tokens=1024,
    stream=True,
    messages=[{"role": "user", "content": "Hello, Claude"}],
)

# Drain the stream; the Lunary monitor records events as they arrive.
for event in stream:
    pass
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,86 @@ | ||
from anthropic import Anthropic

import lunary

client = Anthropic()
lunary.monitor(client)

# First turn: advertise a minimal weather tool and let Claude request it.
basic_weather_tool = {
    "name": "get_weather",
    "description": "Get the current weather in a given location",
    "input_schema": {
        "type": "object",
        "properties": {
            "location": {
                "type": "string",
                "description": "The city and state, e.g. San Francisco, CA",
            }
        },
        "required": ["location"],
    },
}

response = client.messages.create(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    tools=[basic_weather_tool],
    messages=[{"role": "user", "content": "What's the weather like in San Francisco?"}],
)

# Second turn: richer schema (adds "unit") plus a full tool_use/tool_result exchange.
weather_tool = {
    "name": "get_weather",
    "description": "Get the current weather in a given location",
    "input_schema": {
        "type": "object",
        "properties": {
            "location": {
                "type": "string",
                "description": "The city and state, e.g. San Francisco, CA"
            },
            "unit": {
                "type": "string",
                "enum": ["celsius", "fahrenheit"],
                "description": "The unit of temperature, either 'celsius' or 'fahrenheit'"
            }
        },
        "required": ["location"]
    }
}

conversation = [
    {
        "role": "user",
        "content": "What's the weather like in San Francisco?"
    },
    {
        "role": "assistant",
        "content": [
            {
                "type": "text",
                "text": "<thinking>I need to use get_weather, and the user wants SF, which is likely San Francisco, CA.</thinking>"
            },
            {
                "type": "tool_use",
                "id": "toolu_01A09q90qw90lq917835lq9",
                "name": "get_weather",
                "input": {"location": "San Francisco, CA", "unit": "celsius"}
            }
        ]
    },
    {
        "role": "user",
        "content": [
            {
                "type": "tool_result",
                "tool_use_id": "toolu_01A09q90qw90lq917835lq9",  # from the API response
                "content": "65 degrees"  # from running your tool
            }
        ]
    }
]

response = client.messages.create(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    tools=[weather_tool],
    messages=conversation,
)

print(response)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,24 @@ | ||
import os
import json

from ibm_watsonx_ai import Credentials
from ibm_watsonx_ai.foundation_models import ModelInference

import lunary

# SECURITY FIX: the original commit hard-coded an IBM Cloud API key and
# project id in source. Treat the leaked key as compromised and rotate it;
# read both values from the environment instead of embedding them.
model = ModelInference(
    model_id="meta-llama/llama-3-1-8b-instruct",
    credentials=Credentials(
        api_key=os.environ["WATSONX_API_KEY"],
        url="https://us-south.ml.cloud.ibm.com",
    ),
    project_id=os.environ["WATSONX_PROJECT_ID"],
)

lunary.monitor_ibm(model)  # instrument the model so chats are logged to Lunary

print(model.model_id)
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Who won the world series in 2020?"}
]
response = model.chat(messages=messages)

# print(json.dumps(response, indent=4))
# print(generated_response['choices'][0]['message']['content'])
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,28 @@ | ||
from openai import OpenAI

import lunary

client = OpenAI()
lunary.monitor(client)

# One function tool exposing a single string parameter.
weather_function = {
    "name": "get_weather",
    "parameters": {
        "type": "object",
        "properties": {
            "location": {"type": "string"}
        },
    },
}
tools = [{"type": "function", "function": weather_function}]

completion = client.chat.completions.create(
    model="gpt-4o",
    messages=[{"role": "user", "content": "What's the weather like in Paris today?"}],
    tools=tools,
)

print(completion.choices[0].message.tool_calls)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,83 @@ | ||
import json, logging | ||
import anthropic | ||
|
||
# Module-level logger for the Anthropic instrumentation helpers below.
logger = logging.getLogger(__name__)

# TODO: make sure it's the correct list
# Request kwargs copied verbatim into the "extra" payload by parse_input;
# any kwarg not listed here is dropped from the tracked metadata.
# NOTE(review): several entries ("functions", "logprobs", "prediction",
# "service_tier", ...) look carried over from an OpenAI capture list —
# confirm which ones the Anthropic Messages API actually accepts.
KWARGS_TO_CAPTURE = [
    "frequency_penalty",
    "functions",
    "logit_bias",
    "max_tokens",
    "max_completion_tokens",
    "n",
    "presence_penalty",
    "response_format",
    "seed",
    "stop",
    "stream",
    "audio",
    "modalities",
    "temperature",
    "tool_choice",
    "tools",
    "tool_calls",
    "top_p",
    "top_k",
    "top_logprobs",
    "logprobs",
    "prediction",
    "service_tier",
    "parallel_tool_calls"
]
|
||
class AnthropicUtils: | ||
@staticmethod | ||
def parse_message(message): | ||
tool_calls = getattr(message, "tool_calls") | ||
|
||
if tool_calls is not None: | ||
tool_calls = [ | ||
json.loads(tool_calls.model_dump_json(index=2, exclude_unset=True)) | ||
for tool_calls in tool_calls | ||
] | ||
# TODO: audio? | ||
# audio = getattr(message, "audio") | ||
# if audio is not None: | ||
# audio = json.loads(audio.model_dump_json(indent=2, exclude_unset=True)) | ||
|
||
parsed_message = { | ||
"role": getattr(message, "role"), | ||
"content": getattr(message, "content"), | ||
"refusal": getattr(message, "refusal"), | ||
# TODO: "audio": audio? | ||
# TODO: function_calls? | ||
"tool_calls": getattr(message, "tool_calls") | ||
} | ||
return parsed_message | ||
|
||
|
||
@staticmethod | ||
def parse_input(*args, **kwargs): | ||
try: | ||
messages = [AnthropicUtils.parse_message(message) for message in kwargs["messages"]] | ||
name = kwargs.get("model") | ||
extra = {key: kwargs[key] for key in KWARGS_TO_CAPTURE if key in kwargs} | ||
|
||
extra = {k: v for k, v in kwargs.items() if k in KWARGS_TO_CAPTURE} | ||
return {"name": name, "input": messages, "extra": extra} | ||
except Exception as e: | ||
logger.error("Error parsing input: ", e) | ||
|
||
    @staticmethod
    def parse_output(message, stream=False):
        # Normalize an Anthropic response message plus token usage for tracking.
        # `stream` is accepted but unused in the visible code.
        # NOTE(review): parsed_output is built but never returned in the code
        # visible here — a `return parsed_output` appears to be missing;
        # confirm against the full file (this view is truncated).
        try:
            parsed_output = {
                "output": AnthropicUtils.parse_message(message),
                "tokenUsage": {
                    # getattr with no default raises AttributeError when usage
                    # lacks these fields; the except below swallows that.
                    "prompt": getattr(message.usage, "input_tokens"),
                    "completion": getattr(message.usage, "output_tokens")
                }
            }
        except Exception as e:
            # NOTE(review): the trailing positional arg is never interpolated
            # into the %-style message, so the exception detail is not logged.
            logger.error("Error parsing output: ", e)
Oops, something went wrong.