-
Notifications
You must be signed in to change notification settings - Fork 363
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* mo.ai.chat() * progress * example * improvements * ignore imports * ignore again * cr comments * docs * more tests * fixes * docs * more docs * copyright * add variable templating * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
- Loading branch information
1 parent
1bbd18b
commit 6fba656
Showing
23 changed files
with
2,019 additions
and
14 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,135 @@ | ||
# Chat | ||
|
||
```{eval-rst} | ||
.. marimo-embed:: | ||
:size: large | ||
@app.cell | ||
def __(): | ||
def simple_echo_model(messages, config): | ||
return f"You said: {messages[-1].content}" | ||
mo.ui.chat( | ||
simple_echo_model, | ||
prompts=["Hello", "How are you?"], | ||
show_configuration_controls=True | ||
) | ||
return | ||
``` | ||
|
||
The chat UI element provides an interactive chatbot interface for conversations. It can be customized with different models, including built-in AI models or custom functions. | ||
|
||
```{eval-rst} | ||
.. autoclass:: marimo.ui.chat | ||
:members: | ||
.. autoclasstoc:: marimo._plugins.ui._impl.chat.chat.chat | ||
``` | ||
|
||
## Basic Usage | ||
|
||
Here's a simple example using a custom echo model: | ||
|
||
```python | ||
import marimo as mo | ||
|
||
def echo_model(messages, config): | ||
return f"Echo: {messages[-1].content}" | ||
|
||
chat = mo.ui.chat(echo_model, prompts=["Hello", "How are you?"]) | ||
chat | ||
``` | ||
|
||
## Using a Built-in AI Model | ||
|
||
You can use marimo's built-in AI models, such as OpenAI's GPT: | ||
|
||
```python | ||
import marimo as mo | ||
|
||
chat = mo.ui.chat( | ||
mo.ai.openai( | ||
"gpt-4", | ||
system_message="You are a helpful assistant.", | ||
), | ||
show_configuration_controls=True | ||
) | ||
chat | ||
``` | ||
|
||
## Accessing Chat History | ||
|
||
You can access the chat history using the `value` attribute: | ||
|
||
```python | ||
chat.value | ||
``` | ||
|
||
This returns a list of `ChatMessage` objects, each containing `role` and `content` attributes. | ||
|
||
```{eval-rst} | ||
.. autoclass:: ChatMessage | ||
:members: | ||
.. autoclasstoc:: marimo._plugins.ui._impl.chat.types.ChatMessage | ||
``` | ||
|
||
## Custom Model with Additional Context | ||
|
||
Here's an example of a custom model that uses additional context: | ||
|
||
```python | ||
import marimo as mo | ||
|
||
def rag_model(messages, config): | ||
question = messages[-1].content | ||
docs = find_relevant_docs(question) | ||
context = "\n".join(docs) | ||
prompt = f"Context: {context}\n\nQuestion: {question}\n\nAnswer:" | ||
response = query_llm(prompt, config) | ||
return response | ||
|
||
mo.ui.chat(rag_model) | ||
``` | ||
|
||
This example demonstrates how you can implement a Retrieval-Augmented Generation (RAG) model within the chat interface. | ||
|
||
## Built-in Models | ||
|
||
marimo provides several built-in AI models that you can use with the chat UI element. | ||
|
||
```python | ||
import marimo as mo | ||
|
||
mo.ui.chat( | ||
mo.ai.openai( | ||
"gpt-4", | ||
system_message="You are a helpful assistant.", | ||
api_key="sk-...", | ||
), | ||
show_configuration_controls=True | ||
) | ||
|
||
mo.ui.chat( | ||
mo.ai.anthropic( | ||
"claude-3-5-sonnet-20240602", | ||
system_message="You are a helpful assistant.", | ||
api_key="sk-...", | ||
), | ||
show_configuration_controls=True | ||
) | ||
``` | ||
|
||
```{eval-rst} | ||
.. autoclass:: marimo.ai.models.openai | ||
:members: | ||
.. autoclasstoc:: marimo._plugins.ui._impl.chat.models.openai | ||
``` | ||
|
||
```{eval-rst} | ||
.. autoclass:: marimo.ai.models.anthropic | ||
:members: | ||
.. autoclasstoc:: marimo._plugins.ui._impl.chat.models.anthropic | ||
``` |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,76 @@ | ||
# /// script | ||
# requires-python = ">=3.11" | ||
# dependencies = [ | ||
# "ell-ai==0.0.12", | ||
# "marimo", | ||
# "openai==1.50.1", | ||
# ] | ||
# /// | ||
|
||
import marimo

# Version of marimo that generated this notebook file (used by the loader).
__generated_with = "0.8.20"
# `app` collects the @app.cell functions defined below into a dataflow graph.
app = marimo.App(width="medium")
|
||
|
||
@app.cell(hide_code=True)
def __(mo):
    # Render the notebook title as markdown output of this cell.
    mo.md(r"""# Simple chatbot 🤖""")
    return
|
||
|
||
@app.cell(hide_code=True)
def __(mo):
    import os

    # A password-style text box as a fallback way to supply the API key.
    input_key = mo.ui.text(label="OpenAI API key", kind="password")
    # Environment variable takes precedence over the widget.
    os_key = os.environ.get("OPENAI_API_KEY")
    # Only display the input widget when no key is present in the environment.
    None if os_key else input_key
    return input_key, os, os_key
|
||
|
||
@app.cell(hide_code=True)
def __(input_key, mo, os_key):
    # Initialize a client
    # Prefer the environment variable; fall back to the text-input widget value.
    openai_key = os_key or input_key.value

    # Halt this cell (and every dependent cell) until a key is available.
    mo.stop(
        not openai_key,
        "Please set the OPENAI_API_KEY environment variable or provide it in the input field",
    )

    # Imported after the mo.stop guard so the notebook loads without a key.
    import ell
    import openai

    # Create an openai client
    client = openai.Client(api_key=openai_key)
    return client, ell, openai, openai_key
|
||
|
||
@app.cell
def __(client, ell, mo):
    # NOTE: the docstring below is runtime-significant — ell.simple uses a
    # function's docstring as the model's system prompt. Do not reword it
    # as documentation.
    @ell.simple("gpt-4o-mini-2024-07-18", client=client)
    def _my_model(prompt):
        """You are an annoying little brother, whatever I say, be sassy with your response"""
        return prompt

    # Chat UI backed by the ell model; `prompts` are suggested starter messages.
    mo.ui.chat(
        mo.ai.models.simple(_my_model),
        prompts=[
            "Hello",
            "How are you?",
            "I'm doing great, how about you?",
        ],
    )
    return
|
||
|
||
@app.cell
def __():
    # Import marimo inside a cell so other cells can depend on `mo`.
    import marimo as mo
    return (mo,)
|
||
|
||
# Run the notebook as an app/script when executed directly.
if __name__ == "__main__":
    app.run()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,74 @@ | ||
/* Copyright 2024 Marimo. All rights reserved. */ | ||
import { z } from "zod"; | ||
import { createPlugin } from "@/plugins/core/builder"; | ||
import { rpc } from "@/plugins/core/rpc"; | ||
import { TooltipProvider } from "@/components/ui/tooltip"; | ||
import { Chatbot } from "./chat-ui"; | ||
import type { ChatMessage, SendMessageRequest } from "./types"; | ||
import { Arrays } from "@/utils/arrays"; | ||
|
||
// eslint-disable-next-line @typescript-eslint/consistent-type-definitions
type PluginFunctions = {
  // Fetch the recorded conversation from the plugin host.
  get_chat_history: () => Promise<{ messages: ChatMessage[] }>;
  // Send the message list + model config; resolves to the assistant's reply text.
  send_prompt: (req: SendMessageRequest) => Promise<string>;
};
|
||
/**
 * Chat plugin: registers the `marimo-chatbot` element, validates its data
 * with zod, exposes the backend RPCs, and renders the Chatbot component.
 * The plugin's value is the list of chat messages.
 */
export const ChatPlugin = createPlugin<ChatMessage[]>("marimo-chatbot")
  .withData(
    z.object({
      // Suggested starter prompts shown to the user (may be empty).
      prompts: z.array(z.string()).default(Arrays.EMPTY),
      showConfigurationControls: z.boolean(),
      // Model sampling parameters with defaults (camelCase on the frontend).
      config: z.object({
        maxTokens: z.number().default(100),
        temperature: z.number().default(0.5),
        topP: z.number().default(1),
        topK: z.number().default(40),
        frequencyPenalty: z.number().default(0),
        presencePenalty: z.number().default(0),
      }),
    }),
  )
  .withFunctions<PluginFunctions>({
    // No input; returns the full message history.
    get_chat_history: rpc.input(z.object({})).output(
      z.object({
        messages: z.array(
          z.object({
            role: z.enum(["system", "user", "assistant"]),
            content: z.string(),
          }),
        ),
      }),
    ),
    // NOTE(review): the request schema uses snake_case config keys while
    // `withData` above uses camelCase — presumably the backend contract;
    // confirm conversion happens before this RPC is called.
    send_prompt: rpc
      .input(
        z.object({
          messages: z.array(
            z.object({
              role: z.enum(["system", "user", "assistant"]),
              content: z.string(),
            }),
          ),
          config: z.object({
            max_tokens: z.number(),
            temperature: z.number(),
            top_p: z.number(),
            top_k: z.number(),
            frequency_penalty: z.number(),
            presence_penalty: z.number(),
          }),
        }),
      )
      .output(z.string()),
  })
  .renderer((props) => (
    <TooltipProvider>
      <Chatbot
        prompts={props.data.prompts}
        showConfigurationControls={props.data.showConfigurationControls}
        config={props.data.config}
        sendPrompt={props.functions.send_prompt}
        value={props.value || Arrays.EMPTY}
        setValue={props.setValue}
      />
    </TooltipProvider>
  ));
Oops, something went wrong.