Skip to content

Commit

Permalink
feat(prompt-lab): Register a CLI command for Prompt Lab (#52)
Browse files Browse the repository at this point in the history
  • Loading branch information
ludwiktrammer authored Sep 30, 2024
1 parent d390703 commit fdc5790
Show file tree
Hide file tree
Showing 4 changed files with 20 additions and 47 deletions.
2 changes: 1 addition & 1 deletion packages/ragbits-cli/src/ragbits/cli/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

import ragbits

app = Typer()
app = Typer(no_args_is_help=True)


def main() -> None:
Expand Down
19 changes: 0 additions & 19 deletions packages/ragbits-core/src/ragbits/core/cli.py

This file was deleted.

16 changes: 16 additions & 0 deletions packages/ragbits-dev-kit/src/ragbits/dev_kit/cli.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import typer

from .prompt_lab.app import lab_app

# Module-level Typer sub-application that groups prompt-related commands;
# it is attached to the main CLI app by register() below.
# no_args_is_help=True makes `... prompts` with no subcommand print help
# instead of erroring.
prompts_app = typer.Typer(no_args_is_help=True)


def register(app: typer.Typer) -> None:
    """
    Register the CLI commands for the package.

    Attaches the ``lab`` command to the module-level ``prompts_app`` Typer
    sub-application, then mounts that sub-application on the given root app
    under the ``prompts`` group.

    Args:
        app: The Typer object to register the commands with.
    """
    # Register lab_app as the "lab" subcommand of the prompts group.
    add_lab_command = prompts_app.command(name="lab")
    add_lab_command(lab_app)
    # Mount the prompts group on the root CLI application.
    app.add_typer(prompts_app, name="prompts", help="Commands for managing prompts")
30 changes: 3 additions & 27 deletions packages/ragbits-dev-kit/src/ragbits/dev_kit/prompt_lab/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@

import gradio as gr
import jinja2
import typer
from pydantic import BaseModel

from ragbits.core.llms import LiteLLM
Expand Down Expand Up @@ -159,31 +158,12 @@ def get_input_type_fields(obj: BaseModel | None) -> list[dict]:
]


typer_app = typer.Typer(no_args_is_help=True)


@typer_app.command()
def run_app(
def lab_app( # pylint: disable=missing-param-doc
file_pattern: str = DEFAULT_FILE_PATTERN, llm_model: str | None = None, llm_api_key: str | None = None
) -> None:
"""
Launches the interactive application for working with Large Language Models (LLMs).
This function serves as the entry point for the application. It performs several key tasks:
1. Initializes the application state using the PromptState class.
2. Sets the LLM model name and API key based on user-provided arguments.
3. Fetches a list of prompts from the specified paths using the load_prompts_list function.
4. Creates a Gradio interface with various UI elements:
- A dropdown menu for selecting prompts.
- Textboxes for displaying and potentially modifying system and user prompts.
- Textboxes for entering input values based on the selected prompt.
- Buttons for rendering prompts, sending prompts to the LLM, and displaying the response.
Args:
file_pattern (str): A pattern for looking up prompt files.
llm_model (str): The name of the LLM model to use.
llm_api_key (str): The API key for the chosen LLM model.
Launches the interactive application for listing, rendering, and testing prompts
defined within the current project.
"""
with gr.Blocks() as gr_app:
prompt_state_obj = PromptState()
Expand Down Expand Up @@ -262,7 +242,3 @@ def show_split(index: int, state: gr.State) -> None:
llm_request_button.click(send_prompt_to_llm, prompts_state, llm_prompt_response)

gr_app.launch()


if __name__ == "__main__":
typer_app()

0 comments on commit fdc5790

Please sign in to comment.