Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(core): add CLI commands to render / exec prompts #146

Merged
merged 5 commits into from
Oct 25, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
54 changes: 54 additions & 0 deletions packages/ragbits-core/src/ragbits/core/cli.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,28 @@
# pylint: disable=import-outside-toplevel
# pylint: disable=missing-param-doc
import asyncio
import json
from importlib import import_module
from pathlib import Path

import typer
from rich import print as pprint

from ragbits.core.config import core_config
from ragbits.core.prompt.prompt import Prompt


def _render(prompt_path: str, payload: str | None) -> Prompt:
    """
    Render a prompt by importing its class and optionally applying a JSON payload.

    Args:
        prompt_path: Dotted path of the form ``"module.path:ClassName"`` pointing
            at a ``Prompt`` subclass.
        payload: Optional JSON object (as a string) used to construct the prompt's
            ``input_type`` model. When ``None``, the prompt is built without inputs.

    Returns:
        The instantiated prompt.

    Raises:
        ValueError: If ``prompt_path`` does not contain exactly one ``":"`` separator,
            or if ``payload`` is not valid JSON.
        ModuleNotFoundError: If the module part cannot be imported.
        AttributeError: If the class is not found in the imported module.
    """
    module_stringified, object_stringified = prompt_path.split(":")
    prompt_cls = getattr(import_module(module_stringified), object_stringified)

    if payload is not None:
        # Parse into a new variable instead of rebinding the `payload` parameter,
        # which is annotated as `str | None` (keeps static type-checkers happy).
        payload_dict = json.loads(payload)
        inputs = prompt_cls.input_type(**payload_dict)
        return prompt_cls(inputs)

    return prompt_cls()


# Typer sub-application that groups all prompt-related CLI commands;
# shows help when invoked without arguments.
prompts_app = typer.Typer(no_args_is_help=True)

Expand Down Expand Up @@ -43,4 +61,40 @@ def generate_promptfoo_configs(

generate_configs(file_pattern=file_pattern, root_path=root_path, target_path=target_path)

@prompts_app.command()
def render(prompt_path: str, payload: str | None = None) -> None:
    """
    Renders a prompt by loading a class from a module and initializing it with a given payload.
    """
    # Build the prompt instance, then show its rendered chat messages.
    rendered_prompt = _render(prompt_path=prompt_path, payload=payload)
    pprint("[orange3]RENDERED PROMPT:")
    pprint(rendered_prompt.chat)

@prompts_app.command(name="exec")
def execute(
    prompt_path: str, payload: str | None = None, llm_factory: str | None = core_config.default_llm_factory
) -> None:
    """
    Executes a prompt using the specified prompt class and LLM factory.

    Args:
        prompt_path: Dotted path of the form ``"module.path:ClassName"``.
        payload: Optional JSON string used to build the prompt's input model.
        llm_factory: Dotted path to a factory returning an LLM instance;
            defaults to the value configured in ``core_config``.

    Raises:
        ValueError: If `llm_factory` is not provided.
    """
    # Deferred import keeps optional LLM dependencies out of the CLI startup path.
    from ragbits.core.llms.factory import get_llm_from_factory

    # Validate the factory *before* rendering, so a missing configuration fails
    # fast instead of after potentially expensive prompt construction.
    if llm_factory is None:
        raise ValueError("`llm_factory` must be provided")
    llm = get_llm_from_factory(llm_factory)

    prompt = _render(prompt_path=prompt_path, payload=payload)

    # llm.generate is async; run it to completion on a fresh event loop.
    response = asyncio.run(llm.generate(prompt))

    pprint("[orange3]QUESTION:")
    pprint(prompt.chat)
    pprint("[orange3]ANSWER:")
    pprint(response)

# Mount the prompt commands under the `prompts` namespace of the main CLI app
# (NOTE(review): `app` is defined elsewhere in this file — outside this diff hunk).
app.add_typer(prompts_app, name="prompts", help="Commands for managing prompts")
Loading