Skip to content

Commit 47f203b

Browse files
committed
Pass model and/or provider for LLM
1 parent 8ffb385 commit 47f203b

File tree

1 file changed

+31
-4
lines changed
  • src/ontobot_change_agent

1 file changed

+31
-4
lines changed

src/ontobot_change_agent/cli.py

+31-4
Original file line numberDiff line numberDiff line change
@@ -12,9 +12,27 @@
1212

1313
try:
1414
from llm_change_agent.cli import execute
15-
from llm_change_agent.utils.llm_utils import extract_commands
15+
from llm_change_agent.utils.llm_utils import (
16+
extract_commands,
17+
get_anthropic_models,
18+
get_lbl_cborg_models,
19+
get_ollama_models,
20+
get_openai_models,
21+
)
22+
23+
from ontobot_change_agent.constants import (
24+
ANTHROPIC_PROVIDER,
25+
CBORG_PROVIDER,
26+
OLLAMA_PROVIDER,
27+
OPENAI_PROVIDER,
28+
OWL_EXTENSION,
29+
)
1630

1731
llm_change_agent_available = True
32+
ALL_AVAILABLE_PROVIDERS = [OPENAI_PROVIDER, OLLAMA_PROVIDER, ANTHROPIC_PROVIDER, CBORG_PROVIDER]
33+
ALL_AVAILABLE_MODELS = (
34+
get_openai_models() + get_ollama_models() + get_anthropic_models() + get_lbl_cborg_models()
35+
)
1836
except ImportError:
1937
# Handle the case where the package is not installed
2038
llm_change_agent_available = False
@@ -29,7 +47,6 @@
2947
process_issue_via_jar,
3048
process_issue_via_oak,
3149
)
32-
from ontobot_change_agent.constants import OPEN_AI_MODEL, OPENAI_PROVIDER, OWL_EXTENSION
3350

3451
__all__ = [
3552
"main",
@@ -123,6 +140,12 @@ def main(verbose: int, quiet: bool):
123140
default=False,
124141
help="Use llm-change-agent for processing.",
125142
)
143+
llm_provider_option = click.option(
144+
"--provider", type=click.Choice(ALL_AVAILABLE_PROVIDERS), help="Provider to use for generation."
145+
)
146+
llm_model_option = click.option(
147+
"--model", type=click.Choice(ALL_AVAILABLE_MODELS), help="Model to use for generation."
148+
)
126149

127150

128151
@main.command()
@@ -188,6 +211,8 @@ def get_labels(repo: str, token: str):
188211
@jar_path_option
189212
@output_option
190213
@use_llm_option
214+
@llm_provider_option
215+
@llm_model_option
191216
def process_issue(
192217
input: str,
193218
repo: str,
@@ -200,6 +225,8 @@ def process_issue(
200225
jar_path: str,
201226
output: str,
202227
use_llm: bool = False,
228+
provider: str = None,
229+
model: str = None,
203230
):
204231
"""Run processes based on issue label.
205232
@@ -249,8 +276,8 @@ def process_issue(
249276
click.echo(f"Summoning llm-change-agent for {issue[TITLE]}")
250277
with click.Context(execute) as ctx:
251278
ctx.params["prompt"] = issue[BODY]
252-
ctx.params["provider"] = OPENAI_PROVIDER
253-
ctx.params["model"] = OPEN_AI_MODEL
279+
ctx.params["provider"] = provider
280+
ctx.params["model"] = model
254281
response = extract_commands(execute.invoke(ctx))
255282
KGCL_COMMANDS = [
256283
command.replace('"', "'") for command in ast.literal_eval(response)

0 commit comments

Comments
 (0)