allow calling any recipe via functions
allow disabling deep search and generate answer on google gpt
devxpy committed Jan 17, 2025
1 parent 51cd0ed commit 73dcbeb
Showing 3 changed files with 114 additions and 54 deletions.
4 changes: 2 additions & 2 deletions daras_ai_v2/serp_search_locations.py
@@ -305,7 +305,7 @@ class SerpSearchLocation(TextChoices):

class GoogleSearchLocationMixin(BaseModel):
    serp_search_location: SerpSearchLocation | None = Field(
-        title="Web Search Location",
+        title="Location",
    )
    scaleserp_locations: list[str] | None = Field(
        description="DEPRECATED: use `serp_search_location` instead"
@@ -314,7 +314,7 @@ class GoogleSearchLocationMixin(BaseModel):

class GoogleSearchMixin(GoogleSearchLocationMixin, BaseModel):
    serp_search_type: SerpSearchType | None = Field(
-        title="Web Search Type",
+        title="Search Type",
    )
    scaleserp_search_field: str | None = Field(
        description="DEPRECATED: use `serp_search_type` instead"
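Note: pydantic's `Field(title=...)` only affects the generated JSON schema (and therefore the label shown in the UI and API docs), not the field name itself, so this is a cosmetic rename. A rough check of that behaviour with a stand-in model (not the real mixin):

```python
from pydantic import BaseModel, Field


class Example(BaseModel):  # stand-in model, not the actual GoogleSearchMixin
    serp_search_type: str | None = Field(None, title="Search Type")


# The title surfaces in the schema, which is what settings UIs and docs render.
assert Example.schema()["properties"]["serp_search_type"]["title"] == "Search Type"
```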
73 changes: 52 additions & 21 deletions functions/recipe_functions.py
@@ -2,41 +2,50 @@
import typing

import gooey_gui as gui
+from django.utils.text import slugify

from app_users.models import AppUser
from daras_ai_v2.enum_selector_widget import enum_selector
from daras_ai_v2.field_render import field_title_desc
-from django.utils.text import slugify

from functions.models import CalledFunction, FunctionTrigger

if typing.TYPE_CHECKING:
-    from bots.models import SavedRun
+    from bots.models import SavedRun, Workflow
    from daras_ai_v2.base import BasePage, JsonTypes
    from workspaces.models import Workspace


class LLMTool:
    def __init__(self, function_url: str):
        from daras_ai_v2.workflow_url_input import url_to_runs
+        from bots.models import Workflow

        self.function_url = function_url

        _, fn_sr, fn_pr = url_to_runs(function_url)
-        self._function_runs = (fn_sr, fn_pr)
+        self._fn_runs = (fn_sr, fn_pr)

        self.name = slugify(fn_pr.title).replace("-", "_")
        self.label = fn_pr.title

-        fn_vars = fn_sr.state.get("variables", {})
-        fn_vars_schema = fn_sr.state.get("variables_schema", {})
+        if fn_sr.workflow == Workflow.FUNCTIONS:
+            tool_vars = fn_sr.state.get("variables", {})
+            tool_vars_schema = fn_sr.state.get("variables_schema", {})
+        else:
+            page_cls = Workflow(fn_sr.workflow).page_cls
+            _, tool_vars = page_cls.get_example_request(fn_sr.state, fn_pr)
+            tool_vars_schema = page_cls.RequestModel.schema()["properties"]

        self.spec = {
            "type": "function",
            "function": {
                "name": self.name,
                "description": fn_pr.notes,
                "parameters": {
                    "type": "object",
-                    "properties": dict(generate_tool_params(fn_vars, fn_vars_schema)),
+                    "properties": dict(
+                        generate_tool_params(tool_vars, tool_vars_schema)
+                    ),
                },
            },
        }
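Since the tool parameters for a non-Functions workflow are now taken from the recipe's `RequestModel` schema, the generated OpenAI-style spec ends up describing the recipe's request fields. A hypothetical example of what `self.spec` could look like for such a recipe (field names and values below are made up for illustration):

```python
# Illustrative only: a spec built from a saved run titled "My Doc Search"
# whose (hypothetical) RequestModel declares `search_query` and `max_references`.
spec = {
    "type": "function",
    "function": {
        "name": "my_doc_search",  # slugify(fn_pr.title).replace("-", "_")
        "description": "Search my team's docs and cite sources.",  # fn_pr.notes
        "parameters": {
            "type": "object",
            "properties": {
                "search_query": {"type": "string", "title": "Search Query"},
                "max_references": {"type": "integer", "title": "Max References"},
            },
        },
    },
}
```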
@@ -61,34 +70,48 @@ def bind(
        return self

    def __call__(self, **kwargs):
+        from bots.models import Workflow
+        from daras_ai_v2.base import extract_model_fields

        try:
            self.saved_run
        except AttributeError:
            raise RuntimeError("This LLMTool instance is not yet bound")

-        fn_sr, fn_pr = self._function_runs
+        fn_sr, fn_pr = self._fn_runs

+        if fn_sr.workflow == Workflow.FUNCTIONS:
+            state_vars = self.state.setdefault("variables", {})
+            state_vars_schema = self.state.setdefault("variables_schema", {})
+            system_vars, system_vars_schema = self._get_system_vars()
+            request_body = dict(
+                variables=(
+                    (fn_sr.state.get("variables") or {})
+                    | state_vars
+                    | system_vars
+                    | kwargs
+                ),
+                variables_schema=(
+                    (fn_sr.state.get("variables_schema") or {})
+                    | state_vars_schema
+                    | system_vars_schema
+                ),
+            )
+        else:
+            request_body = kwargs

-        state_vars = self.state.setdefault("variables", {})
-        state_vars_schema = self.state.setdefault("variables_schema", {})
-        system_vars, system_vars_schema = self._get_system_vars()
-        fn_vars = (
-            (fn_sr.state.get("variables") or {}) | state_vars | system_vars | kwargs
-        )
-        fn_vars_schema = (
-            (fn_sr.state.get("variables_schema") or {})
-            | state_vars_schema
-            | system_vars_schema
-        )
        result, fn_sr = fn_sr.submit_api_call(
            workspace=self.workspace,
            current_user=self.current_user,
            parent_pr=fn_pr,
-            request_body=dict(variables=fn_vars, variables_schema=fn_vars_schema),
+            request_body=request_body,
            deduct_credits=False,
        )

        CalledFunction.objects.create(
-            saved_run=self.saved_run, function_run=fn_sr, trigger=self.trigger.db_value
+            saved_run=self.saved_run,
+            function_run=fn_sr,
+            trigger=self.trigger.db_value,
        )

        # wait for the result if it's a pre-request function
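The variable merge above leans on Python's dict union operator, where the right-most operand wins on key collisions, so LLM-supplied arguments override system variables, which override state variables, which override the function run's saved defaults. A small standalone illustration (values are made up):

```python
# Rightmost wins: kwargs > system vars > state vars > saved defaults.
saved_defaults = {"name": "default", "lang": "en"}
state_vars = {"name": "from_state"}
system_vars = {"lang": "hi"}
llm_kwargs = {"name": "from_llm"}

merged = saved_defaults | state_vars | system_vars | llm_kwargs
assert merged == {"name": "from_llm", "lang": "hi"}
```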
@@ -99,6 +122,11 @@ def __call__(self, **kwargs):
        if fn_sr.error_msg:
            raise RuntimeError(fn_sr.error_msg)

+        if fn_sr.workflow != Workflow.FUNCTIONS:
+            page_cls = Workflow(fn_sr.workflow).page_cls
+            return_value = extract_model_fields(page_cls.ResponseModel, fn_sr.state)
+            return return_value
+
        # save the output from the function
        return_value = fn_sr.state.get("return_value")
        if return_value is not None:
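`extract_model_fields` itself isn't shown in this diff; conceptually, the non-Functions branch returns just the keys of the recipe's `ResponseModel` that are present in the run state. A standalone sketch of that idea (not the repo's actual helper, and the model is a stand-in):

```python
from pydantic import BaseModel


class ExampleResponseModel(BaseModel):  # stand-in for a recipe's ResponseModel
    output_text: list[str] = []
    references: list[dict] = []


def extract_model_fields_sketch(model_cls: type[BaseModel], state: dict) -> dict:
    # Keep only the state keys that are declared fields on the model.
    return {k: v for k, v in state.items() if k in model_cls.__fields__}


state = {"output_text": ["hi"], "references": [], "internal_cache_key": "xyz"}
assert extract_model_fields_sketch(ExampleResponseModel, state) == {
    "output_text": ["hi"],
    "references": [],
}
```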
@@ -270,6 +298,7 @@ def render_function_input(list_key: str, del_key: str, d: dict):

def render_called_functions(*, saved_run: "SavedRun", trigger: FunctionTrigger):
    from daras_ai_v2.breadcrumbs import get_title_breadcrumbs
+    from bots.models import Workflow
    from recipes.Functions import FunctionsPage

    if not is_functions_enabled():
@@ -295,6 +324,8 @@ def render_called_functions(*, saved_run: "SavedRun", trigger: FunctionTrigger):
            "</a>"
        )

+        if fn_sr.workflow != Workflow.FUNCTIONS:
+            continue
        fn_vars = fn_sr.state.get("variables", {})
        fn_vars_schema = fn_sr.state.get("variables_schema", {})
        inputs = {
91 changes: 60 additions & 31 deletions recipes/GoogleGPT.py
@@ -137,22 +137,42 @@ def render_example(self, state: dict):
        render_output_with_refs(state, 200)

    def render_settings(self):
-        gui.text_area(
-            "### Task Instructions",
-            key="task_instructions",
-            height=300,
-        )
-        gui.write("---")
-        selected_model = language_model_selector()
-        language_model_settings(selected_model)
-        gui.write("---")
        serp_search_settings()
        gui.write("---")
-        gui.write("##### 🔎 Document Search Settings")
-        query_instructions_widget()
-        cache_knowledge_widget(self)
+        with gui.styled("& + label h5 { margin-bottom: 0 }"):
+            activate_deep_search = gui.checkbox(
+                "##### 🕵️‍♀️ Activate Deep Search",
+                help="Download and embed the webpages for enhanced understanding. Slower but powerful.",
+                value=bool(gui.session_state.get("embedding_model")),
+            )
+        if activate_deep_search:
+            doc_search_advanced_settings()
+        else:
+            gui.session_state["embedding_model"] = None
        gui.write("---")
-        doc_search_advanced_settings()
+        with gui.styled("& + label h5 { margin-bottom: 0 }"):
+            generate_answer = gui.checkbox(
+                "##### 💬 Generate Answer",
+                value=bool(gui.session_state.get("task_instructions")),
+            )
+        if generate_answer:
+            gui.text_area(
+                """
+                ### Instructions
+                Instruct the LLM model on how to interpret the results to create an answer.
+                """,
+                key="task_instructions",
+                height=300,
+            )
+            gui.write("---")
+            selected_model = language_model_selector()
+            language_model_settings(selected_model)
+            gui.write("---")
+            gui.write("##### 🔎 Document Search Settings")
+            query_instructions_widget()
+            cache_knowledge_widget(self)
+        else:
+            gui.session_state["task_instructions"] = None

    def related_workflows(self) -> list:
        from recipes.DocSearch import DocSearchPage
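Both new checkboxes follow the same convention: the toggle's initial value is inferred from whether its backing field is set, and switching it off writes `None` so that `run_v2` can skip the corresponding stage. A minimal sketch of that pattern outside of gooey_gui (names and values are illustrative):

```python
# Deep search is "on" iff an embedding model is set; answer generation is "on"
# iff task_instructions is set. Unchecking a toggle clears its backing field.
def apply_toggle(state: dict, key: str, enabled: bool) -> None:
    if not enabled:
        state[key] = None  # a cleared field tells run_v2 to skip this stage


state = {"embedding_model": "openai-3-large", "task_instructions": "Answer concisely."}
apply_toggle(state, "embedding_model", enabled=False)
apply_toggle(state, "task_instructions", enabled=True)
assert state == {"embedding_model": None, "task_instructions": "Answer concisely."}
```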
@@ -245,29 +265,38 @@ def run_v2(
        if not link_titles:
            raise EmptySearchResults(response.final_search_query)

-        # run vector search on links
-        response.references = yield from get_top_k_references(
-            DocSearchRequest.parse_obj(
-                {
-                    **request.dict(),
-                    "documents": list(link_titles.keys()),
-                    "search_query": request.search_query,
-                },
-            ),
-            is_user_url=False,
-            current_user=self.request.user,
-        )
-
-        # add pretty titles to references
-
-        for ref in response.references:
-            key = furl(ref["url"]).remove(fragment=True).url
-            ref["title"] = link_titles.get(key, "")
+        if request.embedding_model:
+            # run vector search on links
+            response.references = yield from get_top_k_references(
+                DocSearchRequest.parse_obj(
+                    {
+                        **request.dict(),
+                        "documents": list(link_titles.keys()),
+                        "search_query": request.search_query,
+                    },
+                ),
+                is_user_url=False,
+                current_user=self.request.user,
+            )
+            # add pretty titles to references
+            for ref in response.references:
+                key = furl(ref["url"]).remove(fragment=True).url
+                ref["title"] = link_titles.get(key, "")
+        else:
+            response.references = [
+                SearchReference(url=item.url, title=item.title, snippet=item.snippet)
+                for item in links
+            ]

        # empty search result, abort!
        if not response.references:
            raise EmptySearchResults(request.search_query)

+        if not request.task_instructions:
+            response.final_prompt = ""
+            response.output_text = []
+            return
+
        response.final_prompt = ""
        # add search results to the prompt
        response.final_prompt += references_as_prompt(response.references) + "\n\n"
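Taken together, the two nullable fields give GoogleGPT a small matrix of behaviours: the embedding model gates vector search over the scraped pages (otherwise raw SERP snippets become the references), and the task instructions gate the final LLM answer (otherwise the run returns references only). A schematic helper, not code from the repo:

```python
def google_gpt_plan(
    embedding_model: str | None, task_instructions: str | None
) -> tuple[str, bool]:
    # Mirrors the two independent branches in run_v2 above (illustrative only).
    references = (
        "vector search over scraped pages" if embedding_model else "raw SERP snippets"
    )
    generate_answer = bool(task_instructions)
    return references, generate_answer


assert google_gpt_plan(None, None) == ("raw SERP snippets", False)
assert google_gpt_plan("text-embedding-3-large", "Answer concisely.") == (
    "vector search over scraped pages",
    True,
)
```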