Skip to content

Commit

Permalink
allow calling any recipe via functions
Browse files Browse the repository at this point in the history
allow disabling Deep Search and answer generation on Google GPT
  • Loading branch information
devxpy committed Jan 17, 2025
1 parent 51cd0ed commit f6beff7
Show file tree
Hide file tree
Showing 3 changed files with 136 additions and 62 deletions.
4 changes: 2 additions & 2 deletions daras_ai_v2/serp_search_locations.py
Original file line number Diff line number Diff line change
Expand Up @@ -305,7 +305,7 @@ class SerpSearchLocation(TextChoices):

class GoogleSearchLocationMixin(BaseModel):
serp_search_location: SerpSearchLocation | None = Field(
title="Web Search Location",
title="Location",
)
scaleserp_locations: list[str] | None = Field(
description="DEPRECATED: use `serp_search_location` instead"
Expand All @@ -314,7 +314,7 @@ class GoogleSearchLocationMixin(BaseModel):

class GoogleSearchMixin(GoogleSearchLocationMixin, BaseModel):
serp_search_type: SerpSearchType | None = Field(
title="Web Search Type",
title="Search Type",
)
scaleserp_search_field: str | None = Field(
description="DEPRECATED: use `serp_search_type` instead"
Expand Down
78 changes: 55 additions & 23 deletions functions/recipe_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,33 +2,40 @@
import typing

import gooey_gui as gui
from django.utils.text import slugify

from app_users.models import AppUser
from daras_ai_v2.enum_selector_widget import enum_selector
from daras_ai_v2.field_render import field_title_desc
from django.utils.text import slugify

from functions.models import CalledFunction, FunctionTrigger

if typing.TYPE_CHECKING:
from bots.models import SavedRun
from bots.models import SavedRun, Workflow
from daras_ai_v2.base import BasePage, JsonTypes
from workspaces.models import Workspace


class LLMTool:
def __init__(self, function_url: str):
from daras_ai_v2.workflow_url_input import url_to_runs
from bots.models import Workflow

self.function_url = function_url

_, fn_sr, fn_pr = url_to_runs(function_url)
self._function_runs = (fn_sr, fn_pr)
self._fn_runs = (fn_sr, fn_pr)

self.name = slugify(fn_pr.title).replace("-", "_")
self.label = fn_pr.title

fn_vars = fn_sr.state.get("variables", {})
fn_vars_schema = fn_sr.state.get("variables_schema", {})
if fn_sr.workflow == Workflow.FUNCTIONS:
fn_vars = fn_sr.state.get("variables", {})
fn_vars_schema = fn_sr.state.get("variables_schema", {})
else:
page_cls = Workflow(fn_sr.workflow).page_cls
_, fn_vars = page_cls.get_example_request(fn_sr.state, fn_pr)
fn_vars_schema = page_cls.RequestModel.schema()["properties"]

self.spec = {
"type": "function",
"function": {
Expand Down Expand Up @@ -61,34 +68,48 @@ def bind(
return self

def __call__(self, **kwargs):
from bots.models import Workflow
from daras_ai_v2.base import extract_model_fields

try:
self.saved_run
except AttributeError:
raise RuntimeError("This LLMTool instance is not yet bound")

fn_sr, fn_pr = self._function_runs
fn_sr, fn_pr = self._fn_runs

if fn_sr.workflow == Workflow.FUNCTIONS:
state_vars = self.state.setdefault("variables", {})
state_vars_schema = self.state.setdefault("variables_schema", {})
system_vars, system_vars_schema = self._get_system_vars()
request_body = dict(
variables=(
(fn_sr.state.get("variables") or {})
| state_vars
| system_vars
| kwargs
),
variables_schema=(
(fn_sr.state.get("variables_schema") or {})
| state_vars_schema
| system_vars_schema
),
)
else:
request_body = kwargs

state_vars = self.state.setdefault("variables", {})
state_vars_schema = self.state.setdefault("variables_schema", {})
system_vars, system_vars_schema = self._get_system_vars()
fn_vars = (
(fn_sr.state.get("variables") or {}) | state_vars | system_vars | kwargs
)
fn_vars_schema = (
(fn_sr.state.get("variables_schema") or {})
| state_vars_schema
| system_vars_schema
)
result, fn_sr = fn_sr.submit_api_call(
workspace=self.workspace,
current_user=self.current_user,
parent_pr=fn_pr,
request_body=dict(variables=fn_vars, variables_schema=fn_vars_schema),
request_body=request_body,
deduct_credits=False,
)

CalledFunction.objects.create(
saved_run=self.saved_run, function_run=fn_sr, trigger=self.trigger.db_value
saved_run=self.saved_run,
function_run=fn_sr,
trigger=self.trigger.db_value,
)

# wait for the result if its a pre request function
Expand All @@ -99,6 +120,11 @@ def __call__(self, **kwargs):
if fn_sr.error_msg:
raise RuntimeError(fn_sr.error_msg)

if fn_sr.workflow != Workflow.FUNCTIONS:
page_cls = Workflow(fn_sr.workflow).page_cls
return_value = extract_model_fields(page_cls.ResponseModel, fn_sr.state)
return return_value

# save the output from the function
return_value = fn_sr.state.get("return_value")
if return_value is not None:
Expand Down Expand Up @@ -270,7 +296,7 @@ def render_function_input(list_key: str, del_key: str, d: dict):

def render_called_functions(*, saved_run: "SavedRun", trigger: FunctionTrigger):
from daras_ai_v2.breadcrumbs import get_title_breadcrumbs
from recipes.Functions import FunctionsPage
from bots.models import Workflow

if not is_functions_enabled():
return
Expand All @@ -280,11 +306,15 @@ def render_called_functions(*, saved_run: "SavedRun", trigger: FunctionTrigger):
for called_fn in qs:
fn_sr = called_fn.function_run
tb = get_title_breadcrumbs(
FunctionsPage,
Workflow(fn_sr.workflow).page_cls,
fn_sr,
fn_sr.parent_published_run(),
)
title = (tb.published_title and tb.published_title.title) or tb.h1_title
title = (
(tb.published_title and tb.published_title.title)
or (tb.root_title and tb.root_title.title)
or tb.h1_title
)
key = f"fn-call-details-{called_fn.id}"
with gui.expander(f"🧩 Called `{title}`", key=key):
if not gui.session_state.get(key):
Expand All @@ -295,6 +325,8 @@ def render_called_functions(*, saved_run: "SavedRun", trigger: FunctionTrigger):
"</a>"
)

if fn_sr.workflow != Workflow.FUNCTIONS:
continue
fn_vars = fn_sr.state.get("variables", {})
fn_vars_schema = fn_sr.state.get("variables_schema", {})
inputs = {
Expand Down
116 changes: 79 additions & 37 deletions recipes/GoogleGPT.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
import typing

import gooey_gui as gui
from furl import furl
from pydantic import BaseModel

from bots.models import Workflow
from daras_ai_v2.base import BasePage
from daras_ai_v2.doc_search_settings_widgets import (
Expand Down Expand Up @@ -33,9 +36,6 @@
)
from daras_ai_v2.variables_widget import render_prompt_vars
from daras_ai_v2.vector_search import render_sources_widget
from furl import furl
from pydantic import BaseModel

from recipes.DocSearch import (
DocSearchRequest,
EmptySearchResults,
Expand All @@ -52,8 +52,6 @@ class GoogleGPTPage(BasePage):
workflow = Workflow.GOOGLE_GPT
slug_versions = ["google-gpt"]

price = 175

sane_defaults = dict(
search_query="rugs",
keywords="outdoor rugs,8x10 rugs,rug sizes,checkered rugs,5x7 rugs",
Expand Down Expand Up @@ -117,6 +115,18 @@ def render_form_v2(self):
gui.text_area("#### Google Search Query", key="search_query")
gui.text_input("Search on a specific site *(optional)*", key="site_filter")

gui.switch(
"##### 🕵️‍♀️ Activate Deep Search",
help="Download and embed the webpages for enhanced understanding. Slower but powerful.",
key="_activate_deep_search",
value=bool(gui.session_state.get("embedding_model")),
)
gui.switch(
"##### 💬 Generate Answer",
key="_generate_answer",
value=bool(gui.session_state.get("task_instructions")),
)

def validate_form_v2(self):
assert gui.session_state.get(
"search_query", ""
Expand All @@ -137,22 +147,37 @@ def render_example(self, state: dict):
render_output_with_refs(state, 200)

def render_settings(self):
    """Render the advanced-settings panel for the Google GPT recipe.

    The scraped diff interleaved the pre- and post-commit bodies of this
    method; this is the coherent post-commit version, in which the LLM
    instructions section and the document-search section are each gated
    behind their toggle switch (`_generate_answer` / `_activate_deep_search`
    in `render_form_v2`).

    NOTE(review): indentation of the triple-quoted instructions text was lost
    in the scrape — confirm against the repository before relying on exact
    whitespace inside that literal.
    """
    if gui.session_state.get("_generate_answer"):
        # First time the switch is turned on in this session: restore any
        # previously saved instructions from the current saved run so the
        # text area is not blank.
        if gui.session_state.get("task_instructions") is None:
            gui.session_state["task_instructions"] = self.current_sr.state.get(
                "task_instructions", ""
            )
        gui.text_area(
            """
            ### Instructions
            Instruct the LLM model on how to interpret the results to create an answer.
            """,
            key="task_instructions",
            height=300,
        )
        gui.write("---")
        selected_model = language_model_selector()
        language_model_settings(selected_model)
        gui.write("---")
    else:
        # Answer generation disabled: null out the instructions so the run
        # skips the LLM step (and `get_raw_price` does not charge for it).
        gui.session_state["task_instructions"] = None

    serp_search_settings()
    gui.write("---")

    if gui.session_state.get("_activate_deep_search"):
        gui.write("##### 🔎 Document Search Settings")
        query_instructions_widget()
        cache_knowledge_widget(self)
        gui.write("---")
        doc_search_advanced_settings()
    else:
        # Deep search disabled: clear the embedding model so the run skips
        # vector search (and `get_raw_price` does not charge for it).
        gui.session_state["embedding_model"] = None

def related_workflows(self) -> list:
from recipes.DocSearch import DocSearchPage
Expand Down Expand Up @@ -245,29 +270,38 @@ def run_v2(
if not link_titles:
raise EmptySearchResults(response.final_search_query)

# run vector search on links
response.references = yield from get_top_k_references(
DocSearchRequest.parse_obj(
{
**request.dict(),
"documents": list(link_titles.keys()),
"search_query": request.search_query,
},
),
is_user_url=False,
current_user=self.request.user,
)

# add pretty titles to references

for ref in response.references:
key = furl(ref["url"]).remove(fragment=True).url
ref["title"] = link_titles.get(key, "")
if request.embedding_model:
# run vector search on links
response.references = yield from get_top_k_references(
DocSearchRequest.parse_obj(
{
**request.dict(),
"documents": list(link_titles.keys()),
"search_query": request.search_query,
},
),
is_user_url=False,
current_user=self.request.user,
)
# add pretty titles to references
for ref in response.references:
key = furl(ref["url"]).remove(fragment=True).url
ref["title"] = link_titles.get(key, "")
else:
response.references = [
SearchReference(url=item.url, title=item.title, snippet=item.snippet)
for item in links
]

# empty search result, abort!
if not response.references:
raise EmptySearchResults(request.search_query)

if not request.task_instructions:
response.final_prompt = ""
response.output_text = []
return

response.final_prompt = ""
# add search results to the prompt
response.final_prompt += references_as_prompt(response.references) + "\n\n"
Expand All @@ -290,3 +324,11 @@ def run_v2(
avoid_repetition=request.avoid_repetition,
response_format_type=request.response_format_type,
)

def get_raw_price(self, state: dict) -> float:
    """Return the credit price for one run of this recipe.

    A run always costs a base of 1 credit; each optional stage that is
    enabled in *state* — deep search (``embedding_model``) and answer
    generation (``task_instructions``) — adds 87 credits, so a run with
    both enabled costs 175.
    """
    total = 1
    for stage_key in ("embedding_model", "task_instructions"):
        if state.get(stage_key):
            total += 87
    return total

0 comments on commit f6beff7

Please sign in to comment.