Skip to content

Commit

Permalink
build cycle multi-hop RAG, trainable; added component graph with cycle, output graph as simplified version of the dynamic computation graph
Browse files Browse the repository at this point in the history
  • Loading branch information
liyin2015 committed Dec 17, 2024
1 parent 1ec2557 commit 2aac4ff
Show file tree
Hide file tree
Showing 7 changed files with 815 additions and 114 deletions.
7 changes: 6 additions & 1 deletion adalflow/adalflow/core/generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -506,6 +506,7 @@ def forward(
self.model_kwargs, model_kwargs
),
}

output = self.call(**input_args, id=id)
# 2. Generate a Parameter object from the output
combined_prompt_kwargs = compose_model_kwargs(self.prompt_kwargs, prompt_kwargs)
Expand All @@ -527,9 +528,12 @@ def forward(
name=self.name + "_output",
role_desc=f"Output from (llm) {self.name}",
param_type=ParameterType.GENERATOR_OUTPUT,
data_id=id,
)
response.set_predecessors(predecessors)
response.trace_forward_pass(input_args=input_args, full_response=output)
response.trace_forward_pass(
input_args=input_args, full_response=output, id=self.id, name=self.name
)
# *** special to the generator ***
response.trace_api_kwargs(api_kwargs=self._trace_api_kwargs)
# attach the demo to the demo parameter
Expand Down Expand Up @@ -755,6 +759,7 @@ def _backward_through_one_predecessor(
score=response._score, # add score to gradient
param_type=ParameterType.GRADIENT,
from_response_id=response.id,
data_id=response.data_id,
)
pred.add_gradient(var_gradient)
pred.set_score(response._score)
Expand Down
8 changes: 7 additions & 1 deletion adalflow/adalflow/core/retriever.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,13 @@ def forward(
)
if input is None:
raise ValueError("Input cannot be empty")
response = super().forward(input, top_k=top_k, **kwargs)
response = super().forward(input, top_k=top_k, id=id, **kwargs)
response.trace_forward_pass(
input_args={"input": input, "top_k": top_k},
full_response=response,
id=self.id,
name=self.name,
)
response.param_type = (
ParameterType.RETRIEVER_OUTPUT
) # be more specific about the type
Expand Down
12 changes: 11 additions & 1 deletion adalflow/adalflow/optim/grad_component.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

from typing import TYPE_CHECKING
from collections import OrderedDict
import uuid
import logging

if TYPE_CHECKING:
Expand All @@ -13,6 +14,7 @@
from adalflow.core.component import Component
from adalflow.optim.function import BackwardContext


__all__ = ["GradComponent"]
log = logging.getLogger(__name__)

Expand All @@ -31,10 +33,12 @@ class GradComponent(Component):
"""
backward_engine: "BackwardEngine"
_component_type = "grad"
id = None

def __init__(self, *args, **kwargs):
super().__init__()
super().__setattr__("backward_engine", None)
super().__setattr__("id", str(uuid.uuid4()))

def __call__(self, *args, **kwargs):
if self.training:
Expand Down Expand Up @@ -122,9 +126,15 @@ def forward(self, *args, **kwargs) -> "Parameter":
name=self.name + "_output",
role_desc=self.name + " response",
param_type=ParameterType.OUTPUT,
data_id=kwargs.get("id", None),
)
response.set_predecessors(predecessors)
response.trace_forward_pass(input_args=input_args, full_response=call_response)
response.trace_forward_pass(
input_args=input_args,
full_response=call_response,
id=self.id,
name=self.name,
)
response.set_grad_fn(
BackwardContext(
backward_fn=self.backward,
Expand Down
Loading

0 comments on commit 2aac4ff

Please sign in to comment.