Skip to content

Commit

Permalink
Fix `self.prompts` bug: prompts were not being recorded when the response was not pulled from cache
Browse files Browse the repository at this point in the history
  • Loading branch information
parkervg committed May 24, 2024
1 parent bdd4c3e commit 3d7eb0d
Showing 1 changed file with 3 additions and 2 deletions.
5 changes: 3 additions & 2 deletions blendsql/models/_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,7 @@ def predict(self, program: Type[Program], **kwargs) -> dict:
return self.cache.get(key)
# Modify fields used for tracking Model usage
response, prompt = program(model=self, **kwargs)
self.prompts.insert(-1, self.format_prompt(response, **kwargs))
self.num_calls += 1
if self.tokenizer is not None:
self.prompt_tokens += len(self.tokenizer.encode(prompt))
Expand Down Expand Up @@ -155,8 +156,8 @@ def _create_key(self, program: Program, **kwargs) -> str:
return hasher.hexdigest()

@staticmethod
def format_prompt(res, **kwargs) -> dict:
d = {"answer": res}
def format_prompt(response: str, **kwargs) -> dict:
d = {"answer": response}
if IngredientKwarg.QUESTION in kwargs:
d[IngredientKwarg.QUESTION] = kwargs.get(IngredientKwarg.QUESTION)
if IngredientKwarg.CONTEXT in kwargs:
Expand Down

0 comments on commit 3d7eb0d

Please sign in to comment.