Commit

max tokens for multimodal calls
mertyg committed Jul 5, 2024
1 parent e9c1240 commit 8318dee
Showing 2 changed files with 2 additions and 2 deletions.
textgrad/engine/anthropic.py (1 addition, 1 deletion)

@@ -103,7 +103,7 @@ def _format_content(self, content: List[Union[str, bytes]]) -> List[dict]:
         return formatted_content
 
     def _generate_multimodal(
-        self, content: List[Union[str, bytes]], system_prompt=None, temperature=0, max_tokens=300, top_p=0.99
+        self, content: List[Union[str, bytes]], system_prompt=None, temperature=0, max_tokens=2000, top_p=0.99
     ):
         sys_prompt_arg = system_prompt if system_prompt else self.system_prompt
         formatted_content = self._format_content(content)
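For context, a minimal sketch of how a default such as max_tokens=2000 typically flows into the underlying Anthropic Messages API call. This is illustrative only, not textgrad's actual implementation; the model name and message content are placeholders.

```python
# Illustrative sketch (not textgrad's code): passing temperature, top_p, and the
# raised max_tokens default through to the Anthropic Messages API.
import anthropic

client = anthropic.Anthropic()  # reads ANTHROPIC_API_KEY from the environment

response = client.messages.create(
    model="claude-3-5-sonnet-20240620",  # placeholder model name
    system="You are a helpful assistant.",
    messages=[{"role": "user", "content": [{"type": "text", "text": "Describe the image."}]}],
    temperature=0,
    top_p=0.99,
    max_tokens=2000,  # was 300; 2000 leaves room for longer multimodal responses
)
print(response.content[0].text)
```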
textgrad/engine/openai.py (1 addition, 1 deletion)

@@ -109,7 +109,7 @@ def _format_content(self, content: List[Union[str, bytes]]) -> List[dict]:
         return formatted_content
 
     def _generate_multimodal(
-        self, content: List[Union[str, bytes]], system_prompt=None, temperature=0, max_tokens=300, top_p=0.99
+        self, content: List[Union[str, bytes]], system_prompt=None, temperature=0, max_tokens=2000, top_p=0.99
     ):
        sys_prompt_arg = system_prompt if system_prompt else self.system_prompt
        formatted_content = self._format_content(content)
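The same raised default applies to the OpenAI engine. A comparable sketch, again illustrative with a placeholder model name, using the OpenAI Chat Completions API:

```python
# Illustrative sketch (not textgrad's code): the equivalent OpenAI Chat Completions
# call with the raised max_tokens default.
from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment

response = client.chat.completions.create(
    model="gpt-4o",  # placeholder model name
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": [{"type": "text", "text": "Describe the image."}]},
    ],
    temperature=0,
    top_p=0.99,
    max_tokens=2000,  # was 300; 2000 leaves room for longer multimodal responses
)
print(response.choices[0].message.content)
```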
