From 8318dee3688db169bcae9bd4b0525dc1dade11b8 Mon Sep 17 00:00:00 2001
From: mertyg
Date: Fri, 5 Jul 2024 00:15:37 +0000
Subject: [PATCH] max tokens for multimodal calls

---
 textgrad/engine/anthropic.py | 2 +-
 textgrad/engine/openai.py    | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/textgrad/engine/anthropic.py b/textgrad/engine/anthropic.py
index 9fde954..b2a8f6d 100644
--- a/textgrad/engine/anthropic.py
+++ b/textgrad/engine/anthropic.py
@@ -103,7 +103,7 @@ def _format_content(self, content: List[Union[str, bytes]]) -> List[dict]:
         return formatted_content
 
     def _generate_multimodal(
-        self, content: List[Union[str, bytes]], system_prompt=None, temperature=0, max_tokens=300, top_p=0.99
+        self, content: List[Union[str, bytes]], system_prompt=None, temperature=0, max_tokens=2000, top_p=0.99
     ):
         sys_prompt_arg = system_prompt if system_prompt else self.system_prompt
         formatted_content = self._format_content(content)
diff --git a/textgrad/engine/openai.py b/textgrad/engine/openai.py
index c59f489..0e755a9 100644
--- a/textgrad/engine/openai.py
+++ b/textgrad/engine/openai.py
@@ -109,7 +109,7 @@ def _format_content(self, content: List[Union[str, bytes]]) -> List[dict]:
         return formatted_content
 
     def _generate_multimodal(
-        self, content: List[Union[str, bytes]], system_prompt=None, temperature=0, max_tokens=300, top_p=0.99
+        self, content: List[Union[str, bytes]], system_prompt=None, temperature=0, max_tokens=2000, top_p=0.99
     ):
         sys_prompt_arg = system_prompt if system_prompt else self.system_prompt
         formatted_content = self._format_content(content)
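
The patch only raises the default `max_tokens` for multimodal calls from 300 to 2000; callers can still override it per call. Below is a minimal sketch of that usage, assuming the engine's public `generate` method forwards keyword arguments down to `_generate_multimodal` and that the `ChatOpenAI(model_string=..., is_multimodal=...)` constructor matches the installed textgrad version; the model name and image path are placeholders.

```python
# Sketch only: assumes generate() forwards **kwargs to _generate_multimodal();
# verify against the textgrad version you have installed.
from textgrad.engine.openai import ChatOpenAI

engine = ChatOpenAI(model_string="gpt-4o", is_multimodal=True)  # illustrative model name

with open("figure.png", "rb") as f:  # placeholder image path
    image_bytes = f.read()

# With this patch, omitting max_tokens allows up to 2000 output tokens
# for multimodal calls instead of the previous 300.
long_answer = engine.generate([image_bytes, "Describe this figure in detail."])

# Callers that want the old, tighter budget can still request it explicitly.
short_answer = engine.generate(
    [image_bytes, "Describe this figure in detail."],
    max_tokens=300,
)
```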