From f9fdadb4c02dd6c49112e77fe130d8a67aedc35b Mon Sep 17 00:00:00 2001
From: Lewis Wolfgang
Date: Thu, 18 Jul 2024 11:18:21 -0400
Subject: [PATCH] Allow OpenAI HTTP connections to remain open in the pool
 indefinitely, rather than expiring after 5 seconds

---
 src/pipecat/services/openai.py | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/src/pipecat/services/openai.py b/src/pipecat/services/openai.py
index f037a095f..d2d7ae175 100644
--- a/src/pipecat/services/openai.py
+++ b/src/pipecat/services/openai.py
@@ -8,6 +8,7 @@
 import base64
 import io
 import json
+import httpx
 
 from typing import AsyncGenerator, List, Literal
 
@@ -37,7 +38,7 @@
 )
 
 try:
-    from openai import AsyncOpenAI, AsyncStream, BadRequestError
+    from openai import AsyncOpenAI, AsyncStream, DefaultAsyncHttpxClient, BadRequestError
     from openai.types.chat import (
         ChatCompletionChunk,
         ChatCompletionFunctionMessageParam,
@@ -71,7 +72,14 @@ def __init__(self, *, model: str, api_key=None, base_url=None, **kwargs):
         self._client = self.create_client(api_key=api_key, base_url=base_url, **kwargs)
 
     def create_client(self, api_key=None, base_url=None, **kwargs):
-        return AsyncOpenAI(api_key=api_key, base_url=base_url)
+        return AsyncOpenAI(
+            api_key=api_key,
+            base_url=base_url,
+            http_client=DefaultAsyncHttpxClient(
+                limits=httpx.Limits(
+                    max_keepalive_connections=100,
+                    max_connections=1000,
+                    keepalive_expiry=None)))
 
     def can_generate_metrics(self) -> bool:
         return True
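
For reference, a minimal standalone sketch of the client construction this patch introduces in create_client(), shown outside the pipecat service class. It assumes an openai-python version that exports DefaultAsyncHttpxClient (v1.17+) and that httpx is installed; when api_key is omitted, the openai client falls back to the OPENAI_API_KEY environment variable.

import httpx
from openai import AsyncOpenAI, DefaultAsyncHttpxClient

# Mirror the patched create_client(): keep idle connections in the pool
# indefinitely (keepalive_expiry=None) instead of httpx's default
# 5-second keepalive expiry, and raise the pool limits so concurrent
# requests can reuse warm connections.
client = AsyncOpenAI(  # api_key falls back to the OPENAI_API_KEY env var
    http_client=DefaultAsyncHttpxClient(
        limits=httpx.Limits(
            max_keepalive_connections=100,
            max_connections=1000,
            keepalive_expiry=None,
        )
    ),
)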