
Commit

Allow openai http connections to remain open in the pool indefinitely, rather than expiring after 5 seconds.
LewisWolfgang committed Jul 18, 2024
1 parent 4efccb7 commit f9fdadb
Showing 1 changed file with 10 additions and 2 deletions.
12 changes: 10 additions & 2 deletions src/pipecat/services/openai.py
@@ -8,6 +8,7 @@
 import base64
 import io
 import json
+import httpx
 
 from typing import AsyncGenerator, List, Literal

@@ -37,7 +38,7 @@
 )
 
 try:
-    from openai import AsyncOpenAI, AsyncStream, BadRequestError
+    from openai import AsyncOpenAI, AsyncStream, DefaultAsyncHttpxClient, BadRequestError
     from openai.types.chat import (
         ChatCompletionChunk,
         ChatCompletionFunctionMessageParam,
@@ -71,7 +72,14 @@ def __init__(self, *, model: str, api_key=None, base_url=None, **kwargs):
         self._client = self.create_client(api_key=api_key, base_url=base_url, **kwargs)
 
     def create_client(self, api_key=None, base_url=None, **kwargs):
-        return AsyncOpenAI(api_key=api_key, base_url=base_url)
+        return AsyncOpenAI(
+            api_key=api_key,
+            base_url=base_url,
+            http_client=DefaultAsyncHttpxClient(
+                limits=httpx.Limits(
+                    max_keepalive_connections=100,
+                    max_connections=1000,
+                    keepalive_expiry=None)))
 
     def can_generate_metrics(self) -> bool:
         return True
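
For context, httpx's connection pool defaults to a keepalive_expiry of 5 seconds, which is why idle pooled connections were being closed; passing keepalive_expiry=None disables that expiry. Below is a minimal standalone sketch of the same configuration outside of pipecat, assuming a hypothetical API key and model name; the pool limits mirror the values used in this commit.

import asyncio

import httpx
from openai import AsyncOpenAI, DefaultAsyncHttpxClient


async def main():
    # Build a client whose idle keep-alive connections never expire.
    # keepalive_expiry=None overrides httpx's 5-second default.
    client = AsyncOpenAI(
        api_key="sk-example",  # hypothetical key, for illustration only
        http_client=DefaultAsyncHttpxClient(
            limits=httpx.Limits(
                max_keepalive_connections=100,
                max_connections=1000,
                keepalive_expiry=None,
            )
        ),
    )

    # The first request opens a connection; later requests can reuse it
    # even after long idle periods, since the pool never expires it.
    response = await client.chat.completions.create(
        model="gpt-4o-mini",  # hypothetical model name
        messages=[{"role": "user", "content": "ping"}],
    )
    print(response.choices[0].message.content)

    await client.close()


asyncio.run(main())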
