
Commit

Merge pull request #997 from pipecat-ai/aleix/update-dependencies-01-15-25

update dependencies (go back to numpy1)
aconchillo authored Jan 15, 2025
2 parents 80b6c28 + 9dacbbb commit bb47664
Showing 6 changed files with 46 additions and 46 deletions.
8 changes: 4 additions & 4 deletions dev-requirements.txt
@@ -1,9 +1,9 @@
 build~=1.2.2
-grpcio-tools~=1.68.1
+grpcio-tools~=1.69.0
 pip-tools~=7.4.1
-pyright~=1.1.390
+pyright~=1.1.392
 pytest~=8.3.4
-ruff~=0.8.3
-setuptools~=75.6.0
+ruff~=0.9.1
+setuptools~=75.8.0
 setuptools_scm~=8.1.0
 python-dotenv~=1.0.1
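All of the pins in this file (and in pyproject.toml below) use PEP 440 compatible-release specifiers, so each bump still accepts later patch releases within the same minor series. A minimal sketch of the semantics, assuming the packaging library is available:

from packaging.specifiers import SpecifierSet

# "~=1.69.0" is shorthand for ">=1.69.0, <1.70", so newer patch releases still satisfy the pin.
spec = SpecifierSet("~=1.69.0")
print("1.69.3" in spec)  # True
print("1.70.0" in spec)  # False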
4 changes: 1 addition & 3 deletions examples/foundational/06-listen-and-respond.py
@@ -47,9 +47,7 @@ async def process_frame(self, frame: Frame, direction: FrameDirection):
                 elif isinstance(d, LLMUsageMetricsData):
                     tokens = d.value
                     print(
-                        f"!!! MetricsFrame: {frame}, tokens: {
-                            tokens.prompt_tokens}, characters: {
-                            tokens.completion_tokens}"
+                        f"!!! MetricsFrame: {frame}, tokens: {tokens.prompt_tokens}, characters: {tokens.completion_tokens}"
                     )
                 elif isinstance(d, TTSUsageMetricsData):
                     print(f"!!! MetricsFrame: {frame}, characters: {d.value}")
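The removed form broke the f-string across physical lines inside its replacement fields, which is only valid syntax on Python 3.12+ (PEP 701); the single-line replacement works on every Python version pipecat supports. A quick illustration with a stand-in usage object (names are illustrative, not from the repository):

class Usage:
    prompt_tokens = 10
    completion_tokens = 3

tokens = Usage()

# Keeping the replacement fields on one physical line avoids the 3.12-only syntax.
print(f"tokens: {tokens.prompt_tokens}, completion: {tokens.completion_tokens}")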
72 changes: 37 additions & 35 deletions pyproject.toml
@@ -20,15 +20,17 @@ classifiers = [
     "Topic :: Scientific/Engineering :: Artificial Intelligence"
 ]
 dependencies = [
-    "aiohttp~=3.11.10",
+    "aiohttp~=3.11.11",
     "audioop-lts~=0.2.1; python_version>='3.13'",
+    # We need an older version of `httpx` that doesn't remove the deprecated
+    # `proxies` argument. This is necessary for Azure and Anthropic clients.
+    "httpx~=0.27.2",
     "loguru~=0.7.3",
     "Markdown~=3.7",
-    "numpy~=2.1.3",
-    "numba~=0.61.0rc1",
-    "Pillow~=11.0.0",
-    "protobuf~=5.29.1",
-    "pydantic~=2.10.3",
+    "numpy~=1.26.4",
+    "Pillow~=11.1.0",
+    "protobuf~=5.29.3",
+    "pydantic~=2.10.5",
     "pyloudnorm~=0.1.1",
     "resampy~=0.4.3",
     "tenacity~=9.0.0"
@@ -39,45 +41,45 @@ Source = "https://github.com/pipecat-ai/pipecat"
 Website = "https://pipecat.ai"
 
 [project.optional-dependencies]
-anthropic = [ "anthropic~=0.40.0" ]
-assemblyai = [ "assemblyai~=0.34.0" ]
-aws = [ "boto3~=1.35.27" ]
-azure = [ "azure-cognitiveservices-speech~=1.41.1", "openai~=1.59.0" ]
+anthropic = [ "anthropic~=0.39.0" ]
+assemblyai = [ "assemblyai~=0.36.0" ]
+aws = [ "boto3~=1.35.99" ]
+azure = [ "azure-cognitiveservices-speech~=1.42.0", "openai~=1.59.6" ]
 canonical = [ "aiofiles~=24.1.0" ]
-cartesia = [ "cartesia~=1.0.13", "websockets~=13.1" ]
-cerebras = [ "openai~=1.59.0" ]
-deepseek = [ "openai~=1.59.0" ]
+cartesia = [ "cartesia~=1.3.1", "websockets~=13.1" ]
+cerebras = [ "openai~=1.59.6" ]
+deepseek = [ "openai~=1.59.6" ]
 daily = [ "daily-python~=0.14.2" ]
-deepgram = [ "deepgram-sdk~=3.7.7" ]
+deepgram = [ "deepgram-sdk~=3.8.0" ]
 elevenlabs = [ "websockets~=13.1" ]
-fal = [ "fal-client~=0.4.1" ]
+fal = [ "fal-client~=0.5.6" ]
 fish = [ "ormsgpack~=1.7.0", "websockets~=13.1" ]
 gladia = [ "websockets~=13.1" ]
-google = [ "google-generativeai~=0.8.3", "google-cloud-texttospeech~=2.21.1" ]
-grok = [ "openai~=1.59.0" ]
-groq = [ "openai~=1.59.0" ]
-gstreamer = [ "pygobject~=3.48.2" ]
-fireworks = [ "openai~=1.59.0" ]
+google = [ "google-generativeai~=0.8.3", "google-cloud-texttospeech~=2.24.0" ]
+grok = [ "openai~=1.59.6" ]
+groq = [ "openai~=1.59.6" ]
+gstreamer = [ "pygobject~=3.50.0" ]
+fireworks = [ "openai~=1.59.6" ]
 krisp = [ "pipecat-ai-krisp~=0.3.0" ]
-koala = [ "pvkoala~=2.0.2" ]
-langchain = [ "langchain~=0.3.12", "langchain-community~=0.3.12", "langchain-openai~=0.2.12" ]
-livekit = [ "livekit~=0.17.5", "livekit-api~=0.7.1" ]
-lmnt = [ "lmnt~=1.1.4" ]
+koala = [ "pvkoala~=2.0.3" ]
+langchain = [ "langchain~=0.3.14", "langchain-community~=0.3.14", "langchain-openai~=0.3.0" ]
+livekit = [ "livekit~=0.19.1", "livekit-api~=0.8.1" ]
+lmnt = [ "websockets~=13.1" ]
 local = [ "pyaudio~=0.2.14" ]
-moondream = [ "einops~=0.8.0", "timm~=1.0.8", "transformers~=4.44.0" ]
-nim = [ "openai~=1.59.0" ]
+moondream = [ "einops~=0.8.0", "timm~=1.0.13", "transformers~=4.48.0" ]
+nim = [ "openai~=1.59.6" ]
 noisereduce = [ "noisereduce~=3.0.3" ]
-openai = [ "openai~=1.59.0", "websockets~=13.1", "python-deepcompare~=1.0.1" ]
-openpipe = [ "openpipe~=4.40.0" ]
-playht = [ "pyht~=0.1.9", "websockets~=13.1" ]
+openai = [ "openai~=1.59.6", "websockets~=13.1", "python-deepcompare~=2.1.0" ]
+openpipe = [ "openpipe~=4.43.0" ]
+playht = [ "pyht~=0.1.6", "websockets~=13.1" ]
 riva = [ "nvidia-riva-client~=2.18.0" ]
 silero = [ "onnxruntime~=1.20.1" ]
-simli = [ "simli-ai~=0.1.7"]
-soundfile = [ "soundfile~=0.12.1" ]
-together = [ "openai~=1.59.0" ]
-websocket = [ "websockets~=13.1", "fastapi~=0.115.0" ]
-whisper = [ "faster-whisper~=1.1.0" ]
-openrouter = [ "openai~=1.59.0" ]
+simli = [ "simli-ai~=0.1.10"]
+soundfile = [ "soundfile~=0.13.0" ]
+together = [ "openai~=1.59.6" ]
+websocket = [ "websockets~=13.1", "fastapi~=0.115.6" ]
+whisper = [ "faster-whisper~=1.1.1" ]
+openrouter = [ "openai~=1.59.6" ]
 
 [tool.setuptools.packages.find]
 # All the following settings are optional:
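The new httpx pin in the dependencies hunk above exists because httpx 0.28 removed the long-deprecated proxies argument, which the Azure and Anthropic client libraries still pass; staying on the 0.27 series keeps that code path working. A minimal sketch of the call the pin protects, with an illustrative proxy URL:

import httpx

# On httpx 0.27.x this still works (with a deprecation warning);
# on httpx 0.28+ the `proxies` keyword is gone and this raises a TypeError.
client = httpx.Client(proxies="http://localhost:3128")
print(client.get("https://example.com").status_code)

The many extras above that share the same openai pin (cerebras, deepseek, grok, groq, fireworks, nim, together, openrouter) presumably do so because those providers expose OpenAI-compatible endpoints and are driven through the same client with a different base_url.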
2 changes: 1 addition & 1 deletion src/pipecat/services/cartesia.py
@@ -273,7 +273,7 @@ async def _receive_messages(self):
                 logger.error(f"{self} error: {msg}")
                 await self.push_frame(TTSStoppedFrame())
                 await self.stop_all_metrics()
-                await self.push_error(ErrorFrame(f'{self} error: {msg["error"]}'))
+                await self.push_error(ErrorFrame(f"{self} error: {msg['error']}"))
             else:
                 logger.error(f"{self} error, unknown message type: {msg}")
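The only substantive change in this file (and in lmnt.py and playht.py below) is the quote style inside the f-strings, presumably a result of the ruff formatter bump earlier in this commit; both spellings produce identical output. A quick sketch with a made-up message dict:

msg = {"error": "connection closed"}

# The inner quotes merely have to differ from the outer ones on Python < 3.12;
# either way the formatted string is the same.
old_style = f'{msg["error"]}'
new_style = f"{msg['error']}"
assert old_style == new_style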

4 changes: 2 additions & 2 deletions src/pipecat/services/lmnt.py
@@ -186,10 +186,10 @@ async def _receive_messages(self):
             try:
                 msg = json.loads(message)
                 if "error" in msg:
-                    logger.error(f'{self} error: {msg["error"]}')
+                    logger.error(f"{self} error: {msg['error']}")
                     await self.push_frame(TTSStoppedFrame())
                     await self.stop_all_metrics()
-                    await self.push_error(ErrorFrame(f'{self} error: {msg["error"]}'))
+                    await self.push_error(ErrorFrame(f"{self} error: {msg['error']}"))
                     return
             except json.JSONDecodeError:
                 logger.error(f"Invalid JSON message: {message}")
2 changes: 1 addition & 1 deletion src/pipecat/services/playht.py
@@ -269,7 +269,7 @@ async def _receive_messages(self):
                     self._request_id = None
                 elif "error" in msg:
                     logger.error(f"{self} error: {msg}")
-                    await self.push_error(ErrorFrame(f'{self} error: {msg["error"]}'))
+                    await self.push_error(ErrorFrame(f"{self} error: {msg['error']}"))
             except json.JSONDecodeError:
                 logger.error(f"Invalid JSON message: {message}")

