Skip to content

Commit

Permalink
wip proposal: first test
Browse files Browse the repository at this point in the history
  • Loading branch information
aconchillo committed Apr 25, 2024
1 parent 2ecc923 commit 32a7cfd
Showing 1 changed file with 64 additions and 0 deletions.
64 changes: 64 additions & 0 deletions examples/foundational/new-test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
import asyncio
import sys

from dailyai.pipeline.frames import FrameType
from dailyai.pipeline.pipeline import Pipeline
from dailyai.processors.passthrough import Passthrough
from dailyai.services.live_stream import LiveStream
from dailyai.transports.daily_transport import DailyTransport
from dailyai.processors.demuxer import Demuxer
from dailyai.processors.llm_response_aggregator import LLMUserResponseAggregator

from runner import configure

from loguru import logger

from dotenv import load_dotenv
# Pull environment variables (API keys, room config) from a local .env file,
# overriding any values already present in the process environment.
load_dotenv(override=True)

# Remove loguru's default handler (id 0) and re-add stderr at DEBUG level
# so the example produces verbose logging output.
logger.remove(0)
logger.add(sys.stderr, level="DEBUG")


async def main(room_url, token):
    """Join a Daily room and run a demo media pipeline.

    The pipeline reads frames from one live stream, demuxes them into an
    LLM user-response aggregator branch and a raw-media passthrough
    branch, and writes the result back out through a second live stream.

    Args:
        room_url: URL of the Daily room to join.
        token: Access token for the room.
    """
    transport = DailyTransport(
        room_url, token,
        camera_enabled=True,
        camera_width=1280,
        camera_height=720,
        mic_enabled=True,
        speaker_enabled=True,
        video_capture_enabled=True,
        transcription_enabled=True,
    )

    # Two live-stream endpoints over the same transport: `source` consumes
    # media from the room, `sink` publishes the pipeline output back.
    source = LiveStream(transport, speaker_enabled=True)
    sink = LiveStream(transport, camera_enabled=True, mic_enabled=True)

    # One demuxer branch forwards raw audio/image frames untouched.
    passthrough = Passthrough([FrameType.AUDIO_RAW, FrameType.IMAGE_RAW])

    system_prompt = [
        {
            "role": "system",
            "content": "You are a helpful LLM in a WebRTC call. Your goal is to demonstrate your capabilities in a succinct way. Your output will be converted to audio. Respond to what the user said in a creative and helpful way.",
        },
    ]
    # The other branch aggregates transcribed user speech into LLM messages.
    aggregator = LLMUserResponseAggregator(system_prompt)

    @source.event_handler("on_first_participant_joined")
    async def on_first_participant_joined(livestream, participant):
        # Begin capturing the first participant's transcription and video.
        source.capture_participant_transcription(participant["id"])
        source.capture_participant_video(participant["id"])

    pipeline = Pipeline([
        source,
        Demuxer([aggregator], [passthrough]),
        sink,
    ])

    await pipeline.run()


if __name__ == "__main__":
    # Resolve the room URL and token from CLI args / environment, then run.
    room_url, room_token = configure()
    asyncio.run(main(room_url, room_token))

0 comments on commit 32a7cfd

Please sign in to comment.