From 151af2774ac8399edbe47b28715881e76d95ad89 Mon Sep 17 00:00:00 2001
From: Juwan
Date: Thu, 23 Nov 2023 17:01:19 +0900
Subject: [PATCH] feat: send volume-adjusted audio to the server from the instructor header #67
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Volume-adjusted audio can now be sent to the server from the instructor header.
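For reference, a minimal sketch of the Web Audio routing this change relies on: the raw
microphone stream is passed through a GainNode, and the GainNode output is turned back into
a MediaStream via a MediaStreamAudioDestinationNode, so the volume-adjusted tracks (not the
raw ones) are the ones handed to the RTCPeerConnection. The helper name, the fixed volume
value, and the `{ audio: true }` constraint below are illustrative only, not code from this
patch:

    // Illustrative sketch (browser TypeScript): not part of this patch.
    async function buildVolumeAdjustedStream(volume: number): Promise<MediaStream> {
      // 1. Capture the raw microphone input.
      const raw = await navigator.mediaDevices.getUserMedia({ audio: true });

      // 2. Route it through a GainNode; gain.value can be updated later from UI state.
      const audioContext = new AudioContext();
      const source = audioContext.createMediaStreamSource(raw);
      const gainNode = audioContext.createGain();
      gainNode.gain.value = volume;
      source.connect(gainNode);

      // 3. Convert the processed audio back into a MediaStream.
      const destination = audioContext.createMediaStreamDestination();
      gainNode.connect(destination);
      return destination.stream;
    }

    // Usage: send the adjusted tracks, not the raw ones, over WebRTC.
    // const pc = new RTCPeerConnection();
    // const adjusted = await buildVolumeAdjustedStream(0.5);
    // adjusted.getTracks().forEach((track) => pc.addTrack(track, adjusted));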
---
 .../components/HeaderInstructorControls.tsx   | 45 ++++++++++++++-----
 .../pages/MicTest/components/AudioRecord.tsx  |  1 -
 mediaServer/src/RelayServer.ts                |  1 -
 3 files changed, 33 insertions(+), 14 deletions(-)

diff --git a/frontend/src/components/Header/components/HeaderInstructorControls.tsx b/frontend/src/components/Header/components/HeaderInstructorControls.tsx
index 62ad96b..ccd4e7b 100644
--- a/frontend/src/components/Header/components/HeaderInstructorControls.tsx
+++ b/frontend/src/components/Header/components/HeaderInstructorControls.tsx
@@ -1,4 +1,4 @@
-import { useState, useRef } from "react";
+import { useState, useRef, useEffect } from "react";
 import { useRecoilValue } from "recoil";
 import { io, Socket } from "socket.io-client";
 
@@ -12,6 +12,7 @@ import SmallButton from "@/components/SmallButton/SmallButton";
 import Modal from "@/components/Modal/Modal";
 
 import selectedMicrophoneState from "./stateMicrophone";
+import micVolmeState from "./stateMicVolme";
 
 const HeaderInstructorControls = () => {
   const [isLectureStart, setIsLectureStart] = useState(false);
@@ -29,12 +30,20 @@ const HeaderInstructorControls = () => {
   const selectedMicrophone = useRecoilValue(selectedMicrophoneState);
 
   const MEDIA_SERVER_URL = "http://localhost:3000/create-room";
+  const inputMicVolume = useRecoilValue(micVolmeState);
+  const inputMicVolumeRef = useRef(0);
+  useEffect(() => {
+    inputMicVolumeRef.current = inputMicVolume;
+  }, [inputMicVolume]);
+
+  const updatedStreamRef = useRef(null);
 
   const startLecture = async () => {
     if (!selectedMicrophone) return alert("음성 입력장치(마이크)를 먼저 선택해주세요");
     await initConnection();
     await createPresenterOffer();
     listenForServerAnswer();
+    setIsLectureStart(true);
   };
 
   const stopLecture = () => {
@@ -63,24 +72,26 @@
         audio: { deviceId: selectedMicrophone }
       });
       mediaStreamRef.current = stream;
-      console.log("1. 로컬 stream 생성 완료");
-      setIsLectureStart(true);
-      setupAudioAnalysis(stream);
+      await setupAudioAnalysis(stream);
       startRecordingTimer();
+      if (updatedStreamRef.current) console.log("1. 로컬 stream 생성 완료");
+
       // 2. 로컬 RTCPeerConnection 생성
       pcRef.current = new RTCPeerConnection();
       console.log("2. 로컬 RTCPeerConnection 생성 완료");
 
       // 3. 로컬 stream에 track 추가, 발표자의 미디어 트랙을 로컬 RTCPeerConnection에 추가
-      if (stream) {
-        console.log(stream);
+      if (updatedStreamRef.current) {
+        console.log(updatedStreamRef.current);
         console.log("3.track 추가");
-        stream.getTracks().forEach((track) => {
+
+        updatedStreamRef.current.getTracks().forEach((track) => {
+          if (!updatedStreamRef.current) return;
           console.log("track:", track);
           if (!pcRef.current) return;
-          pcRef.current.addTrack(track, stream);
+          pcRef.current.addTrack(track, updatedStreamRef.current);
         });
       } else {
         console.error("no stream");
@@ -139,13 +150,23 @@
 
   // 마이크 볼륨 측정을 위한 부분입니다
   const setupAudioAnalysis = (stream: MediaStream) => {
-    const context = new AudioContext();
-    const analyser = context.createAnalyser();
-    const mediaStreamAudioSourceNode = context.createMediaStreamSource(stream);
-    mediaStreamAudioSourceNode.connect(analyser, 0);
+    const audioContext = new AudioContext();
+    const analyser = audioContext.createAnalyser();
+    const mediaStreamAudioSourceNode = audioContext.createMediaStreamSource(stream);
+
+    const gainNode = audioContext.createGain();
+    mediaStreamAudioSourceNode.connect(gainNode);
+    gainNode.connect(analyser);
+
+    const mediaStreamDestination = audioContext.createMediaStreamDestination();
+    gainNode.connect(mediaStreamDestination);
+    updatedStreamRef.current = mediaStreamDestination.stream;
+
     const pcmData = new Float32Array(analyser.fftSize);
     const onFrame = () => {
+      gainNode.gain.value = inputMicVolumeRef.current;
+
       analyser.getFloatTimeDomainData(pcmData);
       let sum = 0.0;
       for (const amplitude of pcmData) {
diff --git a/frontend/src/pages/MicTest/components/AudioRecord.tsx b/frontend/src/pages/MicTest/components/AudioRecord.tsx
index 58873f8..fe3156f 100644
--- a/frontend/src/pages/MicTest/components/AudioRecord.tsx
+++ b/frontend/src/pages/MicTest/components/AudioRecord.tsx
@@ -11,7 +11,6 @@ const AudioRecord = () => {
   const [selectedMicrophone, setSelectedMicrophone] = useState(null);
   const [recordingTime, setRecordingTime] = useState(0);
 
-  const gainNodeRef = useRef(null);
   const gainValueRef = useRef(1);
   const updatedStreamRef = useRef(null);
 
diff --git a/mediaServer/src/RelayServer.ts b/mediaServer/src/RelayServer.ts
index c49148b..0fefa4d 100644
--- a/mediaServer/src/RelayServer.ts
+++ b/mediaServer/src/RelayServer.ts
@@ -39,7 +39,6 @@ export class RelayServer {
       });
       const stream = event.streams[0];
       console.log(stream);
-      console.log(`stream id: ${stream.id}`);
     };
 
     this.getServerCandidate(socket, data.socketId);