Skip to content

Commit

Permalink
Merge pull request #132 from Jw705/feature/231123-add-mic-volume-update
Browse files Browse the repository at this point in the history
Feature(#67) μ„€μ • λͺ¨λ‹¬μ—μ„œ λ°œν‘œμž 마이크 μŒλŸ‰μ„ μ„€μ •ν•  수 μžˆλ‹€.
  • Loading branch information
Jw705 authored Nov 23, 2023
2 parents 00457bd + 151af27 commit 339bb32
Show file tree
Hide file tree
Showing 5 changed files with 139 additions and 38 deletions.
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { useState, useRef } from "react";
import { useState, useRef, useEffect } from "react";
import { useRecoilValue } from "recoil";
import { io, Socket } from "socket.io-client";

Expand All @@ -12,6 +12,7 @@ import SmallButton from "@/components/SmallButton/SmallButton";
import Modal from "@/components/Modal/Modal";

import selectedMicrophoneState from "./stateMicrophone";
import micVolmeState from "./stateMicVolme";

const HeaderInstructorControls = () => {
const [isLectureStart, setIsLectureStart] = useState(false);
Expand All @@ -29,12 +30,20 @@ const HeaderInstructorControls = () => {
const selectedMicrophone = useRecoilValue(selectedMicrophoneState);
const MEDIA_SERVER_URL = "http://localhost:3000/create-room";

const inputMicVolume = useRecoilValue(micVolmeState);
const inputMicVolumeRef = useRef<number>(0);
useEffect(() => {
inputMicVolumeRef.current = inputMicVolume;
}, [inputMicVolume]);
const updatedStreamRef = useRef<MediaStream | null>(null);

// Begins the lecture broadcast. A microphone must already be chosen in the
// settings modal; otherwise the user is warned and nothing happens. The
// signaling steps run in order: open the connection, send the presenter
// offer, then start listening for the server's answer.
const startLecture = async () => {
  if (!selectedMicrophone) {
    return alert("μŒμ„± μž…λ ₯μž₯치(마이크)λ₯Ό λ¨Όμ € μ„ νƒν•΄μ£Όμ„Έμš”");
  }

  await initConnection();
  await createPresenterOffer();
  listenForServerAnswer();

  // Flip the UI into "lecture running" mode only after signaling succeeded.
  setIsLectureStart(true);
};

const stopLecture = () => {
Expand Down Expand Up @@ -63,24 +72,26 @@ const HeaderInstructorControls = () => {
audio: { deviceId: selectedMicrophone }
});
mediaStreamRef.current = stream;
console.log("1. 둜컬 stream 생성 μ™„λ£Œ");

setIsLectureStart(true);
setupAudioAnalysis(stream);
await setupAudioAnalysis(stream);
startRecordingTimer();

if (updatedStreamRef.current) console.log("1. 둜컬 stream 생성 μ™„λ£Œ");

// 2. 둜컬 RTCPeerConnection 생성
pcRef.current = new RTCPeerConnection();
console.log("2. 둜컬 RTCPeerConnection 생성 μ™„λ£Œ");

// 3. 둜컬 stream에 track μΆ”κ°€, λ°œν‘œμžμ˜ λ―Έλ””μ–΄ νŠΈλž™μ„ 둜컬 RTCPeerConnection에 μΆ”κ°€
if (stream) {
console.log(stream);
if (updatedStreamRef.current) {
console.log(updatedStreamRef.current);
console.log("3.track μΆ”κ°€");
stream.getTracks().forEach((track) => {

updatedStreamRef.current.getTracks().forEach((track) => {
if (!updatedStreamRef.current) return;
console.log("track:", track);
if (!pcRef.current) return;
pcRef.current.addTrack(track, stream);
pcRef.current.addTrack(track, updatedStreamRef.current);
});
} else {
console.error("no stream");
Expand Down Expand Up @@ -139,13 +150,23 @@ const HeaderInstructorControls = () => {

// 마이크 λ³Όλ₯¨ 츑정을 μœ„ν•œ λΆ€λΆ„μž…λ‹ˆλ‹€
const setupAudioAnalysis = (stream: MediaStream) => {
const context = new AudioContext();
const analyser = context.createAnalyser();
const mediaStreamAudioSourceNode = context.createMediaStreamSource(stream);
mediaStreamAudioSourceNode.connect(analyser, 0);
const audioContext = new AudioContext();
const analyser = audioContext.createAnalyser();
const mediaStreamAudioSourceNode = audioContext.createMediaStreamSource(stream);

const gainNode = audioContext.createGain();
mediaStreamAudioSourceNode.connect(gainNode);
gainNode.connect(analyser);

const mediaStreamDestination = audioContext.createMediaStreamDestination();
gainNode.connect(mediaStreamDestination);
updatedStreamRef.current = mediaStreamDestination.stream;

const pcmData = new Float32Array(analyser.fftSize);

const onFrame = () => {
gainNode.gain.value = inputMicVolumeRef.current;

analyser.getFloatTimeDomainData(pcmData);
let sum = 0.0;
for (const amplitude of pcmData) {
Expand Down
30 changes: 24 additions & 6 deletions frontend/src/components/Header/components/HeaderSettingModal.tsx
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { useState, useEffect } from "react";
import { useSetRecoilState } from "recoil";
import selectedMicrophoneState from "./stateMicrophone";
import micVolmeState from "./stateMicVolme";

interface HeaderSettingModalProps {
isSettingClicked: boolean;
Expand All @@ -10,6 +11,7 @@ interface HeaderSettingModalProps {
const HeaderSettingModal = ({ isSettingClicked, setIsSettingClicked }: HeaderSettingModalProps) => {
const [microphoneDevices, setMicrophoneDevices] = useState<MediaDeviceInfo[]>([]);
const setSelectedMicrophone = useSetRecoilState(selectedMicrophoneState);
const setMicVolume = useSetRecoilState(micVolmeState);

useEffect(() => {
// 마이크 μž₯치 λͺ©λ‘ κ°€μ Έμ˜€κΈ°
Expand All @@ -24,6 +26,11 @@ const HeaderSettingModal = ({ isSettingClicked, setIsSettingClicked }: HeaderSet
});
}, []);

// Pushes the volume slider's current value (a 0–1 string from the range
// input) into recoil state as a number, so the audio pipeline can read it.
const handleGainChange = (event: React.ChangeEvent<HTMLInputElement>) => {
  setMicVolume(parseFloat(event.target.value));
};

return (
<>
<div
Expand All @@ -33,19 +40,17 @@ const HeaderSettingModal = ({ isSettingClicked, setIsSettingClicked }: HeaderSet
onClick={() => setIsSettingClicked(!isSettingClicked)}
/>
<div
className={`flex flex-col absolute top-24 right-4 items-center gap-4 px-6 py-4 w-96 h-fit bg-grayscale-white rounded-xl border-default duration-500 ${
className={`flex flex-col absolute top-24 right-4 items-center gap-4 px-6 py-4 w-96 h-fit semibold-18 bg-grayscale-white rounded-xl border-default duration-500 ${
isSettingClicked ? "opacity-100 visible" : "opacity-0 invisible"
}`}
>
<div className="flex flex-row gap-3 w-full h-14 justify-start">
<p className="semibold-18" id="input-device-label">
μž…λ ₯ μž₯치 μ„€μ •
</p>
<div className="flex flex-row gap-3 w-full h-10 justify-start">
<p id="input-device-label">μž…λ ₯ μž₯치 μ„€μ •</p>
</div>

<select
aria-labelledby="input-device-label"
className="border semibold-18 w-[100%]"
className="border w-full"
onChange={(e) => setSelectedMicrophone(e.target.value)}
>
{microphoneDevices.map((device) => (
Expand All @@ -55,6 +60,19 @@ const HeaderSettingModal = ({ isSettingClicked, setIsSettingClicked }: HeaderSet
))}
</select>

<div className="flex flex-row gap-3 w-full h-10 justify-start">
<label htmlFor="volumeSlider">μž…λ ₯ λ³Όλ₯¨:</label>
</div>
<input
className="w-full"
type="range"
id="volumeSlider"
min="0"
max="1"
step="0.01"
onChange={handleGainChange}
/>

<div className="flex flex-row gap-4 w-full"></div>
</div>
</>
Expand Down
8 changes: 8 additions & 0 deletions frontend/src/components/Header/components/stateMicVolme.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
import { atom } from "recoil";

// Recoil atom holding the presenter's microphone gain value (default 1).
// Written by the settings-modal volume slider and read by the audio
// pipeline that applies it to a GainNode each animation frame.
// NOTE(review): "Volme" is a typo for "Volume", but both the identifier and
// the recoil key are imported/referenced by other files, so renaming here
// would be a breaking change — documented instead of fixed.
export const micVolmeState = atom<number>({
  key: "micVolme",
  default: 1
});

export default micVolmeState;
93 changes: 74 additions & 19 deletions frontend/src/pages/MicTest/components/AudioRecord.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,15 @@ const AudioRecord = () => {
const [selectedMicrophone, setSelectedMicrophone] = useState<string | null>(null);
const [recordingTime, setRecordingTime] = useState<number>(0);

const gainValueRef = useRef<number>(1);
const updatedStreamRef = useRef<MediaStream | null>(null);

const mediaRecorderRef = useRef<MediaRecorder | null>(null);
const recordingTimerRef = useRef<number | null>(null);
const onFrameIdRef = useRef<number | null>(null);
const volumeMeterRef = useRef<HTMLDivElement>(null);
const volumeMeterRef2 = useRef<HTMLDivElement>(null);
const volumeMeterRef3 = useRef<HTMLDivElement>(null);

const socketRef = useRef<Socket>();
const myVideoRef = useRef<HTMLVideoElement>(null);
Expand Down Expand Up @@ -82,27 +86,34 @@ const AudioRecord = () => {
video: true
});

handleRecordingStart(stream);
setupAudioAnalysis(stream);
await setupAudioAnalysis(stream);
startRecordingTimer();

if (myVideoRef.current) {
myVideoRef.current.srcObject = stream;
}
console.log("1. 둜컬 stream 생성 μ™„λ£Œ");
if (updatedStreamRef.current) console.log("1. 둜컬 stream 생성 μ™„λ£Œ");

// 2. 둜컬 RTCPeerConnection 생성
pcRef.current = new RTCPeerConnection();
console.log("2. 둜컬 RTCPeerConnection 생성 μ™„λ£Œ");

// 3. 둜컬 stream에 track μΆ”κ°€, λ°œν‘œμžμ˜ λ―Έλ””μ–΄ νŠΈλž™μ„ 둜컬 RTCPeerConnection에 μΆ”κ°€
if (stream) {
console.log(stream);
if (updatedStreamRef.current) {
if (myVideoRef.current) {
myVideoRef.current.srcObject = updatedStreamRef.current;
}
console.log("stream", stream);
console.log("updatedStreamRef.current", updatedStreamRef.current);
handleRecordingStart(updatedStreamRef.current);
console.log("3.track μΆ”κ°€");
stream.getTracks().forEach((track) => {

updatedStreamRef.current.getTracks().forEach((track) => {
if (!updatedStreamRef.current) return;
console.log("track:", track);
if (!pcRef.current) return;
pcRef.current.addTrack(track, stream);
pcRef.current.addTrack(track, updatedStreamRef.current);
});

stream.getTracks().forEach((track) => {
console.log("track origin:", track);
});
} else {
console.error("no stream");
Expand Down Expand Up @@ -185,24 +196,39 @@ const AudioRecord = () => {
setIsRecording(true);
};

// 마이크 λ³Όλ₯¨ 츑정을 μœ„ν•œ λΆ€λΆ„μž…λ‹ˆλ‹€
const setupAudioAnalysis = (stream: MediaStream) => {
const context = new AudioContext();
const analyser = context.createAnalyser();
const mediaStreamAudioSourceNode = context.createMediaStreamSource(stream);
mediaStreamAudioSourceNode.connect(analyser, 0);
// 마이크 λ³Όλ₯¨ λ³€κ²½κ³Ό 츑정을 μœ„ν•΄ μ˜€λ””μ˜€λ₯Ό μˆ˜μ •/λΆ„μ„ν•˜λŠ” ν•¨μˆ˜μž…λ‹ˆλ‹€.
const setupAudioAnalysis = async (stream: MediaStream) => {
const audioContext = new AudioContext();
const analyser = audioContext.createAnalyser();
const mediaStreamAudioSourceNode = audioContext.createMediaStreamSource(stream);

const gainNode = audioContext.createGain();
mediaStreamAudioSourceNode.connect(gainNode);
gainNode.connect(analyser);

const mediaStreamDestination = audioContext.createMediaStreamDestination();
gainNode.connect(mediaStreamDestination);
mediaStreamDestination.stream.addTrack(stream.getVideoTracks()[0]); // ν”„λ‘œν† νƒ€μž…μ— μ‚¬μš©ν•  λΉ„λ””μ˜€ νŠΈλž™ κ·ΈλŒ€λ‘œ μΆ”κ°€
updatedStreamRef.current = mediaStreamDestination.stream;

const pcmData = new Float32Array(analyser.fftSize);

const onFrame = () => {
//console.log("Gain value:", gainValueRef.current);
gainNode.gain.value = gainValueRef.current;

analyser.getFloatTimeDomainData(pcmData);
let sum = 0.0;
for (const amplitude of pcmData) {
sum += amplitude * amplitude;
}
const rms = Math.sqrt(sum / pcmData.length);
//console.log("rms:", rms);
const normalizedVolume = Math.min(1, rms / 0.5);
colorVolumeMeter(normalizedVolume * 2);
colorVolumeMeter2(normalizedVolume * 2);
colorVolumeMeter(normalizedVolume);
colorVolumeMeter2(normalizedVolume);

colorVolumeMeter3(normalizedVolume);
onFrameIdRef.current = window.requestAnimationFrame(onFrame);
};
onFrameIdRef.current = window.requestAnimationFrame(onFrame);
Expand Down Expand Up @@ -241,7 +267,20 @@ const AudioRecord = () => {
if (!volumeMeterRef2.current) return;
const VOL_METER_MAX = 10; // ν‘œμ‹œν•  λ³Όλ₯¨ λ―Έν„° 개수
const childrens = volumeMeterRef2.current.querySelectorAll("div") as NodeListOf<HTMLDivElement>;
const numberOfChildToColor = normalizeToInteger(vol, 1, VOL_METER_MAX);
const numberOfChildToColor = normalizeToInteger(vol, 0, VOL_METER_MAX);
const coloredChild = Array.from(childrens).slice(0, numberOfChildToColor);
childrens.forEach((pid) => {
pid.style.backgroundColor = "#e6e6e6";
});
coloredChild.forEach((pid) => {
pid.style.backgroundColor = "#69ce2b";
});
};
const colorVolumeMeter3 = (vol: number) => {
if (!volumeMeterRef3.current) return;
const VOL_METER_MAX = 10; // ν‘œμ‹œν•  λ³Όλ₯¨ λ―Έν„° 개수
const childrens = volumeMeterRef3.current.querySelectorAll("div") as NodeListOf<HTMLDivElement>;
const numberOfChildToColor = normalizeToInteger(vol, 0, VOL_METER_MAX);
const coloredChild = Array.from(childrens).slice(0, numberOfChildToColor);
childrens.forEach((pid) => {
pid.style.backgroundColor = "#e6e6e6";
Expand All @@ -251,6 +290,12 @@ const AudioRecord = () => {
});
};

// Records the slider's gain value in a ref (not state) so the per-frame
// audio analysis loop can read the latest value without re-rendering.
const handleGainChange = (event: React.ChangeEvent<HTMLInputElement>) => {
  gainValueRef.current = parseFloat(event.target.value);
};

return (
<div className="flex flex-col justify-center items-center m-4 gap-2">
<select className="border" onChange={(e) => setSelectedMicrophone(e.target.value)}>
Expand Down Expand Up @@ -289,6 +334,11 @@ const AudioRecord = () => {
<div key={index} className="w-[8%] rounded"></div>
))}
</div>
<div className="volume-meter2 w-[150px] h-[20px] flex gap-1" ref={volumeMeterRef3}>
{Array.from({ length: 10 }, (_, index) => (
<div key={index} className="w-[8%] rounded"></div>
))}
</div>

<br></br>

Expand All @@ -309,6 +359,11 @@ const AudioRecord = () => {
</p>
</div>

<div className="flex items-center gap-2">
<label htmlFor="volumeSlider">λ³Όλ₯¨ 쑰절:</label>
<input type="range" id="volumeSlider" min="0" max="1" step="0.01" onChange={handleGainChange} />
</div>

{audioURL && (
<div>
<p>λ…ΉμŒλœ μŒμ„±:</p>
Expand Down
1 change: 0 additions & 1 deletion mediaServer/src/RelayServer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,6 @@ export class RelayServer {
});
const stream = event.streams[0];
console.log(stream);
console.log(`stream id: ${stream.id}`);
};

this.getServerCandidate(socket, data.socketId);
Expand Down

0 comments on commit 339bb32

Please sign in to comment.