From c38d430d4898c601916c257646eca990b96a6579 Mon Sep 17 00:00:00 2001 From: platinouss Date: Tue, 12 Mar 2024 03:02:08 +0900 Subject: [PATCH 1/3] =?UTF-8?q?refactor:=20=EB=B0=9C=ED=91=9C=EC=9E=90=20?= =?UTF-8?q?=EC=9D=8C=EC=84=B1=20=EB=85=B9=EC=9D=8C=20=EB=AA=A8=EB=93=88=20?= =?UTF-8?q?=EB=A6=AC=ED=8C=A9=ED=86=A0=EB=A7=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- mediaServer/src/models/FfmpegCommand.ts | 35 ------------- mediaServer/src/models/PeerStreamInfo.ts | 7 --- mediaServer/src/services/presenter.service.ts | 2 +- .../src/services/webrtc-connection.service.ts | 34 ++++++------ mediaServer/src/utils/ffmpeg.ts | 29 +++++++++++ mediaServer/src/utils/media-converter.ts | 52 ++++++++----------- 6 files changed, 70 insertions(+), 89 deletions(-) delete mode 100644 mediaServer/src/models/FfmpegCommand.ts create mode 100644 mediaServer/src/utils/ffmpeg.ts diff --git a/mediaServer/src/models/FfmpegCommand.ts b/mediaServer/src/models/FfmpegCommand.ts deleted file mode 100644 index d0f406f..0000000 --- a/mediaServer/src/models/FfmpegCommand.ts +++ /dev/null @@ -1,35 +0,0 @@ -import ffmpeg from 'fluent-ffmpeg'; -import { audioConfig } from '../config/ffmpeg.config'; -import { PeerStreamInfo } from './PeerStreamInfo'; - -export class FfmpegCommand { - private readonly _command: ffmpeg.FfmpegCommand; - - constructor( - audioTempFilePath: string, - recordFilePath: string, - roomId: string, - streamInfo: PeerStreamInfo, - endRecording: (roomId: string) => void - ) { - this._command = ffmpeg() - .addInput(audioTempFilePath) - .addInputOptions(audioConfig) - .on('start', () => { - console.log(`${roomId} 강의실 음성 녹화 시작`); - }) - .on('error', (err) => { - console.log(err); - }) - .on('end', async () => { - streamInfo.recordEnd = true; - await endRecording(roomId); - console.log(`${roomId} 강의실 음성 녹화 종료`); - }) - .save(recordFilePath); - } - - run = () => { - this._command.run(); - }; -} diff --git a/mediaServer/src/models/PeerStreamInfo.ts b/mediaServer/src/models/PeerStreamInfo.ts index 189f6d2..cf32884 100644 --- a/mediaServer/src/models/PeerStreamInfo.ts +++ b/mediaServer/src/models/PeerStreamInfo.ts @@ -1,5 +1,4 @@ import { PassThrough } from 'stream'; -import { FfmpegCommand } from './FfmpegCommand'; import { RTCAudioSink } from 'wrtc'; interface MediaFileNameList { @@ -12,14 +11,12 @@ export class PeerStreamInfo { private readonly _audio: PassThrough; private _audioSink: RTCAudioSink; private _recordEnd: boolean; - private _proc: FfmpegCommand | null; constructor(audioSink: RTCAudioSink, roomId: string) { this._audioSink = audioSink; this._mediaFileNameList = this.setFileName(roomId); this._audio = new PassThrough(); this._recordEnd = false; - this._proc = null; } get audioTempFileName(): string { @@ -38,10 +35,6 @@ export class PeerStreamInfo { this._recordEnd = isRecordEnd; } - set proc(FfmpegCommand: FfmpegCommand) { - this._proc = FfmpegCommand; - } - setFileName = (roomId: string): MediaFileNameList => { return { audioTempFile: `audio-${roomId}.sock`, diff --git a/mediaServer/src/services/presenter.service.ts b/mediaServer/src/services/presenter.service.ts index 67b6818..65f2450 100644 --- a/mediaServer/src/services/presenter.service.ts +++ b/mediaServer/src/services/presenter.service.ts @@ -47,7 +47,7 @@ const editWhiteboard = async (roomId: string, content: ICanvasData) => { const endLecture = async (roomId: string, email: string) => { sendDataToClient('/lecture', roomId, 'ended', new Message(MessageType.LECTURE, 'finish')); - 
mediaConverter.setFfmpeg(roomId); + mediaConverter.endRecording(roomId); relayServer.deleteRoom(email, roomId); await Promise.all([deleteRoomInfoById(roomId), deleteQuestionStream(roomId)]); }; diff --git a/mediaServer/src/services/webrtc-connection.service.ts b/mediaServer/src/services/webrtc-connection.service.ts index ed4c890..ca85e7d 100644 --- a/mediaServer/src/services/webrtc-connection.service.ts +++ b/mediaServer/src/services/webrtc-connection.service.ts @@ -5,27 +5,18 @@ import { mediaConverter } from '../utils/media-converter'; import { ServerAnswerDto } from '../dto/server-answer.dto'; import { setPresenterMediaStream } from './participant.service'; import { sendDataToClient } from './socket.service'; +import { RoomConnectionInfo } from '../models/RoomConnectionInfo'; const setTrackEvent = (RTCPC: RTCPeerConnection, roomId: string) => { RTCPC.ontrack = (event) => { const roomInfo = relayServer.roomConnectionInfoList.get(roomId); - if (roomInfo) { - roomInfo.stream = event.streams[0]; - roomInfo.participantIdList.forEach((participantId: string) => { - const participantConnectionInfo = relayServer.clientConnectionInfoList.get(participantId); - if (participantConnectionInfo) { - event.streams[0].getTracks().forEach(async (track: MediaStreamTrack) => { - await participantConnectionInfo.RTCPC.getSenders()[0].replaceTrack(track); - }); - } - }); - const presenterAudioSink = mediaConverter.setSink(event.streams[0]); - if (presenterAudioSink === null) { - console.log('발표자의 audio-sink가 존재하지 않습니다.'); - return; - } - mediaConverter.startRecording(presenterAudioSink, roomId); + if (!roomInfo) { + console.log('강의실이 존재하지 않습니다.'); + return; } + roomInfo.stream = event.streams[0]; + setPresenterAudioTrack(roomInfo, roomInfo.stream); + mediaConverter.startRecording(roomId, roomInfo.stream); }; }; @@ -85,4 +76,15 @@ const setParticipantWebRTCConnection = async ( RTCPC.setLocalDescription(answer); }; +const setPresenterAudioTrack = (roomInfo: RoomConnectionInfo, presenterMediaStream: MediaStream) => { + roomInfo.participantIdList.forEach((participantId: string) => { + const participantConnectionInfo = relayServer.clientConnectionInfoList.get(participantId); + if (participantConnectionInfo) { + presenterMediaStream.getTracks().forEach(async (track: MediaStreamTrack) => { + await participantConnectionInfo.RTCPC.getSenders()[0].replaceTrack(track); + }); + } + }); +}; + export { setTrackEvent, exchangeCandidate, setPresenterWebRTCConnection, setParticipantWebRTCConnection }; diff --git a/mediaServer/src/utils/ffmpeg.ts b/mediaServer/src/utils/ffmpeg.ts new file mode 100644 index 0000000..18db5b5 --- /dev/null +++ b/mediaServer/src/utils/ffmpeg.ts @@ -0,0 +1,29 @@ +import { PeerStreamInfo } from '../models/PeerStreamInfo'; +import ffmpeg from 'fluent-ffmpeg'; +import { audioConfig } from '../config/ffmpeg.config'; + +const runFfmpegCommand = ( + audioTempFilePath: string, + recordFilePath: string, + roomId: string, + streamInfo: PeerStreamInfo, + endRecording: (roomId: string) => void +) => { + ffmpeg() + .addInput(audioTempFilePath) + .addInputOptions(audioConfig) + .on('start', () => { + console.log(`${roomId} 강의실 발표자 음성 파일 변환 시작`); + }) + .on('error', (err) => { + console.log(err); + }) + .on('end', async () => { + streamInfo.recordEnd = true; + await endRecording(roomId); + console.log(`${roomId} 강의실 발표자 음성 파일 변환 완료`); + }) + .save(recordFilePath); +}; + +export { runFfmpegCommand }; diff --git a/mediaServer/src/utils/media-converter.ts b/mediaServer/src/utils/media-converter.ts index 
328dfbb..3d01012 100644
--- a/mediaServer/src/utils/media-converter.ts
+++ b/mediaServer/src/utils/media-converter.ts
@@ -1,17 +1,16 @@
 import wrtc, { RTCAudioSink } from 'wrtc';
-import { PassThrough } from 'stream';
 import fs from 'fs';
 const { RTCAudioSink } = wrtc.nonstandard;
 import ffmpeg from 'fluent-ffmpeg';
 import ffmpegPath from '@ffmpeg-installer/ffmpeg';
 import path from 'path';
 import { PeerStreamInfo } from '../models/PeerStreamInfo';
-import { FfmpegCommand } from '../models/FfmpegCommand';
 import { uploadFileToObjectStorage } from './ncp-storage';
 import { RETRIABLE_ERROR, SUCCEEDED } from '../constants/clova-api-response-type.constant';
 import { ClovaApiReponse } from '../dto/clova-api-response.dto';
 import { ClovaApiRequest } from '../dto/clova-api-request.dto';
 import { AUDIO_OUTPUT_DIR } from '../constants/media-converter.constant';
+import { runFfmpegCommand } from './ffmpeg';
 ffmpeg.setFfmpegPath(ffmpegPath.path);
 
 class MediaConverter {
@@ -24,25 +23,19 @@
     }
   }
 
-  get presenterStreamInfoList() {
-    return this._presenterStreamInfoList;
-  }
-
   getPresenterStreamInfo = (roomId: string) => {
     return this._presenterStreamInfoList.get(roomId);
  };
 
-  setSink = (tracks: MediaStream): RTCAudioSink | null => {
-    let audioSink = null;
-    tracks.getTracks().forEach((track) => {
-      if (track.kind === 'audio') {
-        audioSink = new RTCAudioSink(track);
-      }
-    });
-    return audioSink;
+  startRecording = (roomId: string, tracks: MediaStream) => {
+    tracks.getTracks().forEach((track) => this.setAudioSampleDataEventListener(roomId, track));
   };
 
-  startRecording = (audioSink: RTCAudioSink, roomId: string) => {
+  setAudioSampleDataEventListener = (roomId: string, track: MediaStreamTrack) => {
+    if (track.kind !== 'audio') {
+      return;
+    }
+    const audioSink = new RTCAudioSink(track);
     if (this._presenterStreamInfoList.has(roomId)) {
       const presenterStreamInfo = this._presenterStreamInfoList.get(roomId) as PeerStreamInfo;
       presenterStreamInfo.pauseRecording();
@@ -62,30 +55,29 @@
     }
   };
 
-  setFfmpeg = async (roomId: string): Promise<void> => {
+  endRecording = async (roomId: string): Promise<void> => {
     const streamInfo = this._presenterStreamInfoList.get(roomId);
     if (!streamInfo) {
       console.log('해당 강의실 발표자가 존재하지 않습니다.');
       return;
     }
-    await this.mediaStreamToFile(streamInfo.audio, streamInfo.audioTempFileName);
-    streamInfo.proc = new FfmpegCommand(
-      this.getOutputAbsolutePath(streamInfo.audioTempFileName),
-      this.getOutputAbsolutePath(streamInfo.recordFileName),
+    this.pipeMediaStreamToFile(roomId);
+    runFfmpegCommand(
+      this.getAbsoluteOutputPath(streamInfo.audioTempFileName),
+      this.getAbsoluteOutputPath(streamInfo.recordFileName),
       roomId,
       streamInfo,
-      this.endRecording
+      this.finalizeRecording
     );
   };
 
-  mediaStreamToFile = async (stream: PassThrough, fileName: string): Promise<string> => {
-    const outputPath = path.join(AUDIO_OUTPUT_DIR, fileName);
-    const outputFile = fs.createWriteStream(outputPath);
-    stream.pipe(outputFile);
-    return outputPath;
+  pipeMediaStreamToFile = (roomId: string) => {
+    const streamInfo = this._presenterStreamInfoList.get(roomId) as PeerStreamInfo;
+    const outputFile = fs.createWriteStream(this.getAbsoluteOutputPath(streamInfo.audioTempFileName));
+    streamInfo.audio.pipe(outputFile);
   };
 
-  endRecording = async (roomId: string) => {
+  finalizeRecording = async (roomId: string) => {
     const streamInfo = this._presenterStreamInfoList.get(roomId);
     if (!streamInfo) {
       console.log('해당 강의실 발표자가 존재하지 않습니다.');
       return;
     }
@@ -97,7 +89,7 @@
     this._presenterStreamInfoList.delete(roomId);
   };
 
-  getOutputAbsolutePath = (fileName: string) => {
+  getAbsoluteOutputPath = (fileName: string) => {
     return path.join(AUDIO_OUTPUT_DIR, fileName);
   };
 
@@ -136,8 +128,8 @@ class MediaConverter {
 
   extractSubtitle = async (url: any, code: string) => {
     const response = await fetch(process.env.CLOVA_API_URL as string, ClovaApiRequest(url, code));
-    const result = await response.json() as ClovaApiReponse;
-
+    const result = (await response.json()) as ClovaApiReponse;
+
     if (result.result == SUCCEEDED) {
       console.log(`[${result.result}] 강의 자막 저장`);
     }

From 622ae13d5bb51db3e5eadf60c0502ce72ad77997 Mon Sep 17 00:00:00 2001
From: platinouss
Date: Tue, 12 Mar 2024 03:30:24 +0900
Subject: [PATCH 2/3] =?UTF-8?q?fix:=20mp3=20=ED=8C=8C=EC=9D=BC=20=EB=B3=80?=
 =?UTF-8?q?=ED=99=98=20=EC=8B=9C=20readStream=EA=B3=BC=20writeStream=20?=
 =?UTF-8?q?=EC=82=AC=EC=9A=A9=ED=95=98=EB=8F=84=EB=A1=9D=20=EC=A0=81?=
 =?UTF-8?q?=EC=9A=A9=20#312?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Configured the conversion to go through Node streams in case the presenter's
raw audio data file is large.
---
 mediaServer/src/utils/ffmpeg.ts | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/mediaServer/src/utils/ffmpeg.ts b/mediaServer/src/utils/ffmpeg.ts
index 18db5b5..96774e5 100644
--- a/mediaServer/src/utils/ffmpeg.ts
+++ b/mediaServer/src/utils/ffmpeg.ts
@@ -1,6 +1,7 @@
 import { PeerStreamInfo } from '../models/PeerStreamInfo';
 import ffmpeg from 'fluent-ffmpeg';
 import { audioConfig } from '../config/ffmpeg.config';
+import fs from 'fs';
 
 const runFfmpegCommand = (
   audioTempFilePath: string,
@@ -9,9 +10,10 @@ const runFfmpegCommand = (
   streamInfo: PeerStreamInfo,
   endRecording: (roomId: string) => void
 ) => {
-  ffmpeg()
-    .addInput(audioTempFilePath)
+  ffmpeg(fs.createReadStream(audioTempFilePath))
     .addInputOptions(audioConfig)
+    .format('mp3')
+    .audioCodec('libmp3lame')
     .on('start', () => {
       console.log(`${roomId} 강의실 발표자 음성 파일 변환 시작`);
     })
@@ -23,7 +25,7 @@ const runFfmpegCommand = (
     await endRecording(roomId);
     console.log(`${roomId} 강의실 발표자 음성 파일 변환 완료`);
   })
-    .save(recordFilePath);
+    .pipe(fs.createWriteStream(recordFilePath), { end: true });
 };
 
 export { runFfmpegCommand };

From dea2ba2222b9610ca21c9d5f97967260f270643d Mon Sep 17 00:00:00 2001
From: platinouss
Date: Wed, 13 Mar 2024 16:30:09 +0900
Subject: [PATCH 3/3] =?UTF-8?q?fix:=20=EC=98=A4=EB=94=94=EC=98=A4=20?=
 =?UTF-8?q?=EC=83=98=ED=94=8C=EC=9D=B4=20=EC=A0=80=EC=9E=A5=EB=90=9C=20str?=
 =?UTF-8?q?eam=EA=B3=BC=20writeStream=EC=9D=84=20pipeline=EC=9C=BC?=
 =?UTF-8?q?=EB=A1=9C=20=EC=97=B0=EA=B2=B0=EC=8B=9C=ED=82=A8=EB=8B=A4.=20#3?=
 =?UTF-8?q?12?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

While recording, the presenter's audio sample data was kept around until the
lecture ended, so it occupied memory the whole time. The sample stream is
therefore connected to the write stream so that it can be destroyed.
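
For reference, a minimal standalone sketch of the intended flow; the stream
and file names below are illustrative stand-ins, not the actual module code:

    import fs from 'fs';
    import { PassThrough, pipeline } from 'stream';

    // Stand-in for PeerStreamInfo.audio, which buffers RTCAudioSink samples.
    const sampleSource = new PassThrough();

    // Wiring the sample stream to the file up front applies back-pressure and
    // lets pipeline() destroy both streams on end or error, so samples are no
    // longer held in memory until the lecture finishes.
    pipeline(sampleSource, fs.createWriteStream('./audio-example.sock'), (err) => {
      if (err) {
        console.log(err);
      }
    });

    // Each RTCAudioSink 'data' callback then only has to write its buffer.
    sampleSource.write(Buffer.from(new Int16Array(480).buffer));
    sampleSource.end();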
--- mediaServer/src/utils/media-converter.ts | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/mediaServer/src/utils/media-converter.ts b/mediaServer/src/utils/media-converter.ts index 3d01012..581635f 100644 --- a/mediaServer/src/utils/media-converter.ts +++ b/mediaServer/src/utils/media-converter.ts @@ -11,6 +11,7 @@ import { ClovaApiReponse } from '../dto/clova-api-response.dto'; import { ClovaApiRequest } from '../dto/clova-api-request.dto'; import { AUDIO_OUTPUT_DIR } from '../constants/media-converter.constant'; import { runFfmpegCommand } from './ffmpeg'; +import { pipeline } from 'stream'; ffmpeg.setFfmpegPath(ffmpegPath.path); class MediaConverter { @@ -42,6 +43,7 @@ class MediaConverter { presenterStreamInfo.replaceAudioSink(audioSink); } else { this._presenterStreamInfoList.set(roomId, new PeerStreamInfo(audioSink, roomId)); + this.pipeMediaStreamToFile(roomId); } audioSink.ondata = ({ samples: { buffer } }) => { const stream = this._presenterStreamInfoList.get(roomId) as PeerStreamInfo; @@ -61,7 +63,6 @@ class MediaConverter { console.log('해당 강의실 발표자가 존재하지 않습니다.'); return; } - this.pipeMediaStreamToFile(roomId); runFfmpegCommand( this.getAbsoluteOutputPath(streamInfo.audioTempFileName), this.getAbsoluteOutputPath(streamInfo.recordFileName), @@ -73,8 +74,15 @@ class MediaConverter { pipeMediaStreamToFile = (roomId: string) => { const streamInfo = this._presenterStreamInfoList.get(roomId) as PeerStreamInfo; - const outputFile = fs.createWriteStream(this.getAbsoluteOutputPath(streamInfo.audioTempFileName)); - streamInfo.audio.pipe(outputFile); + pipeline( + streamInfo.audio, + fs.createWriteStream(this.getAbsoluteOutputPath(streamInfo.audioTempFileName)), + (err) => { + if (err) { + console.log(err); + } + } + ); }; finalizeRecording = async (roomId: string) => {
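
For reference, the stream-based mp3 conversion introduced in PATCH 2/3 boils
down to the following standalone sketch. The raw-PCM input options and file
paths here are illustrative assumptions; the real values come from audioConfig
and PeerStreamInfo in the repository:

    import fs from 'fs';
    import ffmpeg from 'fluent-ffmpeg';
    import ffmpegPath from '@ffmpeg-installer/ffmpeg';

    ffmpeg.setFfmpegPath(ffmpegPath.path);

    // Illustrative paths; the server derives them from PeerStreamInfo and AUDIO_OUTPUT_DIR.
    const rawInputPath = './audio-example.sock';
    const mp3OutputPath = './audio-example.mp3';

    ffmpeg(fs.createReadStream(rawInputPath))
      // Assumed raw 48 kHz mono s16le capture; the repository keeps its own list in audioConfig.
      .addInputOptions(['-f s16le', '-ar 48000', '-ac 1'])
      // When piping the output instead of saving to a path, the container must be set explicitly.
      .format('mp3')
      .audioCodec('libmp3lame')
      .on('error', (err) => console.log(err))
      .on('end', () => console.log('conversion finished'))
      .pipe(fs.createWriteStream(mp3OutputPath), { end: true });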