Merge pull request #313 from platinouss/fix/240307-audio-record
Fix(#312): Resolve the issue where the media server kept memory occupied until the presenter's audio recording ended
tmddus2 authored Mar 13, 2024
2 parents cf6d307 + dea2ba2 commit ea602f6
Showing 6 changed files with 80 additions and 89 deletions.
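
What changed, in short: before this PR the presenter's audio samples were written into a PassThrough stream that was only piped to a file once the lecture ended (the old mediaStreamToFile call inside setFfmpeg), so the media server held the entire recording in memory for the whole lecture. After this PR the PassThrough is piped to the temp file via stream.pipeline as soon as recording starts, and the ffmpeg conversion reads that file when the lecture ends. A minimal sketch of the streaming pattern, assuming the same wrtc nonstandard RTCAudioSink API the diff uses (the helper name and file path below are illustrative, not the repository's actual code):

```typescript
import wrtc from 'wrtc';
import { PassThrough, pipeline } from 'stream';
import fs from 'fs';

const { RTCAudioSink } = wrtc.nonstandard;

// Illustrative helper: stream a presenter audio track straight to disk.
const recordAudioTrackToFile = (track: MediaStreamTrack, tempFilePath: string) => {
  const audio = new PassThrough();

  // Pipe to the temp file immediately, so samples never pile up in process memory.
  pipeline(audio, fs.createWriteStream(tempFilePath), (err) => {
    if (err) {
      console.log(err);
    }
  });

  // Push raw PCM buffers into the PassThrough as the sink delivers them.
  const audioSink = new RTCAudioSink(track);
  audioSink.ondata = ({ samples: { buffer } }) => {
    audio.write(Buffer.from(buffer));
  };

  // Call this when the lecture ends: stop capturing and flush the file.
  return () => {
    audioSink.stop();
    audio.end();
  };
};
```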
35 changes: 0 additions & 35 deletions mediaServer/src/models/FfmpegCommand.ts

This file was deleted.

7 changes: 0 additions & 7 deletions mediaServer/src/models/PeerStreamInfo.ts
@@ -1,5 +1,4 @@
import { PassThrough } from 'stream';
import { FfmpegCommand } from './FfmpegCommand';
import { RTCAudioSink } from 'wrtc';

interface MediaFileNameList {
@@ -12,14 +11,12 @@ export class PeerStreamInfo {
private readonly _audio: PassThrough;
private _audioSink: RTCAudioSink;
private _recordEnd: boolean;
private _proc: FfmpegCommand | null;

constructor(audioSink: RTCAudioSink, roomId: string) {
this._audioSink = audioSink;
this._mediaFileNameList = this.setFileName(roomId);
this._audio = new PassThrough();
this._recordEnd = false;
this._proc = null;
}

get audioTempFileName(): string {
@@ -38,10 +35,6 @@ export class PeerStreamInfo {
this._recordEnd = isRecordEnd;
}

set proc(FfmpegCommand: FfmpegCommand) {
this._proc = FfmpegCommand;
}

setFileName = (roomId: string): MediaFileNameList => {
return {
audioTempFile: `audio-${roomId}.sock`,
2 changes: 1 addition & 1 deletion mediaServer/src/services/presenter.service.ts
@@ -47,7 +47,7 @@ const editWhiteboard = async (roomId: string, content: ICanvasData) => {

const endLecture = async (roomId: string, email: string) => {
sendDataToClient('/lecture', roomId, 'ended', new Message(MessageType.LECTURE, 'finish'));
mediaConverter.setFfmpeg(roomId);
mediaConverter.endRecording(roomId);
relayServer.deleteRoom(email, roomId);
await Promise.all([deleteRoomInfoById(roomId), deleteQuestionStream(roomId)]);
};
34 changes: 18 additions & 16 deletions mediaServer/src/services/webrtc-connection.service.ts
@@ -5,27 +5,18 @@ import { mediaConverter } from '../utils/media-converter';
import { ServerAnswerDto } from '../dto/server-answer.dto';
import { setPresenterMediaStream } from './participant.service';
import { sendDataToClient } from './socket.service';
import { RoomConnectionInfo } from '../models/RoomConnectionInfo';

const setTrackEvent = (RTCPC: RTCPeerConnection, roomId: string) => {
RTCPC.ontrack = (event) => {
const roomInfo = relayServer.roomConnectionInfoList.get(roomId);
if (roomInfo) {
roomInfo.stream = event.streams[0];
roomInfo.participantIdList.forEach((participantId: string) => {
const participantConnectionInfo = relayServer.clientConnectionInfoList.get(participantId);
if (participantConnectionInfo) {
event.streams[0].getTracks().forEach(async (track: MediaStreamTrack) => {
await participantConnectionInfo.RTCPC.getSenders()[0].replaceTrack(track);
});
}
});
const presenterAudioSink = mediaConverter.setSink(event.streams[0]);
if (presenterAudioSink === null) {
console.log('λ°œν‘œμžμ˜ audio-sinkκ°€ μ‘΄μž¬ν•˜μ§€ μ•ŠμŠ΅λ‹ˆλ‹€.');
return;
}
mediaConverter.startRecording(presenterAudioSink, roomId);
if (!roomInfo) {
console.log('κ°•μ˜μ‹€μ΄ μ‘΄μž¬ν•˜μ§€ μ•ŠμŠ΅λ‹ˆλ‹€.');
return;
}
roomInfo.stream = event.streams[0];
setPresenterAudioTrack(roomInfo, roomInfo.stream);
mediaConverter.startRecording(roomId, roomInfo.stream);
};
};

@@ -85,4 +76,15 @@ const setParticipantWebRTCConnection = async (
RTCPC.setLocalDescription(answer);
};

const setPresenterAudioTrack = (roomInfo: RoomConnectionInfo, presenterMediaStream: MediaStream) => {
roomInfo.participantIdList.forEach((participantId: string) => {
const participantConnectionInfo = relayServer.clientConnectionInfoList.get(participantId);
if (participantConnectionInfo) {
presenterMediaStream.getTracks().forEach(async (track: MediaStreamTrack) => {
await participantConnectionInfo.RTCPC.getSenders()[0].replaceTrack(track);
});
}
});
};

export { setTrackEvent, exchangeCandidate, setPresenterWebRTCConnection, setParticipantWebRTCConnection };
31 changes: 31 additions & 0 deletions mediaServer/src/utils/ffmpeg.ts
@@ -0,0 +1,31 @@
import { PeerStreamInfo } from '../models/PeerStreamInfo';
import ffmpeg from 'fluent-ffmpeg';
import { audioConfig } from '../config/ffmpeg.config';
import fs from 'fs';

const runFfmpegCommand = (
audioTempFilePath: string,
recordFilePath: string,
roomId: string,
streamInfo: PeerStreamInfo,
endRecording: (roomId: string) => void
) => {
ffmpeg(fs.createReadStream(audioTempFilePath))
.addInputOptions(audioConfig)
.format('mp3')
.audioCodec('libmp3lame')
.on('start', () => {
console.log(`${roomId} κ°•μ˜μ‹€ λ°œν‘œμž μŒμ„± 파일 λ³€ν™˜ μ‹œμž‘`);
})
.on('error', (err) => {
console.log(err);
})
.on('end', async () => {
streamInfo.recordEnd = true;
await endRecording(roomId);
console.log(`${roomId} κ°•μ˜μ‹€ λ°œν‘œμž μŒμ„± 파일 λ³€ν™˜ μ™„λ£Œ`);
})
.pipe(fs.createWriteStream(recordFilePath), { end: true });
};

export { runFfmpegCommand };
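
The new runFfmpegCommand reads the raw temp file and transcodes it to mp3. Its input options come from audioConfig in ffmpeg.config, which is not part of this diff; a plausible shape for that config, given that RTCAudioSink emits signed 16-bit PCM (the sample rate and channel count below are assumptions, not values taken from the repository):

```typescript
// Hypothetical mediaServer/src/config/ffmpeg.config.ts — not shown in this PR.
// Describes the raw PCM layout so ffmpeg can parse the temp file before encoding.
const audioConfig = [
  '-f s16le',   // RTCAudioSink delivers signed 16-bit little-endian samples
  '-ar 48000',  // assumed capture sample rate
  '-ac 1',      // assumed mono presenter audio
];

export { audioConfig };
```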
60 changes: 30 additions & 30 deletions mediaServer/src/utils/media-converter.ts
@@ -1,17 +1,17 @@
import wrtc, { RTCAudioSink } from 'wrtc';
import { PassThrough } from 'stream';
import fs from 'fs';
const { RTCAudioSink } = wrtc.nonstandard;
import ffmpeg from 'fluent-ffmpeg';
import ffmpegPath from '@ffmpeg-installer/ffmpeg';
import path from 'path';
import { PeerStreamInfo } from '../models/PeerStreamInfo';
import { FfmpegCommand } from '../models/FfmpegCommand';
import { uploadFileToObjectStorage } from './ncp-storage';
import { RETRIABLE_ERROR, SUCCEEDED } from '../constants/clova-api-response-type.constant';
import { ClovaApiReponse } from '../dto/clova-api-response.dto';
import { ClovaApiRequest } from '../dto/clova-api-request.dto';
import { AUDIO_OUTPUT_DIR } from '../constants/media-converter.constant';
import { runFfmpegCommand } from './ffmpeg';
import { pipeline } from 'stream';
ffmpeg.setFfmpegPath(ffmpegPath.path);

class MediaConverter {
@@ -24,31 +24,26 @@ class MediaConverter {
}
}

get presenterStreamInfoList() {
return this._presenterStreamInfoList;
}

getPresenterStreamInfo = (roomId: string) => {
return this._presenterStreamInfoList.get(roomId);
};

setSink = (tracks: MediaStream): RTCAudioSink | null => {
let audioSink = null;
tracks.getTracks().forEach((track) => {
if (track.kind === 'audio') {
audioSink = new RTCAudioSink(track);
}
});
return audioSink;
startRecording = (roomId: string, tracks: MediaStream) => {
tracks.getTracks().forEach((track) => this.setAudioSampleDataEventListener(roomId, track));
};

startRecording = (audioSink: RTCAudioSink, roomId: string) => {
setAudioSampleDataEventListener = (roomId: string, track: MediaStreamTrack) => {
if (track.kind !== 'audio') {
return;
}
const audioSink = new RTCAudioSink(track);
if (this._presenterStreamInfoList.has(roomId)) {
const presenterStreamInfo = this._presenterStreamInfoList.get(roomId) as PeerStreamInfo;
presenterStreamInfo.pauseRecording();
presenterStreamInfo.replaceAudioSink(audioSink);
} else {
this._presenterStreamInfoList.set(roomId, new PeerStreamInfo(audioSink, roomId));
this.pipeMediaStreamToFile(roomId);
}
audioSink.ondata = ({ samples: { buffer } }) => {
const stream = this._presenterStreamInfoList.get(roomId) as PeerStreamInfo;
@@ -62,30 +57,35 @@ class MediaConverter {
}
};

setFfmpeg = async (roomId: string): Promise<void> => {
endRecording = async (roomId: string): Promise<void> => {
const streamInfo = this._presenterStreamInfoList.get(roomId);
if (!streamInfo) {
console.log('ν•΄λ‹Ή κ°•μ˜μ‹€ λ°œν‘œμžκ°€ μ‘΄μž¬ν•˜μ§€ μ•ŠμŠ΅λ‹ˆλ‹€.');
return;
}
await this.mediaStreamToFile(streamInfo.audio, streamInfo.audioTempFileName);
streamInfo.proc = new FfmpegCommand(
this.getOutputAbsolutePath(streamInfo.audioTempFileName),
this.getOutputAbsolutePath(streamInfo.recordFileName),
runFfmpegCommand(
this.getAbsoluteOutputPath(streamInfo.audioTempFileName),
this.getAbsoluteOutputPath(streamInfo.recordFileName),
roomId,
streamInfo,
this.endRecording
this.finalizeRecording
);
};

mediaStreamToFile = async (stream: PassThrough, fileName: string): Promise<string> => {
const outputPath = path.join(AUDIO_OUTPUT_DIR, fileName);
const outputFile = fs.createWriteStream(outputPath);
stream.pipe(outputFile);
return outputPath;
pipeMediaStreamToFile = (roomId: string) => {
const streamInfo = this._presenterStreamInfoList.get(roomId) as PeerStreamInfo;
pipeline(
streamInfo.audio,
fs.createWriteStream(this.getAbsoluteOutputPath(streamInfo.audioTempFileName)),
(err) => {
if (err) {
console.log(err);
}
}
);
};

endRecording = async (roomId: string) => {
finalizeRecording = async (roomId: string) => {
const streamInfo = this._presenterStreamInfoList.get(roomId);
if (!streamInfo) {
console.log('ν•΄λ‹Ή κ°•μ˜μ‹€ λ°œν‘œμžκ°€ μ‘΄μž¬ν•˜μ§€ μ•ŠμŠ΅λ‹ˆλ‹€.');
@@ -97,7 +97,7 @@
this._presenterStreamInfoList.delete(roomId);
};

getOutputAbsolutePath = (fileName: string) => {
getAbsoluteOutputPath = (fileName: string) => {
return path.join(AUDIO_OUTPUT_DIR, fileName);
};

@@ -136,8 +136,8 @@ class MediaConverter {

extractSubtitle = async (url: any, code: string) => {
const response = await fetch(process.env.CLOVA_API_URL as string, ClovaApiRequest(url, code));
const result = await response.json() as ClovaApiReponse;
const result = (await response.json()) as ClovaApiReponse;

if (result.result == SUCCEEDED) {
console.log(`[${result.result}] κ°•μ˜ μžλ§‰ μ €μž₯`);
}
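
Taken together, the renamed MediaConverter methods give a simple lifecycle: startRecording pipes the presenter's audio to audio-<roomId>.sock while the lecture runs, endRecording hands that file to runFfmpegCommand, and finalizeRecording (invoked from the ffmpeg 'end' handler) uploads the result and drops the room's PeerStreamInfo. A sketch of how the call sites wire this up, purely illustrative — the real calls live in webrtc-connection.service.ts and presenter.service.ts:

```typescript
import { mediaConverter } from '../utils/media-converter';

// ontrack handler: start streaming the presenter's audio to disk right away.
const onPresenterTrack = (roomId: string, stream: MediaStream) => {
  mediaConverter.startRecording(roomId, stream);
};

// endLecture handler: convert the temp file to mp3; finalizeRecording is then
// triggered by the ffmpeg 'end' event once the conversion has finished.
const onLectureEnd = async (roomId: string) => {
  await mediaConverter.endRecording(roomId);
};
```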
