RTMP server audio continued.
eerimoq committed Apr 16, 2024
1 parent b29290a commit 4197df1
Showing 5 changed files with 120 additions and 20 deletions.
6 changes: 5 additions & 1 deletion Moblin/RtmpServer/RtmpServer.swift
@@ -1,3 +1,4 @@
import AVFAudio
import CoreMedia
import Foundation
import HaishinKit
@@ -21,6 +22,7 @@ class RtmpServer {
var onPublishStart: (String) -> Void
var onPublishStop: (String) -> Void
var onFrame: (String, CMSampleBuffer) -> Void
var onAudioBuffer: (String, AVAudioPCMBuffer) -> Void
var settings: SettingsRtmpServer
private var periodicTimer: DispatchSourceTimer?
var totalBytesReceived: UInt64 = 0
@@ -29,12 +31,14 @@ class RtmpServer {
init(settings: SettingsRtmpServer,
onPublishStart: @escaping (String) -> Void,
onPublishStop: @escaping (String) -> Void,
onFrame: @escaping (String, CMSampleBuffer) -> Void)
onFrame: @escaping (String, CMSampleBuffer) -> Void,
onAudioBuffer: @escaping (String, AVAudioPCMBuffer) -> Void)
{
self.settings = settings
self.onPublishStart = onPublishStart
self.onPublishStop = onPublishStop
self.onFrame = onFrame
self.onAudioBuffer = onAudioBuffer
clients = []
}

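A minimal sketch of how the new onAudioBuffer callback slots into the constructor alongside the existing callbacks; rtmpServerSettings and the handler bodies are placeholders, and Model.swift further down shows the actual wiring:

let server = RtmpServer(
    settings: rtmpServerSettings, // placeholder for an existing SettingsRtmpServer value
    onPublishStart: { streamKey in print("publish start: \(streamKey)") },
    onPublishStop: { streamKey in print("publish stop: \(streamKey)") },
    onFrame: { _, sampleBuffer in
        // Forward decoded video frames, for example into the replace-video pipeline.
        _ = sampleBuffer
    },
    onAudioBuffer: { _, audioBuffer in
        // Forward decoded PCM audio buffers.
        _ = audioBuffer
    }
)
server.start()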
114 changes: 96 additions & 18 deletions Moblin/RtmpServer/RtmpServerChunkStream.swift
@@ -28,9 +28,12 @@ class RtmpServerChunkStream: VideoCodecDelegate {
private var videoTimestampZero: Double
private var videoTimestamp: Double
private var formatDescription: CMVideoFormatDescription?
private var videoCodec: VideoCodec?
private var videoDecoder: VideoCodec?
private var numberOfFrames: UInt64 = 0
private var videoCodecLockQueue = DispatchQueue(label: "com.eerimoq.Moblin.VideoCodec")
private var audioBuffer: AVAudioCompressedBuffer?
private var audioDecoder: AVAudioConverter?
private var pcmAudioFormat: AVAudioFormat?

init(client: RtmpServerClient, streamId: UInt16) {
self.client = client
@@ -49,8 +52,8 @@ class RtmpServerChunkStream: VideoCodecDelegate {
}

func stop() {
videoCodec?.stopRunning()
videoCodec = nil
videoDecoder?.stopRunning()
videoDecoder = nil
client = nil
}

@@ -324,20 +327,95 @@ class RtmpServerChunkStream: VideoCodecDelegate {
PTS: \(timing.presentationTimeStamp.seconds), \
DTS: \(timing.decodeTimeStamp.seconds)
""") */
switch messageData[1] {
case FLVAACPacketType.seq.rawValue:
if let config =
AudioSpecificConfig(bytes: [UInt8](messageData[codec.headerSize ..< messageData.count]))
{
logger.info("rtmp-server: client: \(config.audioStreamBasicDescription())")
}
case FLVAACPacketType.raw.rawValue:
break
switch FLVAACPacketType(rawValue: messageData[1]) {
case .seq:
processMessageAudioTypeSeq(client: client, codec: codec)
case .raw:
processMessageAudioTypeRaw(client: client, codec: codec)
default:
break
}
}

private func processMessageAudioTypeSeq(client _: RtmpServerClient, codec: FLVAudioCodec) {
if let config =
AudioSpecificConfig(bytes: [UInt8](messageData[codec.headerSize ..< messageData.count]))
{
var streamDescription = config.audioStreamBasicDescription()
logger.info("rtmp-server: client: \(streamDescription)")
if let audioFormat = AVAudioFormat(streamDescription: &streamDescription) {
logger.info("rtmp-server: client: \(audioFormat)")
audioBuffer = AVAudioCompressedBuffer(
format: audioFormat,
packetCapacity: 1,
maximumPacketSize: 1024 * Int(audioFormat.channelCount)
)
pcmAudioFormat = AVAudioFormat(
commonFormat: .pcmFormatInt16,
sampleRate: audioFormat.sampleRate,
channels: audioFormat.channelCount,
interleaved: audioFormat.isInterleaved
)
guard let pcmAudioFormat else {
logger.info("rtmp-server: client: Failed to create PCM audio format")
return
}
audioDecoder = AVAudioConverter(from: audioFormat, to: pcmAudioFormat)
guard let audioDecoder else {
logger.info("rtmp-server: client: Failed to create audio decdoer")
return
}
} else {
logger.info("rtmp-server: client: Failed to create audio format")
audioBuffer = nil
audioDecoder = nil
}
}
}
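
The seq packet carries the AAC AudioSpecificConfig; its AudioStreamBasicDescription becomes the input AVAudioFormat, and an AVAudioConverter to 16-bit PCM is built from it. A standalone sketch of that setup pattern, with hard-coded example values (48 kHz stereo AAC-LC) standing in for the parsed config:

import AVFoundation

// Example values only; the real code derives these from the AudioSpecificConfig.
var asbd = AudioStreamBasicDescription(
    mSampleRate: 48000,
    mFormatID: kAudioFormatMPEG4AAC,
    mFormatFlags: 0,
    mBytesPerPacket: 0,
    mFramesPerPacket: 1024, // one AAC frame
    mBytesPerFrame: 0,
    mChannelsPerFrame: 2,
    mBitsPerChannel: 0,
    mReserved: 0
)
if let aacFormat = AVAudioFormat(streamDescription: &asbd),
   let pcmFormat = AVAudioFormat(commonFormat: .pcmFormatInt16,
                                 sampleRate: aacFormat.sampleRate,
                                 channels: aacFormat.channelCount,
                                 interleaved: aacFormat.isInterleaved),
   let converter = AVAudioConverter(from: aacFormat, to: pcmFormat)
{
    // converter can now decode one compressed AAC packet at a time into PCM.
    _ = converter
}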

private func processMessageAudioTypeRaw(client _: RtmpServerClient, codec: FLVAudioCodec) {
guard let audioBuffer else {
return
}
let length = messageData.count - codec.headerSize
messageData.withUnsafeBytes { (buffer: UnsafeRawBufferPointer) in
guard let baseAddress = buffer.baseAddress else {
return
}
audioBuffer.packetDescriptions?.pointee = AudioStreamPacketDescription(
mStartOffset: 0,
mVariableFramesInPacket: 0,
mDataByteSize: UInt32(length)
)
audioBuffer.packetCount = 1
audioBuffer.byteLength = UInt32(length)
audioBuffer.data.copyMemory(from: baseAddress.advanced(by: codec.headerSize), byteCount: length)
}
guard let audioDecoder, let pcmAudioFormat else {
return
}
var error: NSError?
guard let outputBuffer = AVAudioPCMBuffer(pcmFormat: pcmAudioFormat, frameCapacity: 1024) else {
return
}
let outputStatus = audioDecoder.convert(to: outputBuffer, error: &error) { _, inputStatus in
inputStatus.pointee = .haveData
return self.audioBuffer
}
switch outputStatus {
case .haveData:
client?.handleAudioBuffer(audioBuffer: outputBuffer)
case .error:
if let error {
logger.info("rtmp-server: client: Error \(error)")
} else {
logger.info("rtmp-server: client: Unknown error")
}
default:
logger.info("rtmp-server: client: Output status \(outputStatus.rawValue)")
}
}
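
The raw packet path copies a single AAC packet into the compressed buffer and pulls at most one decoded frame (1024 PCM samples per channel) out of the converter via the input block. A sketch of just that convert-with-input-block call, assuming a converter, compressed buffer, and PCM format created as in the seq handler above:

import AVFAudio

func decodeOnePacket(converter: AVAudioConverter,
                     compressedBuffer: AVAudioCompressedBuffer,
                     pcmFormat: AVAudioFormat) -> AVAudioPCMBuffer? {
    // One AAC packet decodes to at most 1024 PCM frames.
    guard let outputBuffer = AVAudioPCMBuffer(pcmFormat: pcmFormat, frameCapacity: 1024) else {
        return nil
    }
    var error: NSError?
    let status = converter.convert(to: outputBuffer, error: &error) { _, inputStatus in
        // Hand the converter the single packet that was just filled in.
        inputStatus.pointee = .haveData
        return compressedBuffer
    }
    return status == .haveData ? outputBuffer : nil
}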

private func processMessageVideo() {
guard let client else {
return
@@ -381,17 +459,17 @@ class RtmpServerChunkStream: VideoCodecDelegate {
)
return
}
guard videoCodec == nil else {
guard videoDecoder == nil else {
return
}
var config = AVCDecoderConfigurationRecord()
config.data = messageData.subdata(in: FLVTagType.video.headerSize ..< messageData.count)
let status = config.makeFormatDescription(&formatDescription)
if status == noErr {
videoCodec = VideoCodec(lockQueue: videoCodecLockQueue)
videoCodec!.formatDescription = formatDescription
videoCodec!.delegate = self
videoCodec!.startRunning()
videoDecoder = VideoCodec(lockQueue: videoCodecLockQueue)
videoDecoder!.formatDescription = formatDescription
videoDecoder!.delegate = self
videoDecoder!.startRunning()
} else {
client.stopInternal(reason: "Format description error \(status)")
}
@@ -403,7 +481,7 @@ class RtmpServerChunkStream: VideoCodecDelegate {
return
}
if let sampleBuffer = makeSampleBuffer(client: client) {
videoCodec?.appendSampleBuffer(sampleBuffer)
videoDecoder?.appendSampleBuffer(sampleBuffer)
} else {
client.stopInternal(reason: "Make sample buffer failed")
}
5 changes: 5 additions & 0 deletions Moblin/RtmpServer/RtmpServerClient.swift
@@ -1,3 +1,4 @@
import AVFAudio
import CoreMedia
import Foundation
import HaishinKit
@@ -88,6 +89,10 @@ class RtmpServerClient {
server?.onFrame(streamKey, sampleBuffer)
}

func handleAudioBuffer(audioBuffer: AVAudioPCMBuffer) {
server?.onAudioBuffer(streamKey, audioBuffer)
}

private func handleData(data: Data) {
switch state {
case .uninitialized:
5 changes: 5 additions & 0 deletions Moblin/Various/Media.swift
@@ -516,6 +516,11 @@ final class Media: NSObject {
netStream.addReplaceVideoSampleBuffer(id: cameraId, sampleBuffer)
}

func addRtmpAudioBuffer(cameraId _: UUID, audioBuffer _: AVAudioPCMBuffer) {
// logger.info("RTMP audio buffer \(audioBuffer)")
// netStream.addReplaceVideoSampleBuffer(id: cameraId, sampleBuffer)
}

func addRtmpCamera(cameraId: UUID, latency: Double) {
netStream.addReplaceVideo(cameraId: cameraId, latency: latency)
}
10 changes: 9 additions & 1 deletion Moblin/Various/Model.swift
@@ -1072,7 +1072,8 @@ final class Model: NSObject, ObservableObject {
rtmpServer = RtmpServer(settings: database.rtmpServer!.clone(),
onPublishStart: handleRtmpServerPublishStart,
onPublishStop: handleRtmpServerPublishStop,
onFrame: handleRtmpServerFrame)
onFrame: handleRtmpServerFrame,
onAudioBuffer: handleRtmpServerAudioBuffer)
rtmpServer!.start()
}
}
@@ -1106,6 +1107,13 @@ final class Model: NSObject, ObservableObject {
media.addRtmpSampleBuffer(cameraId: cameraId, sampleBuffer: sampleBuffer)
}

func handleRtmpServerAudioBuffer(streamKey: String, audioBuffer: AVAudioPCMBuffer) {
guard let cameraId = getRtmpStream(streamKey: streamKey)?.id else {
return
}
media.addRtmpAudioBuffer(cameraId: cameraId, audioBuffer: audioBuffer)
}

private func listCameras(position: AVCaptureDevice.Position) -> [Camera] {
var deviceTypes: [AVCaptureDevice.DeviceType] = [
.builtInTripleCamera,