From 4a0cf768f1f381991982966cddab9dec16460722 Mon Sep 17 00:00:00 2001 From: shogo4405 Date: Mon, 14 Oct 2024 22:21:30 +0900 Subject: [PATCH] Migration #1603 --- HaishinKit.xcodeproj/project.pbxproj | 8 ------ Sources/Codec/VTSessionMode.swift | 4 +-- Sources/Codec/VideoCodec.swift | 42 ++++++++++++++++------------ Sources/RTMP/RTMPStream.swift | 34 +++++++++++++--------- Sources/RTMP/RTMPTimestamp.swift | 26 ++++++++++------- Tests/RTMP/RTMPTimestampTests.swift | 28 +++++++++---------- 6 files changed, 77 insertions(+), 65 deletions(-) diff --git a/HaishinKit.xcodeproj/project.pbxproj b/HaishinKit.xcodeproj/project.pbxproj index f33273997..7f0fa497a 100644 --- a/HaishinKit.xcodeproj/project.pbxproj +++ b/HaishinKit.xcodeproj/project.pbxproj @@ -1005,7 +1005,6 @@ BC0B5B1D2BE9310800D83F8E /* CMVideoSampleBufferFactory.swift */, 295018191FFA196800358E10 /* Codec */, BC03945D2AA8AFDD006EDE38 /* Extension */, - BCCB426A2C6A2D1D003B1168 /* HKStream */, 29798E5D1CE60E5300F5CBD0 /* Info.plist */, 291C2ACF1CE9FF2B006F042B /* ISO */, BC0BF4F329866FB700D72CB4 /* Mixer */, @@ -1256,13 +1255,6 @@ path = Network; sourceTree = ""; }; - BCCB426A2C6A2D1D003B1168 /* HKStream */ = { - isa = PBXGroup; - children = ( - ); - path = HKStream; - sourceTree = ""; - }; BCCC45972AA289FA0016EFE8 /* SRTHaishinKit */ = { isa = PBXGroup; children = ( diff --git a/Sources/Codec/VTSessionMode.swift b/Sources/Codec/VTSessionMode.swift index 9d6cef693..eb747adf7 100644 --- a/Sources/Codec/VTSessionMode.swift +++ b/Sources/Codec/VTSessionMode.swift @@ -15,7 +15,7 @@ enum VTSessionMode { height: Int32(videoCodec.settings.videoSize.height), codecType: videoCodec.settings.format.codecType, encoderSpecification: nil, - imageBufferAttributes: videoCodec.imageBufferAttributes(.compression) as CFDictionary?, + imageBufferAttributes: videoCodec.makeImageBufferAttributes(.compression) as CFDictionary?, compressedDataAllocator: nil, outputCallback: nil, refcon: nil, @@ -43,7 +43,7 @@ enum VTSessionMode { 
allocator: kCFAllocatorDefault, formatDescription: formatDescription, decoderSpecification: nil, - imageBufferAttributes: videoCodec.imageBufferAttributes(.decompression) as CFDictionary?, + imageBufferAttributes: videoCodec.makeImageBufferAttributes(.decompression) as CFDictionary?, outputCallback: nil, decompressionSessionOut: &session ) diff --git a/Sources/Codec/VideoCodec.swift b/Sources/Codec/VideoCodec.swift index 863bc5720..727e6e010 100644 --- a/Sources/Codec/VideoCodec.swift +++ b/Sources/Codec/VideoCodec.swift @@ -25,9 +25,17 @@ final class VideoCodec { } var needsSync = true var passthrough = true + var outputStream: AsyncStream { + AsyncStream { continuation in + self.continuation = continuation + } + } var frameInterval = VideoCodec.frameInterval var expectedFrameRate = MediaMixer.defaultFrameRate - /// The running value indicating whether the VideoCodec is running. + private var startedAt: CMTime = .zero + private var continuation: AsyncStream.Continuation? + private var isInvalidateSession = true + private var presentationTimeStamp: CMTime = .zero private(set) var isRunning = false private(set) var inputFormat: CMFormatDescription? { didSet { @@ -45,15 +53,6 @@ final class VideoCodec { } } private(set) var outputFormat: CMFormatDescription? - var outputStream: AsyncStream { - AsyncStream { continuation in - self.continuation = continuation - } - } - private var startedAt: CMTime = .zero - private var continuation: AsyncStream.Continuation? 
- private var isInvalidateSession = true - private var presentationTimeStamp: CMTime = .invalid func append(_ sampleBuffer: CMSampleBuffer) { guard isRunning else { @@ -68,18 +67,22 @@ final class VideoCodec { session = try VTSessionMode.compression.makeSession(self) } } - guard let session else { + guard let session, let continuation else { return } - if let continuation { + if sampleBuffer.formatDescription?.isCompressed == true { try session.convert(sampleBuffer, continuation: continuation) + } else { + if useFrame(sampleBuffer.presentationTimeStamp) { + try session.convert(sampleBuffer, continuation: continuation) + } } } catch { logger.error(error) } } - func imageBufferAttributes(_ mode: VTSessionMode) -> [NSString: AnyObject]? { + func makeImageBufferAttributes(_ mode: VTSessionMode) -> [NSString: AnyObject]? { switch mode { case .compression: var attributes: [NSString: AnyObject] = [:] @@ -96,14 +99,17 @@ final class VideoCodec { } } - private func willDropFrame(_ presentationTimeStamp: CMTime) -> Bool { + private func useFrame(_ presentationTimeStamp: CMTime) -> Bool { guard startedAt <= presentationTimeStamp else { - return true + return false } - guard Self.frameInterval < frameInterval else { + guard self.presentationTimeStamp < presentationTimeStamp else { return false } - return presentationTimeStamp.seconds - self.presentationTimeStamp.seconds <= frameInterval + guard Self.frameInterval < frameInterval else { + return true + } + return frameInterval <= presentationTimeStamp.seconds - self.presentationTimeStamp.seconds } #if os(iOS) || os(tvOS) || os(visionOS) @@ -164,7 +170,7 @@ extension VideoCodec: Runner { needsSync = true inputFormat = nil outputFormat = nil - presentationTimeStamp = .invalid + presentationTimeStamp = .zero continuation?.finish() startedAt = .zero #if os(iOS) || os(tvOS) || os(visionOS) diff --git a/Sources/RTMP/RTMPStream.swift b/Sources/RTMP/RTMPStream.swift index 2edc05853..750dae2d7 100644 --- a/Sources/RTMP/RTMPStream.swift 
+++ b/Sources/RTMP/RTMPStream.swift @@ -739,15 +739,19 @@ extension RTMPStream: HKStream { switch sampleBuffer.formatDescription?.mediaType { case .video: if sampleBuffer.formatDescription?.isCompressed == true { - let decodeTimeStamp = sampleBuffer.decodeTimeStamp.isValid ? sampleBuffer.decodeTimeStamp : sampleBuffer.presentationTimeStamp - let compositionTime = videoTimestamp.getCompositionTime(sampleBuffer) - let timedelta = videoTimestamp.update(decodeTimeStamp) - frameCount += 1 - videoFormat = sampleBuffer.formatDescription - guard let message = RTMPVideoMessage(streamId: id, timestamp: timedelta, compositionTime: compositionTime, sampleBuffer: sampleBuffer) else { - return + do { + let decodeTimeStamp = sampleBuffer.decodeTimeStamp.isValid ? sampleBuffer.decodeTimeStamp : sampleBuffer.presentationTimeStamp + let compositionTime = videoTimestamp.getCompositionTime(sampleBuffer) + let timedelta = try videoTimestamp.update(decodeTimeStamp) + frameCount += 1 + videoFormat = sampleBuffer.formatDescription + guard let message = RTMPVideoMessage(streamId: id, timestamp: timedelta, compositionTime: compositionTime, sampleBuffer: sampleBuffer) else { + return + } + doOutput(.one, chunkStreamId: .video, message: message) + } catch { + logger.warn(error) } - doOutput(.one, chunkStreamId: .video, message: message) } else { outgoing.append(sampleBuffer) if sampleBuffer.formatDescription?.isCompressed == false { @@ -775,12 +779,16 @@ extension RTMPStream: HKStream { public func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) { switch audioBuffer { case let audioBuffer as AVAudioCompressedBuffer: - let timedelta = audioTimestamp.update(when) - audioFormat = audioBuffer.format - guard let message = RTMPAudioMessage(streamId: id, timestamp: timedelta, audioBuffer: audioBuffer) else { - return + do { + let timedelta = try audioTimestamp.update(when) + audioFormat = audioBuffer.format + guard let message = RTMPAudioMessage(streamId: id, timestamp: timedelta, 
audioBuffer: audioBuffer) else { + return + } + doOutput(.one, chunkStreamId: .audio, message: message) + } catch { + logger.warn(error) } - doOutput(.one, chunkStreamId: .audio, message: message) default: outgoing.append(audioBuffer, when: when) if audioBuffer is AVAudioPCMBuffer && audioSampleAccess { diff --git a/Sources/RTMP/RTMPTimestamp.swift b/Sources/RTMP/RTMPTimestamp.swift index f30b44aeb..6ac648e8e 100644 --- a/Sources/RTMP/RTMPTimestamp.swift +++ b/Sources/RTMP/RTMPTimestamp.swift @@ -10,25 +10,31 @@ private let kRTMPTimestamp_defaultTimeInterval: TimeInterval = 0 private let kRTMPTimestamp_compositiionTimeOffset = CMTime(value: 3, timescale: 30) struct RTMPTimestamp { + enum Error: Swift.Error { + case invalidSequence + } + private var startedAt = kRTMPTimestamp_defaultTimeInterval private var updatedAt = kRTMPTimestamp_defaultTimeInterval private var timedeltaFraction: TimeInterval = kRTMPTimestamp_defaultTimeInterval - mutating func update(_ value: T) -> UInt32 { + mutating func update(_ value: T) throws -> UInt32 { + guard updatedAt < value.seconds else { + throw Error.invalidSequence + } if startedAt == 0 { startedAt = value.seconds updatedAt = value.seconds return 0 - } else { - var timedelta = (value.seconds - updatedAt) * 1000 - timedeltaFraction += timedelta.truncatingRemainder(dividingBy: 1) - if 1 <= timedeltaFraction { - timedeltaFraction -= 1 - timedelta += 1 - } - updatedAt = value.seconds - return 0.0 <= timedelta ? 
UInt32(timedelta) : UInt32.min } + var timedelta = (value.seconds - updatedAt) * 1000 + timedeltaFraction += timedelta.truncatingRemainder(dividingBy: 1) + if 1 <= timedeltaFraction { + timedeltaFraction -= 1 + timedelta += 1 + } + updatedAt = value.seconds + return UInt32(timedelta) } mutating func update(_ message: some RTMPMessage, chunkType: RTMPChunkType) { diff --git a/Tests/RTMP/RTMPTimestampTests.swift b/Tests/RTMP/RTMPTimestampTests.swift index a5f019ce2..570f1d2c0 100644 --- a/Tests/RTMP/RTMPTimestampTests.swift +++ b/Tests/RTMP/RTMPTimestampTests.swift @@ -4,7 +4,7 @@ import AVFoundation @testable import HaishinKit @Suite struct RTMPTimestampTests { - @Test func cMTime() { + @Test func updateCMTime() { let times: [CMTime] = [ CMTime(value: 286340171565869, timescale: 1000000000), CMTime(value: 286340204889958, timescale: 1000000000), @@ -15,15 +15,15 @@ import AVFoundation CMTime(value: 286340338232723, timescale: 1000000000), ] var timestamp = RTMPTimestamp() - #expect(0 == timestamp.update(times[0])) - #expect(33 == timestamp.update(times[1])) - #expect(33 == timestamp.update(times[2])) - #expect(33 == timestamp.update(times[3])) - #expect(34 == timestamp.update(times[4])) - #expect(33 == timestamp.update(times[5])) + #expect(try! timestamp.update(times[0]) == 0) + #expect(try! timestamp.update(times[1]) == 33) + #expect(try! timestamp.update(times[2]) == 33) + #expect(try! timestamp.update(times[3]) == 33) + #expect(try! timestamp.update(times[4]) == 34) + #expect(try! 
timestamp.update(times[5]) == 33) } - @Test func aVAudioTime() { + @Test func updateAVAudioTime() { let times: [AVAudioTime] = [ .init(hostTime: 6901294874500, sampleTime: 13802589749, atRate: 48000), .init(hostTime: 6901295386500, sampleTime: 13802590773, atRate: 48000), @@ -33,11 +33,11 @@ import AVFoundation .init(hostTime: 6901297434500, sampleTime: 13802594869, atRate: 48000), ] var timestamp = RTMPTimestamp() - #expect(0 == timestamp.update(times[0])) - #expect(21 == timestamp.update(times[1])) - #expect(21 == timestamp.update(times[2])) - #expect(22 == timestamp.update(times[3])) - #expect(21 == timestamp.update(times[4])) - #expect(21 == timestamp.update(times[5])) + #expect(try! timestamp.update(times[0]) == 0) + #expect(try! timestamp.update(times[1]) == 21) + #expect(try! timestamp.update(times[2]) == 21) + #expect(try! timestamp.update(times[3]) == 22) + #expect(try! timestamp.update(times[4]) == 21) + #expect(try! timestamp.update(times[5]) == 21) } }