actor IOMixer and Screen.
shogo4405 committed Aug 7, 2024
1 parent 39ee8e3 commit 5a36e75
Showing 26 changed files with 557 additions and 640 deletions.
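
The substance of the change: IOMixer becomes an actor and screen composition moves behind the global @ScreenActor, so call sites trade synchronous property mutation for awaited setter methods inside a `Task`. A minimal sketch of the new calling convention, using only names that appear in the diffs below (setter names and types are taken from this diff, not independently verified against the released API):

```swift
import HaishinKit

let mixer = IOMixer()

// Mixer state is actor-isolated: each access hops onto the actor with await.
Task {
    await mixer.setFrameRate(30)
    await mixer.setMonitoringEnabled(true)

    // Value-typed settings are read, mutated locally, and written back.
    var settings = await mixer.videoMixerSettings
    settings.mode = .offscreen
    await mixer.setVideoMixerSettings(settings)
}

// Screen-composition objects live on the global @ScreenActor instead.
Task { @ScreenActor in
    let overlay = VideoTrackScreenObject()
    overlay.track = 1
    await mixer.screen.size = .init(width: 720, height: 1280)
    try? await mixer.screen.addChild(overlay)
}
```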
153 changes: 87 additions & 66 deletions Examples/iOS/IngestViewController.swift
@@ -18,59 +18,60 @@ final class IngestViewController: UIViewController {
@IBOutlet private weak var audioDevicePicker: UIPickerView!
@IBOutlet private weak var audioMonoStereoSegmentCOntrol: UISegmentedControl!

private var currentEffect: VideoEffect?
@ScreenActor
private var currentEffect: (any VideoEffect)?
private var currentPosition: AVCaptureDevice.Position = .back
private var retryCount: Int = 0
private var preferedStereo = false
private let netStreamSwitcher: NetStreamSwitcher = .init()
private var mixer = IOMixer()
private lazy var mixer = IOMixer()
private lazy var audioCapture: AudioCapture = {
let audioCapture = AudioCapture()
audioCapture.delegate = self
return audioCapture
}()
@ScreenActor
private var videoScreenObject = VideoTrackScreenObject()

override func viewDidLoad() {
super.viewDidLoad()

Task {
// To use the multi-camera feature, set mixer.isMultiCamSessionEnabled = true before calling attachCamera or attachAudio.
// mixer.isMultiCamSessionEnabled = true
if let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) {
await mixer.setVideoOrientation(orientation)
}
await mixer.setMonitoringEnabled(DeviceUtil.isHeadphoneConnected())
var videoMixerSettings = await mixer.videoMixerSettings
videoMixerSettings.mode = .offscreen
await mixer.setVideoMixerSettings(videoMixerSettings)
await netStreamSwitcher.setPreference(Preference.default)
if let stream = await netStreamSwitcher.stream {
mixer.addStream(stream)
await mixer.addStream(stream)
if let view = view as? (any IOStreamObserver) {
await stream.addObserver(view)
}
}
}

mixer.screen.size = .init(width: 720, height: 1280)
mixer.screen.backgroundColor = UIColor.white.cgColor

videoScreenObject.cornerRadius = 16.0
videoScreenObject.track = 1
videoScreenObject.horizontalAlignment = .right
videoScreenObject.layoutMargin = .init(top: 16, left: 0, bottom: 0, right: 16)
videoScreenObject.size = .init(width: 160 * 2, height: 90 * 2)
try? mixer.screen.addChild(videoScreenObject)

// To use the multi-camera feature, set mixer.isMultiCamSessionEnabled = true before calling attachCamera or attachAudio.
mixer.isMultiCamSessionEnabled = true
if let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) {
mixer.videoOrientation = orientation
Task { @ScreenActor in
videoScreenObject.cornerRadius = 16.0
videoScreenObject.track = 1
videoScreenObject.horizontalAlignment = .right
videoScreenObject.layoutMargin = .init(top: 16, left: 0, bottom: 0, right: 16)
videoScreenObject.size = .init(width: 160 * 2, height: 90 * 2)
await mixer.screen.size = .init(width: 720, height: 1280)
await mixer.screen.backgroundColor = UIColor.white.cgColor
try? await mixer.screen.addChild(videoScreenObject)
}
mixer.isMonitoringEnabled = DeviceUtil.isHeadphoneConnected()

videoBitrateSlider?.value = Float(VideoCodecSettings.default.bitRate) / 1000
audioBitrateSlider?.value = Float(AudioCodecSettings.default.bitRate) / 1000
}

override func viewWillAppear(_ animated: Bool) {
logger.info("viewWillAppear")
super.viewWillAppear(animated)

mixer.videoMixerSettings.mode = .offscreen
mixer.screen.startRunning()

Task {
let back = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: currentPosition)
try? await mixer.attachCamera(back, track: 0)
@@ -92,43 +93,55 @@ final class IngestViewController: UIViewController {
try? await mixer.attachAudio(nil)
try? await mixer.attachCamera(nil, track: 0)
try? await mixer.attachCamera(nil, track: 1)
mixer.screen.stopRunning()
}
// swiftlint:disable:next notification_center_detachment
NotificationCenter.default.removeObserver(self)
}

override func viewWillTransition(to size: CGSize, with coordinator: any UIViewControllerTransitionCoordinator) {
if UIDevice.current.orientation.isLandscape {
mixer.screen.size = .init(width: 1280, height: 720)
} else {
mixer.screen.size = .init(width: 720, height: 1280)
Task { @ScreenActor in
if await UIDevice.current.orientation.isLandscape {
await mixer.screen.size = .init(width: 1280, height: 720)
} else {
await mixer.screen.size = .init(width: 720, height: 1280)
}
}
}

@IBAction func rotateCamera(_ sender: UIButton) {
logger.info("rotateCamera")
if mixer.isMultiCamSessionEnabled {
if mixer.videoMixerSettings.mainTrack == 0 {
mixer.videoMixerSettings.mainTrack = 1
videoScreenObject.track = 0

Task {
if await mixer.isMultiCamSessionEnabled {
var videoMixerSettings = await mixer.videoMixerSettings

if videoMixerSettings.mainTrack == 0 {
videoMixerSettings.mainTrack = 1
await mixer.setVideoMixerSettings(videoMixerSettings)
Task { @ScreenActor in
videoScreenObject.track = 0
}
} else {
videoMixerSettings.mainTrack = 0
Task { @ScreenActor in
videoScreenObject.track = 1
}
}
} else {
mixer.videoMixerSettings.mainTrack = 0
videoScreenObject.track = 1
}
} else {
let position: AVCaptureDevice.Position = currentPosition == .back ? .front : .back
Task {
let position: AVCaptureDevice.Position = currentPosition == .back ? .front : .back
try? await mixer.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position)) { videoUnit in
videoUnit?.isVideoMirrored = position == .front
}
currentPosition = position
}
currentPosition = position
}
}

@IBAction func toggleTorch(_ sender: UIButton) {
mixer.torch.toggle()
Task {
let isTorchEnabled = await mixer.isTorchEnabled
await mixer.setTorchEnabled(!isTorchEnabled)
}
}

@IBAction func on(slider: UISlider) {
@@ -219,31 +232,35 @@ final class IngestViewController: UIViewController {
}

@IBAction private func onFPSValueChanged(_ segment: UISegmentedControl) {
switch segment.selectedSegmentIndex {
case 0:
mixer.frameRate = 15
case 1:
mixer.frameRate = 30
case 2:
mixer.frameRate = 60
default:
break
Task {
switch segment.selectedSegmentIndex {
case 0:
await mixer.setFrameRate(15)
case 1:
await mixer.setFrameRate(30)
case 2:
await mixer.setFrameRate(60)
default:
break
}
}
}

@IBAction private func onEffectValueChanged(_ segment: UISegmentedControl) {
if let currentEffect: VideoEffect = currentEffect {
_ = mixer.unregisterVideoEffect(currentEffect)
}
switch segment.selectedSegmentIndex {
case 1:
currentEffect = MonochromeEffect()
_ = mixer.registerVideoEffect(currentEffect!)
case 2:
currentEffect = PronamaEffect()
_ = mixer.registerVideoEffect(currentEffect!)
default:
break
Task { @ScreenActor in
if let currentEffect = await currentEffect {
_ = await mixer.screen.unregisterVideoEffect(currentEffect)
}
switch segment.selectedSegmentIndex {

[CI annotation — GitHub Actions / build, on line 254: expression is 'async' but is not marked with 'await']
case 1:
currentEffect = MonochromeEffect()
_ = await mixer.screen.registerVideoEffect(currentEffect!)
case 2:
currentEffect = PronamaEffect()
_ = await mixer.screen.registerVideoEffect(currentEffect!)
default:
break
}
}
}

@@ -277,10 +294,12 @@ final class IngestViewController: UIViewController {
audioDevicePicker.isHidden = false
}
audioDevicePicker.reloadAllComponents()
if DeviceUtil.isHeadphoneDisconnected(notification) {
mixer.isMonitoringEnabled = false
} else {
mixer.isMonitoringEnabled = DeviceUtil.isHeadphoneConnected()
Task {
if DeviceUtil.isHeadphoneDisconnected(notification) {
await mixer.setMonitoringEnabled(false)
} else {
await mixer.setMonitoringEnabled(DeviceUtil.isHeadphoneConnected())
}
}
}

@@ -289,7 +308,9 @@
guard let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) else {
return
}
mixer.videoOrientation = orientation
Task {
await mixer.setVideoOrientation(orientation)
}
}
}

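One pattern worth calling out from this file: a UIKit action (MainActor) opens a `Task` to reach the IOMixer actor, then a nested `Task { @ScreenActor in }` to touch screen objects, because mixer settings and screen state now live in different isolation domains. A condensed sketch of that double hop (the method name `swapMainTrack` is hypothetical; the calls mirror `rotateCamera` above):

```swift
@IBAction private func swapMainTrack(_ sender: UIButton) {
    Task {
        // Hop onto the IOMixer actor for the mixer settings...
        var settings = await mixer.videoMixerSettings
        settings.mainTrack = settings.mainTrack == 0 ? 1 : 0
        await mixer.setVideoMixerSettings(settings)
        // ...then onto @ScreenActor for the picture-in-picture object.
        Task { @ScreenActor in
            videoScreenObject.track = settings.mainTrack == 0 ? 1 : 0
        }
    }
}
```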
16 changes: 8 additions & 8 deletions Examples/iOS/Screencast/SampleHandler.swift
@@ -40,12 +40,12 @@ final class SampleHandler: RPBroadcastSampleHandler, @unchecked Sendable {
logger.level = .debug
*/
LBLogger.with(HaishinKitIdentifier).level = .info
mixer.audioMixerSettings.tracks[1] = .default
// mixer.audioMixerSettings.tracks[1] = .default
isVideoRotationEnabled = true
Task {
await netStreamSwitcher.setPreference(Preference.default)
if let stream = await netStreamSwitcher.stream {
mixer.addStream(stream)
await mixer.addStream(stream)
}
await netStreamSwitcher.open(.ingest)
}
@@ -58,7 +58,7 @@ final class SampleHandler: RPBroadcastSampleHandler, @unchecked Sendable {
}
}

@MainActor override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
switch sampleBufferType {
case .video:
Task {
@@ -78,23 +78,23 @@
if #available(iOS 16.0, tvOS 16.0, macOS 13.0, *), let rotator {
switch rotator.rotate(buffer: sampleBuffer) {
case .success(let rotatedBuffer):
mixer.append(rotatedBuffer)
Task { await mixer.append(rotatedBuffer) }
case .failure(let error):
logger.error(error)
}
} else {
mixer.append(sampleBuffer)
Task { await mixer.append(sampleBuffer) }
}
case .audioMic:
if CMSampleBufferDataIsReady(sampleBuffer) {
mixer.append(sampleBuffer, track: 0)
Task { await mixer.append(sampleBuffer, track: 0) }
}
case .audioApp:
if let volume = slider?.value {
mixer.audioMixerSettings.tracks[1]?.volume = volume * 0.5
// mixer.audioMixerSettings.tracks[1]?.volume = volume * 0.5
}
if CMSampleBufferDataIsReady(sampleBuffer) {
mixer.append(sampleBuffer, track: 1)
Task { await mixer.append(sampleBuffer, track: 1) }
}
@unknown default:
break
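Because `processSampleBuffer` is a synchronous ReplayKit callback, each append is now bridged to the mixer actor with a fire-and-forget `Task`. A stripped-down sketch of that bridging (`MinimalSampleHandler` is a hypothetical, condensed version of the handler above; track numbers follow the diff, with mic audio on track 0 and app audio on track 1):

```swift
import CoreMedia
import HaishinKit
import ReplayKit

final class MinimalSampleHandler: RPBroadcastSampleHandler, @unchecked Sendable {
    private let mixer = IOMixer()

    override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
        guard CMSampleBufferDataIsReady(sampleBuffer) else { return }
        switch sampleBufferType {
        case .video:
            Task { await mixer.append(sampleBuffer) }
        case .audioMic:
            Task { await mixer.append(sampleBuffer, track: 0) }
        case .audioApp:
            Task { await mixer.append(sampleBuffer, track: 1) }
        @unknown default:
            break
        }
    }
}
```

One design consequence: separate unstructured Tasks do not guarantee strict ordering of appends under load, a trade-off the committed handler appears to accept as well.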
8 changes: 2 additions & 6 deletions Examples/iOS/VisualEffect.swift
@@ -19,11 +19,7 @@ final class PronamaEffect: VideoEffect {
}
var pronama: CIImage?

override init() {
super.init()
}

override func execute(_ image: CIImage, info: CMSampleBuffer?) -> CIImage {
func execute(_ image: CIImage, info: CMSampleBuffer?) -> CIImage {
guard let filter: CIFilter = filter else {
return image
}
@@ -37,7 +33,7 @@ final class PronamaEffect: VideoEffect {
final class MonochromeEffect: VideoEffect {
let filter: CIFilter? = CIFilter(name: "CIColorMonochrome")

override func execute(_ image: CIImage, info: CMSampleBuffer?) -> CIImage {
func execute(_ image: CIImage, info: CMSampleBuffer?) -> CIImage {
guard let filter: CIFilter = filter else {
return image
}
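With the `override` keywords gone, `VideoEffect` reads as a protocol requirement rather than a base-class method, and effects now register on `mixer.screen` from `@ScreenActor` (see `onEffectValueChanged` above). Under that reading, a custom effect would look like this sketch (`SepiaEffect` is hypothetical; only the `execute(_:info:)` shape is assumed from the diff):

```swift
import CoreImage
import CoreMedia

final class SepiaEffect: VideoEffect {
    private let filter = CIFilter(name: "CISepiaTone")

    func execute(_ image: CIImage, info: CMSampleBuffer?) -> CIImage {
        guard let filter else { return image }
        filter.setValue(image, forKey: kCIInputImageKey)
        filter.setValue(0.8, forKey: kCIInputIntensityKey)
        return filter.outputImage ?? image
    }
}

// Registration mirrors the view controller above:
// Task { @ScreenActor in
//     _ = await mixer.screen.registerVideoEffect(SepiaEffect())
// }
```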
(diffs for the remaining 23 changed files not shown)
